OLD | NEW |
1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file |
2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
4 // | 4 // |
5 // The intrinsic code below is executed before a method has built its frame. | 5 // The intrinsic code below is executed before a method has built its frame. |
6 // The return address is on the stack and the arguments below it. | 6 // The return address is on the stack and the arguments below it. |
7 // Registers EDX (arguments descriptor) and ECX (function) must be preserved. | 7 // Registers EDX (arguments descriptor) and ECX (function) must be preserved. |
8 // Each intrinsification method returns true if the corresponding | 8 // Each intrinsification method returns true if the corresponding |
9 // Dart method was intrinsified. | 9 // Dart method was intrinsified. |
10 | 10 |
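To make the calling convention described above concrete, here is a minimal sketch (not part of this CL; ExampleIntrinsic is a hypothetical name) of the shape the intrinsics below share: test the fast-path preconditions, return directly on success, and otherwise fall through so the regular compiled body runs with ECX/EDX still intact.

    void Intrinsifier::ExampleIntrinsic(Assembler* assembler) {
      Label fall_through;
      // Arguments are addressed relative to ESP, above the return address.
      assembler->movl(EAX, Address(ESP, +1 * kWordSize));
      assembler->testl(EAX, Immediate(kSmiTagMask));
      assembler->j(NOT_ZERO, &fall_through);  // Not a Smi: take the slow path.
      // ... fast path leaves the result in EAX ...
      assembler->ret();                // Return without ever building a frame.
      assembler->Bind(&fall_through);  // Falls into the normal method body,
    }                                  // so ECX and EDX must still be live.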
(...skipping 19 matching lines...) |
30 // EDX: Arguments descriptor | 30 // EDX: Arguments descriptor |
31 // TOS: Return address | 31 // TOS: Return address |
32 // The ECX, EDX registers can be destroyed only if there is no slow-path, i.e. | 32 // The ECX, EDX registers can be destroyed only if there is no slow-path, i.e. |
33 // if the intrinsified method always executes a return. | 33 // if the intrinsified method always executes a return. |
34 // The EBP register should not be modified, because it is used by the profiler. | 34 // The EBP register should not be modified, because it is used by the profiler. |
35 // The THR register (see constants_ia32.h) must be preserved. | 35 // The THR register (see constants_ia32.h) must be preserved. |
36 | 36 |
37 #define __ assembler-> | 37 #define __ assembler-> |
38 | 38 |
39 | 39 |
40 intptr_t Intrinsifier::ParameterSlotFromSp() { return 0; } | 40 intptr_t Intrinsifier::ParameterSlotFromSp() { |
| 41 return 0; |
| 42 } |
41 | 43 |
42 | 44 |
43 void Intrinsifier::IntrinsicCallPrologue(Assembler* assembler) { | 45 void Intrinsifier::IntrinsicCallPrologue(Assembler* assembler) { |
44 COMPILE_ASSERT(CALLEE_SAVED_TEMP != ARGS_DESC_REG); | 46 COMPILE_ASSERT(CALLEE_SAVED_TEMP != ARGS_DESC_REG); |
45 | 47 |
46 assembler->Comment("IntrinsicCallPrologue"); | 48 assembler->Comment("IntrinsicCallPrologue"); |
47 assembler->movl(CALLEE_SAVED_TEMP, ARGS_DESC_REG); | 49 assembler->movl(CALLEE_SAVED_TEMP, ARGS_DESC_REG); |
48 } | 50 } |
49 | 51 |
50 | 52 |
51 void Intrinsifier::IntrinsicCallEpilogue(Assembler* assembler) { | 53 void Intrinsifier::IntrinsicCallEpilogue(Assembler* assembler) { |
52 assembler->Comment("IntrinsicCallEpilogue"); | 54 assembler->Comment("IntrinsicCallEpilogue"); |
53 assembler->movl(ARGS_DESC_REG, CALLEE_SAVED_TEMP); | 55 assembler->movl(ARGS_DESC_REG, CALLEE_SAVED_TEMP); |
54 } | 56 } |
55 | 57 |
56 | 58 |
57 static intptr_t ComputeObjectArrayTypeArgumentsOffset() { | 59 static intptr_t ComputeObjectArrayTypeArgumentsOffset() { |
58 const Library& core_lib = Library::Handle(Library::CoreLibrary()); | 60 const Library& core_lib = Library::Handle(Library::CoreLibrary()); |
59 const Class& cls = Class::Handle( | 61 const Class& cls = |
60 core_lib.LookupClassAllowPrivate(Symbols::_List())); | 62 Class::Handle(core_lib.LookupClassAllowPrivate(Symbols::_List())); |
61 ASSERT(!cls.IsNull()); | 63 ASSERT(!cls.IsNull()); |
62 ASSERT(cls.NumTypeArguments() == 1); | 64 ASSERT(cls.NumTypeArguments() == 1); |
63 const intptr_t field_offset = cls.type_arguments_field_offset(); | 65 const intptr_t field_offset = cls.type_arguments_field_offset(); |
64 ASSERT(field_offset != Class::kNoTypeArguments); | 66 ASSERT(field_offset != Class::kNoTypeArguments); |
65 return field_offset; | 67 return field_offset; |
66 } | 68 } |
67 | 69 |
68 | 70 |
69 // Intrinsify only for Smi value and index. Non-smi values need a store buffer | 71 // Intrinsify only for Smi value and index. Non-smi values need a store buffer |
70 // update. Array length is always a Smi. | 72 // update. Array length is always a Smi. |
71 void Intrinsifier::ObjectArraySetIndexed(Assembler* assembler) { | 73 void Intrinsifier::ObjectArraySetIndexed(Assembler* assembler) { |
72 Label fall_through; | 74 Label fall_through; |
73 if (Isolate::Current()->type_checks()) { | 75 if (Isolate::Current()->type_checks()) { |
74 const intptr_t type_args_field_offset = | 76 const intptr_t type_args_field_offset = |
75 ComputeObjectArrayTypeArgumentsOffset(); | 77 ComputeObjectArrayTypeArgumentsOffset(); |
76 // Inline simple tests (Smi, null); fall through if not conclusive. | 78 // Inline simple tests (Smi, null); fall through if not conclusive. |
77 const Immediate& raw_null = | 79 const Immediate& raw_null = |
78 Immediate(reinterpret_cast<intptr_t>(Object::null())); | 80 Immediate(reinterpret_cast<intptr_t>(Object::null())); |
79 Label checked_ok; | 81 Label checked_ok; |
80 __ movl(EDI, Address(ESP, + 1 * kWordSize)); // Value. | 82 __ movl(EDI, Address(ESP, +1 * kWordSize)); // Value. |
81 // Null value is valid for any type. | 83 // Null value is valid for any type. |
82 __ cmpl(EDI, raw_null); | 84 __ cmpl(EDI, raw_null); |
83 __ j(EQUAL, &checked_ok, Assembler::kNearJump); | 85 __ j(EQUAL, &checked_ok, Assembler::kNearJump); |
84 | 86 |
85 __ movl(EBX, Address(ESP, + 3 * kWordSize)); // Array. | 87 __ movl(EBX, Address(ESP, +3 * kWordSize)); // Array. |
86 __ movl(EBX, FieldAddress(EBX, type_args_field_offset)); | 88 __ movl(EBX, FieldAddress(EBX, type_args_field_offset)); |
87 // EBX: Type arguments of array. | 89 // EBX: Type arguments of array. |
88 __ cmpl(EBX, raw_null); | 90 __ cmpl(EBX, raw_null); |
89 __ j(EQUAL, &checked_ok, Assembler::kNearJump); | 91 __ j(EQUAL, &checked_ok, Assembler::kNearJump); |
90 // Check if it's dynamic. | 92 // Check if it's dynamic. |
91 // Get type at index 0. | 93 // Get type at index 0. |
92 __ movl(EAX, FieldAddress(EBX, TypeArguments::type_at_offset(0))); | 94 __ movl(EAX, FieldAddress(EBX, TypeArguments::type_at_offset(0))); |
93 __ CompareObject(EAX, Object::dynamic_type()); | 95 __ CompareObject(EAX, Object::dynamic_type()); |
94 __ j(EQUAL, &checked_ok, Assembler::kNearJump); | 96 __ j(EQUAL, &checked_ok, Assembler::kNearJump); |
95 // Check for int and num. | 97 // Check for int and num. |
96 __ testl(EDI, Immediate(kSmiTagMask)); // Value is Smi? | 98 __ testl(EDI, Immediate(kSmiTagMask)); // Value is Smi? |
97 __ j(NOT_ZERO, &fall_through); // Non-smi value. | 99 __ j(NOT_ZERO, &fall_through); // Non-smi value. |
98 __ CompareObject(EAX, Type::ZoneHandle(Type::IntType())); | 100 __ CompareObject(EAX, Type::ZoneHandle(Type::IntType())); |
99 __ j(EQUAL, &checked_ok, Assembler::kNearJump); | 101 __ j(EQUAL, &checked_ok, Assembler::kNearJump); |
100 __ CompareObject(EAX, Type::ZoneHandle(Type::Number())); | 102 __ CompareObject(EAX, Type::ZoneHandle(Type::Number())); |
101 __ j(NOT_EQUAL, &fall_through); | 103 __ j(NOT_EQUAL, &fall_through); |
102 __ Bind(&checked_ok); | 104 __ Bind(&checked_ok); |
103 } | 105 } |
104 __ movl(EBX, Address(ESP, + 2 * kWordSize)); // Index. | 106 __ movl(EBX, Address(ESP, +2 * kWordSize)); // Index. |
105 __ testl(EBX, Immediate(kSmiTagMask)); | 107 __ testl(EBX, Immediate(kSmiTagMask)); |
106 // Index not Smi. | 108 // Index not Smi. |
107 __ j(NOT_ZERO, &fall_through); | 109 __ j(NOT_ZERO, &fall_through); |
108 __ movl(EAX, Address(ESP, + 3 * kWordSize)); // Array. | 110 __ movl(EAX, Address(ESP, +3 * kWordSize)); // Array. |
109 // Range check. | 111 // Range check. |
110 __ cmpl(EBX, FieldAddress(EAX, Array::length_offset())); | 112 __ cmpl(EBX, FieldAddress(EAX, Array::length_offset())); |
111 // Runtime throws exception. | 113 // Runtime throws exception. |
112 __ j(ABOVE_EQUAL, &fall_through); | 114 __ j(ABOVE_EQUAL, &fall_through); |
113 // Note that EBX is Smi, i.e., times 2. | 115 // Note that EBX is Smi, i.e., times 2. |
114 ASSERT(kSmiTagShift == 1); | 116 ASSERT(kSmiTagShift == 1); |
115 // Destroy ECX (ic data) as we will not continue in the function. | 117 // Destroy ECX (ic data) as we will not continue in the function. |
116 __ movl(ECX, Address(ESP, + 1 * kWordSize)); // Value. | 118 __ movl(ECX, Address(ESP, +1 * kWordSize)); // Value. |
117 __ StoreIntoObject(EAX, | 119 __ StoreIntoObject(EAX, FieldAddress(EAX, EBX, TIMES_2, Array::data_offset()), |
118 FieldAddress(EAX, EBX, TIMES_2, Array::data_offset()), | |
119 ECX); | 120 ECX); |
120 // Caller is responsible for preserving the value if necessary. | 121 // Caller is responsible for preserving the value if necessary. |
121 __ ret(); | 122 __ ret(); |
122 __ Bind(&fall_through); | 123 __ Bind(&fall_through); |
123 } | 124 } |
124 | 125 |
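The tag tests and the TIMES_2 addressing above all lean on the ia32 Smi encoding (kSmiTag == 0, kSmiTagShift == 1). A self-contained sketch of that encoding, using made-up local constants rather than the VM's headers:

    #include <cassert>
    #include <cstdint>

    void smi_encoding_sketch() {
      const intptr_t kTagMask = 1, kTagShift = 1;   // mirrors kSmiTagMask/kSmiTagShift
      intptr_t tagged = intptr_t{21} << kTagShift;  // Smi 21 is stored as 42
      assert((tagged & kTagMask) == 0);             // what testl(reg, kSmiTagMask) checks
      assert((tagged >> kTagShift) == 21);          // what SmiUntag recovers
      // A tagged index already equals index * 2, so FieldAddress(array, index,
      // TIMES_2, data_offset) addresses the element without untagging first.
    }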
125 | 126 |
126 // Allocate a GrowableObjectArray using the backing array specified. | 127 // Allocate a GrowableObjectArray using the backing array specified. |
127 // On stack: type argument (+2), data (+1), return-address (+0). | 128 // On stack: type argument (+2), data (+1), return-address (+0). |
128 void Intrinsifier::GrowableArray_Allocate(Assembler* assembler) { | 129 void Intrinsifier::GrowableArray_Allocate(Assembler* assembler) { |
129 // This snippet of inlined code uses the following registers: | 130 // This snippet of inlined code uses the following registers: |
130 // EAX, EBX | 131 // EAX, EBX |
131 // and the newly allocated object is returned in EAX. | 132 // and the newly allocated object is returned in EAX. |
132 const intptr_t kTypeArgumentsOffset = 2 * kWordSize; | 133 const intptr_t kTypeArgumentsOffset = 2 * kWordSize; |
133 const intptr_t kArrayOffset = 1 * kWordSize; | 134 const intptr_t kArrayOffset = 1 * kWordSize; |
134 Label fall_through; | 135 Label fall_through; |
135 | 136 |
136 // Try allocating in new space. | 137 // Try allocating in new space. |
137 const Class& cls = Class::Handle( | 138 const Class& cls = Class::Handle( |
138 Isolate::Current()->object_store()->growable_object_array_class()); | 139 Isolate::Current()->object_store()->growable_object_array_class()); |
139 __ TryAllocate(cls, &fall_through, Assembler::kNearJump, EAX, EBX); | 140 __ TryAllocate(cls, &fall_through, Assembler::kNearJump, EAX, EBX); |
140 | 141 |
141 // Store backing array object in growable array object. | 142 // Store backing array object in growable array object. |
142 __ movl(EBX, Address(ESP, kArrayOffset)); // data argument. | 143 __ movl(EBX, Address(ESP, kArrayOffset)); // data argument. |
143 // EAX is new, no barrier needed. | 144 // EAX is new, no barrier needed. |
144 __ StoreIntoObjectNoBarrier( | 145 __ StoreIntoObjectNoBarrier( |
145 EAX, | 146 EAX, FieldAddress(EAX, GrowableObjectArray::data_offset()), EBX); |
146 FieldAddress(EAX, GrowableObjectArray::data_offset()), | |
147 EBX); | |
148 | 147 |
149 // EAX: new growable array object start as a tagged pointer. | 148 // EAX: new growable array object start as a tagged pointer. |
150 // Store the type argument field in the growable array object. | 149 // Store the type argument field in the growable array object. |
151 __ movl(EBX, Address(ESP, kTypeArgumentsOffset)); // type argument. | 150 __ movl(EBX, Address(ESP, kTypeArgumentsOffset)); // type argument. |
152 __ StoreIntoObjectNoBarrier( | 151 __ StoreIntoObjectNoBarrier( |
153 EAX, | 152 EAX, FieldAddress(EAX, GrowableObjectArray::type_arguments_offset()), |
154 FieldAddress(EAX, GrowableObjectArray::type_arguments_offset()), | |
155 EBX); | 153 EBX); |
156 | 154 |
157 __ ZeroInitSmiField(FieldAddress(EAX, GrowableObjectArray::length_offset())); | 155 __ ZeroInitSmiField(FieldAddress(EAX, GrowableObjectArray::length_offset())); |
158 __ ret(); // returns the newly allocated object in EAX. | 156 __ ret(); // returns the newly allocated object in EAX. |
159 | 157 |
160 __ Bind(&fall_through); | 158 __ Bind(&fall_through); |
161 } | 159 } |
162 | 160 |
163 | 161 |
164 // Add an element to the growable array if it doesn't need to grow; otherwise | 162 // Add an element to the growable array if it doesn't need to grow; otherwise |
165 // call into regular code. | 163 // call into regular code. |
166 // On stack: growable array (+2), value (+1), return-address (+0). | 164 // On stack: growable array (+2), value (+1), return-address (+0). |
167 void Intrinsifier::GrowableArray_add(Assembler* assembler) { | 165 void Intrinsifier::GrowableArray_add(Assembler* assembler) { |
168 // In checked mode we need to type-check the incoming argument. | 166 // In checked mode we need to type-check the incoming argument. |
169 if (Isolate::Current()->type_checks()) return; | 167 if (Isolate::Current()->type_checks()) return; |
170 | 168 |
171 Label fall_through; | 169 Label fall_through; |
172 __ movl(EAX, Address(ESP, + 2 * kWordSize)); // Array. | 170 __ movl(EAX, Address(ESP, +2 * kWordSize)); // Array. |
173 __ movl(EBX, FieldAddress(EAX, GrowableObjectArray::length_offset())); | 171 __ movl(EBX, FieldAddress(EAX, GrowableObjectArray::length_offset())); |
174 // EBX: length. | 172 // EBX: length. |
175 __ movl(EDI, FieldAddress(EAX, GrowableObjectArray::data_offset())); | 173 __ movl(EDI, FieldAddress(EAX, GrowableObjectArray::data_offset())); |
176 // EDI: data. | 174 // EDI: data. |
177 // Compare length with capacity. | 175 // Compare length with capacity. |
178 __ cmpl(EBX, FieldAddress(EDI, Array::length_offset())); | 176 __ cmpl(EBX, FieldAddress(EDI, Array::length_offset())); |
179 __ j(EQUAL, &fall_through); // Must grow data. | 177 __ j(EQUAL, &fall_through); // Must grow data. |
180 __ IncrementSmiField(FieldAddress(EAX, GrowableObjectArray::length_offset()), | 178 __ IncrementSmiField(FieldAddress(EAX, GrowableObjectArray::length_offset()), |
181 1); | 179 1); |
182 __ movl(EAX, Address(ESP, + 1 * kWordSize)); // Value | 180 __ movl(EAX, Address(ESP, +1 * kWordSize)); // Value |
183 ASSERT(kSmiTagShift == 1); | 181 ASSERT(kSmiTagShift == 1); |
184 __ StoreIntoObject(EDI, | 182 __ StoreIntoObject(EDI, FieldAddress(EDI, EBX, TIMES_2, Array::data_offset()), |
185 FieldAddress(EDI, EBX, TIMES_2, Array::data_offset()), | |
186 EAX); | 183 EAX); |
187 const Immediate& raw_null = | 184 const Immediate& raw_null = |
188 Immediate(reinterpret_cast<int32_t>(Object::null())); | 185 Immediate(reinterpret_cast<int32_t>(Object::null())); |
189 __ movl(EAX, raw_null); | 186 __ movl(EAX, raw_null); |
190 __ ret(); | 187 __ ret(); |
191 __ Bind(&fall_through); | 188 __ Bind(&fall_through); |
192 } | 189 } |
193 | 190 |
194 | 191 |
195 #define TYPED_ARRAY_ALLOCATION(type_name, cid, max_len, scale_factor) \ | 192 #define TYPED_ARRAY_ALLOCATION(type_name, cid, max_len, scale_factor) \ |
196 Label fall_through; \ | 193 Label fall_through; \ |
197 const intptr_t kArrayLengthStackOffset = 1 * kWordSize; \ | 194 const intptr_t kArrayLengthStackOffset = 1 * kWordSize; \ |
198 NOT_IN_PRODUCT(__ MaybeTraceAllocation(cid, EDI, &fall_through, false)); \ | 195 NOT_IN_PRODUCT(__ MaybeTraceAllocation(cid, EDI, &fall_through, false)); \ |
199 __ movl(EDI, Address(ESP, kArrayLengthStackOffset)); /* Array length. */ \ | 196 __ movl(EDI, Address(ESP, kArrayLengthStackOffset)); /* Array length. */ \ |
200 /* Check that length is a positive Smi. */ \ | 197 /* Check that length is a positive Smi. */ \ |
201 /* EDI: requested array length argument. */ \ | 198 /* EDI: requested array length argument. */ \ |
202 __ testl(EDI, Immediate(kSmiTagMask)); \ | 199 __ testl(EDI, Immediate(kSmiTagMask)); \ |
203 __ j(NOT_ZERO, &fall_through); \ | 200 __ j(NOT_ZERO, &fall_through); \ |
204 __ cmpl(EDI, Immediate(0)); \ | 201 __ cmpl(EDI, Immediate(0)); \ |
205 __ j(LESS, &fall_through); \ | 202 __ j(LESS, &fall_through); \ |
206 __ SmiUntag(EDI); \ | 203 __ SmiUntag(EDI); \ |
207 /* Check for maximum allowed length. */ \ | 204 /* Check for maximum allowed length. */ \ |
208 /* EDI: untagged array length. */ \ | 205 /* EDI: untagged array length. */ \ |
209 __ cmpl(EDI, Immediate(max_len)); \ | 206 __ cmpl(EDI, Immediate(max_len)); \ |
(...skipping 41 matching lines...) |
251 __ j(ABOVE, &size_tag_overflow, Assembler::kNearJump); \ | 248 __ j(ABOVE, &size_tag_overflow, Assembler::kNearJump); \ |
252 __ shll(EDI, Immediate(RawObject::kSizeTagPos - kObjectAlignmentLog2)); \ | 249 __ shll(EDI, Immediate(RawObject::kSizeTagPos - kObjectAlignmentLog2)); \ |
253 __ jmp(&done, Assembler::kNearJump); \ | 250 __ jmp(&done, Assembler::kNearJump); \ |
254 \ | 251 \ |
255 __ Bind(&size_tag_overflow); \ | 252 __ Bind(&size_tag_overflow); \ |
256 __ movl(EDI, Immediate(0)); \ | 253 __ movl(EDI, Immediate(0)); \ |
257 __ Bind(&done); \ | 254 __ Bind(&done); \ |
258 \ | 255 \ |
259 /* Get the class index and insert it into the tags. */ \ | 256 /* Get the class index and insert it into the tags. */ \ |
260 __ orl(EDI, Immediate(RawObject::ClassIdTag::encode(cid))); \ | 257 __ orl(EDI, Immediate(RawObject::ClassIdTag::encode(cid))); \ |
261 __ movl(FieldAddress(EAX, type_name::tags_offset()), EDI); /* Tags. */ \ | 258 __ movl(FieldAddress(EAX, type_name::tags_offset()), EDI); /* Tags. */ \ |
262 } \ | 259 } \ |
263 /* Set the length field. */ \ | 260 /* Set the length field. */ \ |
264 /* EAX: new object start as a tagged pointer. */ \ | 261 /* EAX: new object start as a tagged pointer. */ \ |
265 /* EBX: new object end address. */ \ | 262 /* EBX: new object end address. */ \ |
266 __ movl(EDI, Address(ESP, kArrayLengthStackOffset)); /* Array length. */ \ | 263 __ movl(EDI, Address(ESP, kArrayLengthStackOffset)); /* Array length. */ \ |
267 __ StoreIntoObjectNoBarrier(EAX, \ | 264 __ StoreIntoObjectNoBarrier( \ |
268 FieldAddress(EAX, type_name::length_offset()), \ | 265 EAX, FieldAddress(EAX, type_name::length_offset()), EDI); \ |
269 EDI); \ | |
270 /* Initialize all array elements to 0. */ \ | 266 /* Initialize all array elements to 0. */ \ |
271 /* EAX: new object start as a tagged pointer. */ \ | 267 /* EAX: new object start as a tagged pointer. */ \ |
272 /* EBX: new object end address. */ \ | 268 /* EBX: new object end address. */ \ |
273 /* EDI: iterator which initially points to the start of the variable */ \ | 269 /* EDI: iterator which initially points to the start of the variable */ \ |
274 /* ECX: scratch register. */ \ | 270 /* ECX: scratch register. */ \ |
275 /* data area to be initialized. */ \ | 271 /* data area to be initialized. */ \ |
276 __ xorl(ECX, ECX); /* Zero. */ \ | 272 __ xorl(ECX, ECX); /* Zero. */ \ |
277 __ leal(EDI, FieldAddress(EAX, sizeof(Raw##type_name))); \ | 273 __ leal(EDI, FieldAddress(EAX, sizeof(Raw##type_name))); \ |
278 Label done, init_loop; \ | 274 Label done, init_loop; \ |
279 __ Bind(&init_loop); \ | 275 __ Bind(&init_loop); \ |
280 __ cmpl(EDI, EBX); \ | 276 __ cmpl(EDI, EBX); \ |
281 __ j(ABOVE_EQUAL, &done, Assembler::kNearJump); \ | 277 __ j(ABOVE_EQUAL, &done, Assembler::kNearJump); \ |
282 __ movl(Address(EDI, 0), ECX); \ | 278 __ movl(Address(EDI, 0), ECX); \ |
283 __ addl(EDI, Immediate(kWordSize)); \ | 279 __ addl(EDI, Immediate(kWordSize)); \ |
284 __ jmp(&init_loop, Assembler::kNearJump); \ | 280 __ jmp(&init_loop, Assembler::kNearJump); \ |
285 __ Bind(&done); \ | 281 __ Bind(&done); \ |
286 \ | 282 \ |
287 __ ret(); \ | 283 __ ret(); \ |
288 __ Bind(&fall_through); \ | 284 __ Bind(&fall_through); |
289 | 285 |
290 | 286 |
291 static ScaleFactor GetScaleFactor(intptr_t size) { | 287 static ScaleFactor GetScaleFactor(intptr_t size) { |
292 switch (size) { | 288 switch (size) { |
293 case 1: return TIMES_1; | 289 case 1: |
294 case 2: return TIMES_2; | 290 return TIMES_1; |
295 case 4: return TIMES_4; | 291 case 2: |
296 case 8: return TIMES_8; | 292 return TIMES_2; |
297 case 16: return TIMES_16; | 293 case 4: |
| 294 return TIMES_4; |
| 295 case 8: |
| 296 return TIMES_8; |
| 297 case 16: |
| 298 return TIMES_16; |
298 } | 299 } |
299 UNREACHABLE(); | 300 UNREACHABLE(); |
300 return static_cast<ScaleFactor>(0); | 301 return static_cast<ScaleFactor>(0); |
301 } | 302 } |
302 | 303 |
303 | 304 |
304 #define TYPED_DATA_ALLOCATOR(clazz) \ | 305 #define TYPED_DATA_ALLOCATOR(clazz) \ |
305 void Intrinsifier::TypedData_##clazz##_factory(Assembler* assembler) { \ | 306 void Intrinsifier::TypedData_##clazz##_factory(Assembler* assembler) { \ |
306 intptr_t size = TypedData::ElementSizeInBytes(kTypedData##clazz##Cid); \ | 307 intptr_t size = TypedData::ElementSizeInBytes(kTypedData##clazz##Cid); \ |
307 intptr_t max_len = TypedData::MaxElements(kTypedData##clazz##Cid); \ | 308 intptr_t max_len = TypedData::MaxElements(kTypedData##clazz##Cid); \ |
308 ScaleFactor scale = GetScaleFactor(size); \ | 309 ScaleFactor scale = GetScaleFactor(size); \ |
309 TYPED_ARRAY_ALLOCATION(TypedData, kTypedData##clazz##Cid, max_len, scale); \ | 310 TYPED_ARRAY_ALLOCATION(TypedData, kTypedData##clazz##Cid, max_len, scale); \ |
310 } | 311 } |
311 CLASS_LIST_TYPED_DATA(TYPED_DATA_ALLOCATOR) | 312 CLASS_LIST_TYPED_DATA(TYPED_DATA_ALLOCATOR) |
312 #undef TYPED_DATA_ALLOCATOR | 313 #undef TYPED_DATA_ALLOCATOR |
313 | 314 |
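For illustration, the allocator macro above expands mechanically; for a hypothetical element class Foo the generated factory would look roughly like this (the TYPED_ARRAY_ALLOCATION body itself is elided):

    void Intrinsifier::TypedData_Foo_factory(Assembler* assembler) {
      intptr_t size = TypedData::ElementSizeInBytes(kTypedDataFooCid);
      intptr_t max_len = TypedData::MaxElements(kTypedDataFooCid);
      ScaleFactor scale = GetScaleFactor(size);
      TYPED_ARRAY_ALLOCATION(TypedData, kTypedDataFooCid, max_len, scale);
    }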
314 | 315 |
315 // Tests if the two topmost arguments are Smis; jumps to label not_smi if not. | 316 // Tests if the two topmost arguments are Smis; jumps to label not_smi if not. |
316 // Topmost argument is in EAX. | 317 // Topmost argument is in EAX. |
317 static void TestBothArgumentsSmis(Assembler* assembler, Label* not_smi) { | 318 static void TestBothArgumentsSmis(Assembler* assembler, Label* not_smi) { |
318 __ movl(EAX, Address(ESP, + 1 * kWordSize)); | 319 __ movl(EAX, Address(ESP, +1 * kWordSize)); |
319 __ movl(EBX, Address(ESP, + 2 * kWordSize)); | 320 __ movl(EBX, Address(ESP, +2 * kWordSize)); |
320 __ orl(EBX, EAX); | 321 __ orl(EBX, EAX); |
321 __ testl(EBX, Immediate(kSmiTagMask)); | 322 __ testl(EBX, Immediate(kSmiTagMask)); |
322 __ j(NOT_ZERO, not_smi, Assembler::kNearJump); | 323 __ j(NOT_ZERO, not_smi, Assembler::kNearJump); |
323 } | 324 } |
324 | 325 |
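TestBothArgumentsSmis relies on a small bit trick: or-ing the two words and testing the tag bit checks both Smi tags at once. A standalone sketch, with a local mask constant standing in for the VM header:

    #include <cstdint>

    bool both_smis(intptr_t a, intptr_t b) {
      const intptr_t kTagMask = 1;          // stands in for kSmiTagMask
      return ((a | b) & kTagMask) == 0;     // the orl + testl pair in the code above
    }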
325 | 326 |
326 void Intrinsifier::Integer_addFromInteger(Assembler* assembler) { | 327 void Intrinsifier::Integer_addFromInteger(Assembler* assembler) { |
327 Label fall_through; | 328 Label fall_through; |
328 TestBothArgumentsSmis(assembler, &fall_through); | 329 TestBothArgumentsSmis(assembler, &fall_through); |
329 __ addl(EAX, Address(ESP, + 2 * kWordSize)); | 330 __ addl(EAX, Address(ESP, +2 * kWordSize)); |
330 __ j(OVERFLOW, &fall_through, Assembler::kNearJump); | 331 __ j(OVERFLOW, &fall_through, Assembler::kNearJump); |
331 // Result is in EAX. | 332 // Result is in EAX. |
332 __ ret(); | 333 __ ret(); |
333 __ Bind(&fall_through); | 334 __ Bind(&fall_through); |
334 } | 335 } |
335 | 336 |
336 | 337 |
337 void Intrinsifier::Integer_add(Assembler* assembler) { | 338 void Intrinsifier::Integer_add(Assembler* assembler) { |
338 Integer_addFromInteger(assembler); | 339 Integer_addFromInteger(assembler); |
339 } | 340 } |
340 | 341 |
341 | 342 |
342 void Intrinsifier::Integer_subFromInteger(Assembler* assembler) { | 343 void Intrinsifier::Integer_subFromInteger(Assembler* assembler) { |
343 Label fall_through; | 344 Label fall_through; |
344 TestBothArgumentsSmis(assembler, &fall_through); | 345 TestBothArgumentsSmis(assembler, &fall_through); |
345 __ subl(EAX, Address(ESP, + 2 * kWordSize)); | 346 __ subl(EAX, Address(ESP, +2 * kWordSize)); |
346 __ j(OVERFLOW, &fall_through, Assembler::kNearJump); | 347 __ j(OVERFLOW, &fall_through, Assembler::kNearJump); |
347 // Result is in EAX. | 348 // Result is in EAX. |
348 __ ret(); | 349 __ ret(); |
349 __ Bind(&fall_through); | 350 __ Bind(&fall_through); |
350 } | 351 } |
351 | 352 |
352 | 353 |
353 void Intrinsifier::Integer_sub(Assembler* assembler) { | 354 void Intrinsifier::Integer_sub(Assembler* assembler) { |
354 Label fall_through; | 355 Label fall_through; |
355 TestBothArgumentsSmis(assembler, &fall_through); | 356 TestBothArgumentsSmis(assembler, &fall_through); |
356 __ movl(EBX, EAX); | 357 __ movl(EBX, EAX); |
357 __ movl(EAX, Address(ESP, + 2 * kWordSize)); | 358 __ movl(EAX, Address(ESP, +2 * kWordSize)); |
358 __ subl(EAX, EBX); | 359 __ subl(EAX, EBX); |
359 __ j(OVERFLOW, &fall_through, Assembler::kNearJump); | 360 __ j(OVERFLOW, &fall_through, Assembler::kNearJump); |
360 // Result is in EAX. | 361 // Result is in EAX. |
361 __ ret(); | 362 __ ret(); |
362 __ Bind(&fall_through); | 363 __ Bind(&fall_through); |
363 } | 364 } |
364 | 365 |
365 | 366 |
366 | |
367 void Intrinsifier::Integer_mulFromInteger(Assembler* assembler) { | 367 void Intrinsifier::Integer_mulFromInteger(Assembler* assembler) { |
368 Label fall_through; | 368 Label fall_through; |
369 TestBothArgumentsSmis(assembler, &fall_through); | 369 TestBothArgumentsSmis(assembler, &fall_through); |
370 ASSERT(kSmiTag == 0); // Adjust code below if not the case. | 370 ASSERT(kSmiTag == 0); // Adjust code below if not the case. |
371 __ SmiUntag(EAX); | 371 __ SmiUntag(EAX); |
372 __ imull(EAX, Address(ESP, + 2 * kWordSize)); | 372 __ imull(EAX, Address(ESP, +2 * kWordSize)); |
373 __ j(OVERFLOW, &fall_through, Assembler::kNearJump); | 373 __ j(OVERFLOW, &fall_through, Assembler::kNearJump); |
374 // Result is in EAX. | 374 // Result is in EAX. |
375 __ ret(); | 375 __ ret(); |
376 __ Bind(&fall_through); | 376 __ Bind(&fall_through); |
377 } | 377 } |
378 | 378 |
379 | 379 |
380 void Intrinsifier::Integer_mul(Assembler* assembler) { | 380 void Intrinsifier::Integer_mul(Assembler* assembler) { |
381 Integer_mulFromInteger(assembler); | 381 Integer_mulFromInteger(assembler); |
382 } | 382 } |
(...skipping 16 matching lines...) |
399 __ cmpl(EAX, Immediate(0)); | 399 __ cmpl(EAX, Immediate(0)); |
400 __ j(EQUAL, &return_zero, Assembler::kNearJump); | 400 __ j(EQUAL, &return_zero, Assembler::kNearJump); |
401 __ cmpl(EAX, EBX); | 401 __ cmpl(EAX, EBX); |
402 __ j(EQUAL, &return_zero, Assembler::kNearJump); | 402 __ j(EQUAL, &return_zero, Assembler::kNearJump); |
403 | 403 |
404 // Check if result equals left. | 404 // Check if result equals left. |
405 __ cmpl(EAX, Immediate(0)); | 405 __ cmpl(EAX, Immediate(0)); |
406 __ j(LESS, &modulo, Assembler::kNearJump); | 406 __ j(LESS, &modulo, Assembler::kNearJump); |
407 // left is positive. | 407 // left is positive. |
408 __ cmpl(EAX, EBX); | 408 __ cmpl(EAX, EBX); |
409 __ j(GREATER, &modulo, Assembler::kNearJump); | 409 __ j(GREATER, &modulo, Assembler::kNearJump); |
410 // left is less than right, result is left (EAX). | 410 // left is less than right, result is left (EAX). |
411 __ ret(); | 411 __ ret(); |
412 | 412 |
413 __ Bind(&return_zero); | 413 __ Bind(&return_zero); |
414 __ xorl(EAX, EAX); | 414 __ xorl(EAX, EAX); |
415 __ ret(); | 415 __ ret(); |
416 | 416 |
417 __ Bind(&modulo); | 417 __ Bind(&modulo); |
418 __ SmiUntag(EBX); | 418 __ SmiUntag(EBX); |
419 __ SmiUntag(EAX); | 419 __ SmiUntag(EAX); |
420 __ cdq(); | 420 __ cdq(); |
421 __ idivl(EBX); | 421 __ idivl(EBX); |
422 } | 422 } |
423 | 423 |
424 | 424 |
425 // Implementation: | 425 // Implementation: |
426 // res = left % right; | 426 // res = left % right; |
427 // if (res < 0) { | 427 // if (res < 0) { |
428 // if (right < 0) { | 428 // if (right < 0) { |
429 // res = res - right; | 429 // res = res - right; |
430 // } else { | 430 // } else { |
431 // res = res + right; | 431 // res = res + right; |
432 // } | 432 // } |
433 // } | 433 // } |
434 void Intrinsifier::Integer_moduloFromInteger(Assembler* assembler) { | 434 void Intrinsifier::Integer_moduloFromInteger(Assembler* assembler) { |
435 Label fall_through, subtract; | 435 Label fall_through, subtract; |
436 TestBothArgumentsSmis(assembler, &fall_through); | 436 TestBothArgumentsSmis(assembler, &fall_through); |
437 __ movl(EBX, Address(ESP, + 2 * kWordSize)); | 437 __ movl(EBX, Address(ESP, +2 * kWordSize)); |
438 // EAX: Tagged left (dividend). | 438 // EAX: Tagged left (dividend). |
439 // EBX: Tagged right (divisor). | 439 // EBX: Tagged right (divisor). |
440 // Check if modulo by zero -> exception thrown in main function. | 440 // Check if modulo by zero -> exception thrown in main function. |
441 __ cmpl(EBX, Immediate(0)); | 441 __ cmpl(EBX, Immediate(0)); |
442 __ j(EQUAL, &fall_through, Assembler::kNearJump); | 442 __ j(EQUAL, &fall_through, Assembler::kNearJump); |
443 EmitRemainderOperation(assembler); | 443 EmitRemainderOperation(assembler); |
444 // Untagged remainder result in EDX. | 444 // Untagged remainder result in EDX. |
445 Label done; | 445 Label done; |
446 __ movl(EAX, EDX); | 446 __ movl(EAX, EDX); |
447 __ cmpl(EAX, Immediate(0)); | 447 __ cmpl(EAX, Immediate(0)); |
(...skipping 18 matching lines...) |
466 | 466 |
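The pseudocode comment before Integer_moduloFromInteger describes how the truncated remainder produced by idivl is adjusted to Dart's always-non-negative %. A standalone sketch (plain C++, not VM code) of that adjustment:

    int dart_mod(int left, int right) {
      int res = left % right;                   // truncated remainder, like idivl's EDX
      if (res < 0) {
        res += (right < 0) ? -right : right;    // res - right  /  res + right
      }
      return res;
    }
    // dart_mod(-7, 4) == 1 and dart_mod(-7, -4) == 1, whereas C's -7 % 4 == -3.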
467 | 467 |
468 void Intrinsifier::Integer_truncDivide(Assembler* assembler) { | 468 void Intrinsifier::Integer_truncDivide(Assembler* assembler) { |
469 Label fall_through; | 469 Label fall_through; |
470 TestBothArgumentsSmis(assembler, &fall_through); | 470 TestBothArgumentsSmis(assembler, &fall_through); |
471 // EAX: right argument (divisor) | 471 // EAX: right argument (divisor) |
472 __ cmpl(EAX, Immediate(0)); | 472 __ cmpl(EAX, Immediate(0)); |
473 __ j(EQUAL, &fall_through, Assembler::kNearJump); | 473 __ j(EQUAL, &fall_through, Assembler::kNearJump); |
474 __ movl(EBX, EAX); | 474 __ movl(EBX, EAX); |
475 __ SmiUntag(EBX); | 475 __ SmiUntag(EBX); |
476 __ movl(EAX, Address(ESP, + 2 * kWordSize)); // Left argument (dividend). | 476 __ movl(EAX, Address(ESP, +2 * kWordSize)); // Left argument (dividend). |
477 __ SmiUntag(EAX); | 477 __ SmiUntag(EAX); |
478 __ pushl(EDX); // Preserve EDX in case of 'fall_through'. | 478 __ pushl(EDX); // Preserve EDX in case of 'fall_through'. |
479 __ cdq(); | 479 __ cdq(); |
480 __ idivl(EBX); | 480 __ idivl(EBX); |
481 __ popl(EDX); | 481 __ popl(EDX); |
482 // Check the corner case of dividing 'MIN_SMI' by -1, in which case we | 482 // Check the corner case of dividing 'MIN_SMI' by -1, in which case we |
483 // cannot tag the result. | 483 // cannot tag the result. |
484 __ cmpl(EAX, Immediate(0x40000000)); | 484 __ cmpl(EAX, Immediate(0x40000000)); |
485 __ j(EQUAL, &fall_through); | 485 __ j(EQUAL, &fall_through); |
486 __ SmiTag(EAX); | 486 __ SmiTag(EAX); |
487 __ ret(); | 487 __ ret(); |
488 __ Bind(&fall_through); | 488 __ Bind(&fall_through); |
489 } | 489 } |
490 | 490 |
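The 0x40000000 comparison above guards the single quotient that cannot be re-tagged: Smis on ia32 hold 31-bit values, and MIN_SMI ~/ -1 lands exactly on 2^30. A standalone check of that corner case (local constants, not the VM's):

    #include <cassert>
    #include <cstdint>

    void truncdiv_corner_case() {
      const int32_t kMinSmi = -(int32_t{1} << 30);
      int32_t quotient = kMinSmi / -1;    // == 1 << 30, the only overflowing quotient
      assert(quotient == 0x40000000);     // too wide for a Smi once tagged
    }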
491 | 491 |
492 void Intrinsifier::Integer_negate(Assembler* assembler) { | 492 void Intrinsifier::Integer_negate(Assembler* assembler) { |
493 Label fall_through; | 493 Label fall_through; |
494 __ movl(EAX, Address(ESP, + 1 * kWordSize)); | 494 __ movl(EAX, Address(ESP, +1 * kWordSize)); |
495 __ testl(EAX, Immediate(kSmiTagMask)); | 495 __ testl(EAX, Immediate(kSmiTagMask)); |
496 __ j(NOT_ZERO, &fall_through, Assembler::kNearJump); // Non-smi value. | 496 __ j(NOT_ZERO, &fall_through, Assembler::kNearJump); // Non-smi value. |
497 __ negl(EAX); | 497 __ negl(EAX); |
498 __ j(OVERFLOW, &fall_through, Assembler::kNearJump); | 498 __ j(OVERFLOW, &fall_through, Assembler::kNearJump); |
499 // Result is in EAX. | 499 // Result is in EAX. |
500 __ ret(); | 500 __ ret(); |
501 __ Bind(&fall_through); | 501 __ Bind(&fall_through); |
502 } | 502 } |
503 | 503 |
504 | 504 |
505 void Intrinsifier::Integer_bitAndFromInteger(Assembler* assembler) { | 505 void Intrinsifier::Integer_bitAndFromInteger(Assembler* assembler) { |
506 Label fall_through; | 506 Label fall_through; |
507 TestBothArgumentsSmis(assembler, &fall_through); | 507 TestBothArgumentsSmis(assembler, &fall_through); |
508 __ movl(EBX, Address(ESP, + 2 * kWordSize)); | 508 __ movl(EBX, Address(ESP, +2 * kWordSize)); |
509 __ andl(EAX, EBX); | 509 __ andl(EAX, EBX); |
510 // Result is in EAX. | 510 // Result is in EAX. |
511 __ ret(); | 511 __ ret(); |
512 __ Bind(&fall_through); | 512 __ Bind(&fall_through); |
513 } | 513 } |
514 | 514 |
515 | 515 |
516 void Intrinsifier::Integer_bitAnd(Assembler* assembler) { | 516 void Intrinsifier::Integer_bitAnd(Assembler* assembler) { |
517 Integer_bitAndFromInteger(assembler); | 517 Integer_bitAndFromInteger(assembler); |
518 } | 518 } |
519 | 519 |
520 | 520 |
521 void Intrinsifier::Integer_bitOrFromInteger(Assembler* assembler) { | 521 void Intrinsifier::Integer_bitOrFromInteger(Assembler* assembler) { |
522 Label fall_through; | 522 Label fall_through; |
523 TestBothArgumentsSmis(assembler, &fall_through); | 523 TestBothArgumentsSmis(assembler, &fall_through); |
524 __ movl(EBX, Address(ESP, + 2 * kWordSize)); | 524 __ movl(EBX, Address(ESP, +2 * kWordSize)); |
525 __ orl(EAX, EBX); | 525 __ orl(EAX, EBX); |
526 // Result is in EAX. | 526 // Result is in EAX. |
527 __ ret(); | 527 __ ret(); |
528 __ Bind(&fall_through); | 528 __ Bind(&fall_through); |
529 } | 529 } |
530 | 530 |
531 | 531 |
532 void Intrinsifier::Integer_bitOr(Assembler* assembler) { | 532 void Intrinsifier::Integer_bitOr(Assembler* assembler) { |
533 Integer_bitOrFromInteger(assembler); | 533 Integer_bitOrFromInteger(assembler); |
534 } | 534 } |
535 | 535 |
536 | 536 |
537 void Intrinsifier::Integer_bitXorFromInteger(Assembler* assembler) { | 537 void Intrinsifier::Integer_bitXorFromInteger(Assembler* assembler) { |
538 Label fall_through; | 538 Label fall_through; |
539 TestBothArgumentsSmis(assembler, &fall_through); | 539 TestBothArgumentsSmis(assembler, &fall_through); |
540 __ movl(EBX, Address(ESP, + 2 * kWordSize)); | 540 __ movl(EBX, Address(ESP, +2 * kWordSize)); |
541 __ xorl(EAX, EBX); | 541 __ xorl(EAX, EBX); |
542 // Result is in EAX. | 542 // Result is in EAX. |
543 __ ret(); | 543 __ ret(); |
544 __ Bind(&fall_through); | 544 __ Bind(&fall_through); |
545 } | 545 } |
546 | 546 |
547 | 547 |
548 void Intrinsifier::Integer_bitXor(Assembler* assembler) { | 548 void Intrinsifier::Integer_bitXor(Assembler* assembler) { |
549 Integer_bitXorFromInteger(assembler); | 549 Integer_bitXorFromInteger(assembler); |
550 } | 550 } |
551 | 551 |
552 | 552 |
553 void Intrinsifier::Integer_shl(Assembler* assembler) { | 553 void Intrinsifier::Integer_shl(Assembler* assembler) { |
554 ASSERT(kSmiTagShift == 1); | 554 ASSERT(kSmiTagShift == 1); |
555 ASSERT(kSmiTag == 0); | 555 ASSERT(kSmiTag == 0); |
556 Label fall_through, overflow; | 556 Label fall_through, overflow; |
557 TestBothArgumentsSmis(assembler, &fall_through); | 557 TestBothArgumentsSmis(assembler, &fall_through); |
558 // Shift value is in EAX. Compare with tagged Smi. | 558 // Shift value is in EAX. Compare with tagged Smi. |
559 __ cmpl(EAX, Immediate(Smi::RawValue(Smi::kBits))); | 559 __ cmpl(EAX, Immediate(Smi::RawValue(Smi::kBits))); |
560 __ j(ABOVE_EQUAL, &fall_through, Assembler::kNearJump); | 560 __ j(ABOVE_EQUAL, &fall_through, Assembler::kNearJump); |
561 | 561 |
562 __ SmiUntag(EAX); | 562 __ SmiUntag(EAX); |
563 __ movl(ECX, EAX); // Shift amount must be in ECX. | 563 __ movl(ECX, EAX); // Shift amount must be in ECX. |
564 __ movl(EAX, Address(ESP, + 2 * kWordSize)); // Value. | 564 __ movl(EAX, Address(ESP, +2 * kWordSize)); // Value. |
565 | 565 |
566 // Overflow test - all the shifted-out bits must be the same as the sign bit. | 566 // Overflow test - all the shifted-out bits must be the same as the sign bit. |
567 __ movl(EBX, EAX); | 567 __ movl(EBX, EAX); |
568 __ shll(EAX, ECX); | 568 __ shll(EAX, ECX); |
569 __ sarl(EAX, ECX); | 569 __ sarl(EAX, ECX); |
570 __ cmpl(EAX, EBX); | 570 __ cmpl(EAX, EBX); |
571 __ j(NOT_EQUAL, &overflow, Assembler::kNearJump); | 571 __ j(NOT_EQUAL, &overflow, Assembler::kNearJump); |
572 | 572 |
573 __ shll(EAX, ECX); // Shift for result now we know there is no overflow. | 573 __ shll(EAX, ECX); // Shift for result now we know there is no overflow. |
574 | 574 |
575 // EAX is a correctly tagged Smi. | 575 // EAX is a correctly tagged Smi. |
576 __ ret(); | 576 __ ret(); |
577 | 577 |
578 __ Bind(&overflow); | 578 __ Bind(&overflow); |
579 // Arguments are Smi but the shift produced an overflow to Mint. | 579 // Arguments are Smi but the shift produced an overflow to Mint. |
580 __ cmpl(EBX, Immediate(0)); | 580 __ cmpl(EBX, Immediate(0)); |
581 // TODO(srdjan): Implement negative values, for now fall through. | 581 // TODO(srdjan): Implement negative values, for now fall through. |
582 __ j(LESS, &fall_through, Assembler::kNearJump); | 582 __ j(LESS, &fall_through, Assembler::kNearJump); |
583 __ SmiUntag(EBX); | 583 __ SmiUntag(EBX); |
584 __ movl(EAX, EBX); | 584 __ movl(EAX, EBX); |
585 __ shll(EBX, ECX); | 585 __ shll(EBX, ECX); |
586 __ xorl(EDI, EDI); | 586 __ xorl(EDI, EDI); |
587 __ shldl(EDI, EAX, ECX); | 587 __ shldl(EDI, EAX, ECX); |
588 // Result in EDI (high) and EBX (low). | 588 // Result in EDI (high) and EBX (low). |
589 const Class& mint_class = Class::Handle( | 589 const Class& mint_class = |
590 Isolate::Current()->object_store()->mint_class()); | 590 Class::Handle(Isolate::Current()->object_store()->mint_class()); |
591 __ TryAllocate(mint_class, | 591 __ TryAllocate(mint_class, &fall_through, Assembler::kNearJump, |
592 &fall_through, | 592 EAX, // Result register. |
593 Assembler::kNearJump, | |
594 EAX, // Result register. | |
595 ECX); // temp | 593 ECX); // temp |
596 // EBX and EDI are not objects but integer values. | 594 // EBX and EDI are not objects but integer values. |
597 __ movl(FieldAddress(EAX, Mint::value_offset()), EBX); | 595 __ movl(FieldAddress(EAX, Mint::value_offset()), EBX); |
598 __ movl(FieldAddress(EAX, Mint::value_offset() + kWordSize), EDI); | 596 __ movl(FieldAddress(EAX, Mint::value_offset() + kWordSize), EDI); |
599 __ ret(); | 597 __ ret(); |
600 __ Bind(&fall_through); | 598 __ Bind(&fall_through); |
601 } | 599 } |
602 | 600 |
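The overflow test in Integer_shl uses the shift-and-shift-back trick: shifting left and then arithmetically shifting right reproduces the original value only if no significant bits (including the sign) were shifted out. A standalone sketch, assuming count < 31:

    #include <cstdint>

    bool shl_overflows(int32_t value, int count) {
      int32_t shifted = static_cast<int32_t>(static_cast<uint32_t>(value) << count);
      return (shifted >> count) != value;   // mirrors the shll; sarl; cmpl sequence above
    }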
603 | 601 |
604 static void Push64SmiOrMint(Assembler* assembler, | 602 static void Push64SmiOrMint(Assembler* assembler, |
(...skipping 17 matching lines...) |
622 __ pushl(FieldAddress(reg, Mint::value_offset() + kWordSize)); | 620 __ pushl(FieldAddress(reg, Mint::value_offset() + kWordSize)); |
623 __ pushl(FieldAddress(reg, Mint::value_offset())); | 621 __ pushl(FieldAddress(reg, Mint::value_offset())); |
624 __ Bind(&done); | 622 __ Bind(&done); |
625 } | 623 } |
626 | 624 |
627 | 625 |
628 static void CompareIntegers(Assembler* assembler, Condition true_condition) { | 626 static void CompareIntegers(Assembler* assembler, Condition true_condition) { |
629 Label try_mint_smi, is_true, is_false, drop_two_fall_through, fall_through; | 627 Label try_mint_smi, is_true, is_false, drop_two_fall_through, fall_through; |
630 TestBothArgumentsSmis(assembler, &try_mint_smi); | 628 TestBothArgumentsSmis(assembler, &try_mint_smi); |
631 // EAX contains the right argument. | 629 // EAX contains the right argument. |
632 __ cmpl(Address(ESP, + 2 * kWordSize), EAX); | 630 __ cmpl(Address(ESP, +2 * kWordSize), EAX); |
633 __ j(true_condition, &is_true, Assembler::kNearJump); | 631 __ j(true_condition, &is_true, Assembler::kNearJump); |
634 __ Bind(&is_false); | 632 __ Bind(&is_false); |
635 __ LoadObject(EAX, Bool::False()); | 633 __ LoadObject(EAX, Bool::False()); |
636 __ ret(); | 634 __ ret(); |
637 __ Bind(&is_true); | 635 __ Bind(&is_true); |
638 __ LoadObject(EAX, Bool::True()); | 636 __ LoadObject(EAX, Bool::True()); |
639 __ ret(); | 637 __ ret(); |
640 | 638 |
641 // 64-bit comparison | 639 // 64-bit comparison |
642 Condition hi_true_cond, hi_false_cond, lo_false_cond; | 640 Condition hi_true_cond, hi_false_cond, lo_false_cond; |
(...skipping 11 matching lines...) |
654 lo_false_cond = (true_condition == GREATER) ? BELOW_EQUAL : BELOW; | 652 lo_false_cond = (true_condition == GREATER) ? BELOW_EQUAL : BELOW; |
655 break; | 653 break; |
656 default: | 654 default: |
657 UNREACHABLE(); | 655 UNREACHABLE(); |
658 hi_true_cond = hi_false_cond = lo_false_cond = OVERFLOW; | 656 hi_true_cond = hi_false_cond = lo_false_cond = OVERFLOW; |
659 } | 657 } |
660 __ Bind(&try_mint_smi); | 658 __ Bind(&try_mint_smi); |
661 // Note that EDX and ECX must be preserved in case we fall through to the main | 659 // Note that EDX and ECX must be preserved in case we fall through to the main |
662 // method. | 660 // method. |
663 // EAX contains the right argument. | 661 // EAX contains the right argument. |
664 __ movl(EBX, Address(ESP, + 2 * kWordSize)); // Left argument. | 662 __ movl(EBX, Address(ESP, +2 * kWordSize)); // Left argument. |
665 // Push left as 64 bit integer. | 663 // Push left as 64 bit integer. |
666 Push64SmiOrMint(assembler, EBX, EDI, &fall_through); | 664 Push64SmiOrMint(assembler, EBX, EDI, &fall_through); |
667 // Push right as 64 bit integer. | 665 // Push right as 64 bit integer. |
668 Push64SmiOrMint(assembler, EAX, EDI, &drop_two_fall_through); | 666 Push64SmiOrMint(assembler, EAX, EDI, &drop_two_fall_through); |
669 __ popl(EBX); // Right.LO. | 667 __ popl(EBX); // Right.LO. |
670 __ popl(ECX); // Right.HI. | 668 __ popl(ECX); // Right.HI. |
671 __ popl(EAX); // Left.LO. | 669 __ popl(EAX); // Left.LO. |
672 __ popl(EDX); // Left.HI. | 670 __ popl(EDX); // Left.HI. |
673 __ cmpl(EDX, ECX); // cmpl left.HI, right.HI. | 671 __ cmpl(EDX, ECX); // cmpl left.HI, right.HI. |
674 __ j(hi_false_cond, &is_false, Assembler::kNearJump); | 672 __ j(hi_false_cond, &is_false, Assembler::kNearJump); |
675 __ j(hi_true_cond, &is_true, Assembler::kNearJump); | 673 __ j(hi_true_cond, &is_true, Assembler::kNearJump); |
676 __ cmpl(EAX, EBX); // cmpl left.LO, right.LO. | 674 __ cmpl(EAX, EBX); // cmpl left.LO, right.LO. |
677 __ j(lo_false_cond, &is_false, Assembler::kNearJump); | 675 __ j(lo_false_cond, &is_false, Assembler::kNearJump); |
678 // Else is true. | 676 // Else is true. |
679 __ jmp(&is_true); | 677 __ jmp(&is_true); |
680 | 678 |
681 __ Bind(&drop_two_fall_through); | 679 __ Bind(&drop_two_fall_through); |
682 __ Drop(2); | 680 __ Drop(2); |
683 __ Bind(&fall_through); | 681 __ Bind(&fall_through); |
684 } | 682 } |
685 | 683 |
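The Mint path above compares two 64-bit values held in register pairs: the high words decide the result unless they are equal, in which case the low words are compared unsigned. A standalone sketch of that logic for the LESS case:

    #include <cstdint>

    bool less_than_64(int32_t lhs_hi, uint32_t lhs_lo,
                      int32_t rhs_hi, uint32_t rhs_lo) {
      if (lhs_hi != rhs_hi) {
        return lhs_hi < rhs_hi;   // signed compare of the high words
      }
      return lhs_lo < rhs_lo;     // unsigned compare of the low words
    }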
686 | 684 |
687 | |
688 void Intrinsifier::Integer_greaterThanFromInt(Assembler* assembler) { | 685 void Intrinsifier::Integer_greaterThanFromInt(Assembler* assembler) { |
689 CompareIntegers(assembler, LESS); | 686 CompareIntegers(assembler, LESS); |
690 } | 687 } |
691 | 688 |
692 | 689 |
693 void Intrinsifier::Integer_lessThan(Assembler* assembler) { | 690 void Intrinsifier::Integer_lessThan(Assembler* assembler) { |
694 Integer_greaterThanFromInt(assembler); | 691 Integer_greaterThanFromInt(assembler); |
695 } | 692 } |
696 | 693 |
697 | 694 |
(...skipping 10 matching lines...) |
708 void Intrinsifier::Integer_greaterEqualThan(Assembler* assembler) { | 705 void Intrinsifier::Integer_greaterEqualThan(Assembler* assembler) { |
709 CompareIntegers(assembler, GREATER_EQUAL); | 706 CompareIntegers(assembler, GREATER_EQUAL); |
710 } | 707 } |
711 | 708 |
712 | 709 |
713 // This is called for Smi, Mint and Bigint receivers. The right argument | 710 // This is called for Smi, Mint and Bigint receivers. The right argument |
714 // can be Smi, Mint, Bigint or double. | 711 // can be Smi, Mint, Bigint or double. |
715 void Intrinsifier::Integer_equalToInteger(Assembler* assembler) { | 712 void Intrinsifier::Integer_equalToInteger(Assembler* assembler) { |
716 Label fall_through, true_label, check_for_mint; | 713 Label fall_through, true_label, check_for_mint; |
717 // For an integer receiver, do the '===' check first. | 714 // For an integer receiver, do the '===' check first. |
718 __ movl(EAX, Address(ESP, + 1 * kWordSize)); | 715 __ movl(EAX, Address(ESP, +1 * kWordSize)); |
719 __ cmpl(EAX, Address(ESP, + 2 * kWordSize)); | 716 __ cmpl(EAX, Address(ESP, +2 * kWordSize)); |
720 __ j(EQUAL, &true_label, Assembler::kNearJump); | 717 __ j(EQUAL, &true_label, Assembler::kNearJump); |
721 __ movl(EBX, Address(ESP, + 2 * kWordSize)); | 718 __ movl(EBX, Address(ESP, +2 * kWordSize)); |
722 __ orl(EAX, EBX); | 719 __ orl(EAX, EBX); |
723 __ testl(EAX, Immediate(kSmiTagMask)); | 720 __ testl(EAX, Immediate(kSmiTagMask)); |
724 __ j(NOT_ZERO, &check_for_mint, Assembler::kNearJump); | 721 __ j(NOT_ZERO, &check_for_mint, Assembler::kNearJump); |
725 // Both arguments are smi, '===' is good enough. | 722 // Both arguments are smi, '===' is good enough. |
726 __ LoadObject(EAX, Bool::False()); | 723 __ LoadObject(EAX, Bool::False()); |
727 __ ret(); | 724 __ ret(); |
728 __ Bind(&true_label); | 725 __ Bind(&true_label); |
729 __ LoadObject(EAX, Bool::True()); | 726 __ LoadObject(EAX, Bool::True()); |
730 __ ret(); | 727 __ ret(); |
731 | 728 |
732 // At least one of the arguments was not Smi. | 729 // At least one of the arguments was not Smi. |
733 Label receiver_not_smi; | 730 Label receiver_not_smi; |
734 __ Bind(&check_for_mint); | 731 __ Bind(&check_for_mint); |
735 __ movl(EAX, Address(ESP, + 2 * kWordSize)); // Receiver. | 732 __ movl(EAX, Address(ESP, +2 * kWordSize)); // Receiver. |
736 __ testl(EAX, Immediate(kSmiTagMask)); | 733 __ testl(EAX, Immediate(kSmiTagMask)); |
737 __ j(NOT_ZERO, &receiver_not_smi); | 734 __ j(NOT_ZERO, &receiver_not_smi); |
738 | 735 |
739 // Left (receiver) is Smi, return false if right is not Double. | 736 // Left (receiver) is Smi, return false if right is not Double. |
740 // Note that an instance of Mint or Bigint never contains a value that can be | 737 // Note that an instance of Mint or Bigint never contains a value that can be |
741 // represented by Smi. | 738 // represented by Smi. |
742 __ movl(EAX, Address(ESP, + 1 * kWordSize)); // Right argument. | 739 __ movl(EAX, Address(ESP, +1 * kWordSize)); // Right argument. |
743 __ CompareClassId(EAX, kDoubleCid, EDI); | 740 __ CompareClassId(EAX, kDoubleCid, EDI); |
744 __ j(EQUAL, &fall_through); | 741 __ j(EQUAL, &fall_through); |
745 __ LoadObject(EAX, Bool::False()); // Smi == Mint -> false. | 742 __ LoadObject(EAX, Bool::False()); // Smi == Mint -> false. |
746 __ ret(); | 743 __ ret(); |
747 | 744 |
748 __ Bind(&receiver_not_smi); | 745 __ Bind(&receiver_not_smi); |
749 // EAX: receiver. | 746 // EAX: receiver. |
750 __ CompareClassId(EAX, kMintCid, EDI); | 747 __ CompareClassId(EAX, kMintCid, EDI); |
751 __ j(NOT_EQUAL, &fall_through); | 748 __ j(NOT_EQUAL, &fall_through); |
752 // Receiver is Mint, return false if right is Smi. | 749 // Receiver is Mint, return false if right is Smi. |
753 __ movl(EAX, Address(ESP, + 1 * kWordSize)); // Right argument. | 750 __ movl(EAX, Address(ESP, +1 * kWordSize)); // Right argument. |
754 __ testl(EAX, Immediate(kSmiTagMask)); | 751 __ testl(EAX, Immediate(kSmiTagMask)); |
755 __ j(NOT_ZERO, &fall_through); | 752 __ j(NOT_ZERO, &fall_through); |
756 __ LoadObject(EAX, Bool::False()); | 753 __ LoadObject(EAX, Bool::False()); |
757 __ ret(); | 754 __ ret(); |
758 // TODO(srdjan): Implement Mint == Mint comparison. | 755 // TODO(srdjan): Implement Mint == Mint comparison. |
759 | 756 |
760 __ Bind(&fall_through); | 757 __ Bind(&fall_through); |
761 } | 758 } |
762 | 759 |
763 | 760 |
(...skipping 11 matching lines...) |
775 // When shifting a Smi right, the result is the same for all shift counts | 772 // When shifting a Smi right, the result is the same for all shift counts |
776 // >= count_limit. | 773 // >= count_limit. |
777 __ SmiUntag(EAX); | 774 __ SmiUntag(EAX); |
778 // Negative counts throw exception. | 775 // Negative counts throw exception. |
779 __ cmpl(EAX, Immediate(0)); | 776 __ cmpl(EAX, Immediate(0)); |
780 __ j(LESS, &fall_through, Assembler::kNearJump); | 777 __ j(LESS, &fall_through, Assembler::kNearJump); |
781 __ cmpl(EAX, count_limit); | 778 __ cmpl(EAX, count_limit); |
782 __ j(LESS_EQUAL, &shift_count_ok, Assembler::kNearJump); | 779 __ j(LESS_EQUAL, &shift_count_ok, Assembler::kNearJump); |
783 __ movl(EAX, count_limit); | 780 __ movl(EAX, count_limit); |
784 __ Bind(&shift_count_ok); | 781 __ Bind(&shift_count_ok); |
785 __ movl(ECX, EAX); // Shift amount must be in ECX. | 782 __ movl(ECX, EAX); // Shift amount must be in ECX. |
786 __ movl(EAX, Address(ESP, + 2 * kWordSize)); // Value. | 783 __ movl(EAX, Address(ESP, +2 * kWordSize)); // Value. |
787 __ SmiUntag(EAX); // Value. | 784 __ SmiUntag(EAX); // Value. |
788 __ sarl(EAX, ECX); | 785 __ sarl(EAX, ECX); |
789 __ SmiTag(EAX); | 786 __ SmiTag(EAX); |
790 __ ret(); | 787 __ ret(); |
791 __ Bind(&fall_through); | 788 __ Bind(&fall_through); |
792 } | 789 } |
793 | 790 |
794 | 791 |
795 // Argument is Smi (receiver). | 792 // Argument is Smi (receiver). |
796 void Intrinsifier::Smi_bitNegate(Assembler* assembler) { | 793 void Intrinsifier::Smi_bitNegate(Assembler* assembler) { |
797 __ movl(EAX, Address(ESP, + 1 * kWordSize)); // Receiver. | 794 __ movl(EAX, Address(ESP, +1 * kWordSize)); // Receiver. |
798 __ notl(EAX); | 795 __ notl(EAX); |
799 __ andl(EAX, Immediate(~kSmiTagMask)); // Remove inverted smi-tag. | 796 __ andl(EAX, Immediate(~kSmiTagMask)); // Remove inverted smi-tag. |
800 __ ret(); | 797 __ ret(); |
801 } | 798 } |
802 | 799 |
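Smi_bitNegate works directly on the tagged value: with x stored as x << 1, notl flips every bit so the result is (~x << 1) | 1, and clearing the tag bit with andl(~kSmiTagMask) leaves exactly the Smi encoding of ~x. A standalone check of that identity:

    #include <cassert>
    #include <cstdint>

    void bit_negate_sketch() {
      int32_t x = 21;
      int32_t tagged = x << 1;          // Smi encoding of x
      int32_t result = ~tagged & ~1;    // notl; andl(~kSmiTagMask)
      assert(result == static_cast<int32_t>(static_cast<uint32_t>(~x) << 1));
    }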
803 | 800 |
804 void Intrinsifier::Smi_bitLength(Assembler* assembler) { | 801 void Intrinsifier::Smi_bitLength(Assembler* assembler) { |
805 ASSERT(kSmiTagShift == 1); | 802 ASSERT(kSmiTagShift == 1); |
806 __ movl(EAX, Address(ESP, + 1 * kWordSize)); // Receiver. | 803 __ movl(EAX, Address(ESP, +1 * kWordSize)); // Receiver. |
807 // XOR with sign bit to complement bits if value is negative. | 804 // XOR with sign bit to complement bits if value is negative. |
808 __ movl(ECX, EAX); | 805 __ movl(ECX, EAX); |
809 __ sarl(ECX, Immediate(31)); // All 0 or all 1. | 806 __ sarl(ECX, Immediate(31)); // All 0 or all 1. |
810 __ xorl(EAX, ECX); | 807 __ xorl(EAX, ECX); |
811 // BSR does not write the destination register if source is zero. Put a 1 in | 808 // BSR does not write the destination register if source is zero. Put a 1 in |
812 // the Smi tag bit to ensure BSR writes to destination register. | 809 // the Smi tag bit to ensure BSR writes to destination register. |
813 __ orl(EAX, Immediate(kSmiTagMask)); | 810 __ orl(EAX, Immediate(kSmiTagMask)); |
814 __ bsrl(EAX, EAX); | 811 __ bsrl(EAX, EAX); |
815 __ SmiTag(EAX); | 812 __ SmiTag(EAX); |
816 __ ret(); | 813 __ ret(); |
(...skipping 26 matching lines...) |
843 __ xorl(EAX, EAX); // EAX = 0. | 840 __ xorl(EAX, EAX); // EAX = 0. |
844 __ movl(EDX, FieldAddress(EDI, ESI, TIMES_4, TypedData::data_offset())); | 841 __ movl(EDX, FieldAddress(EDI, ESI, TIMES_4, TypedData::data_offset())); |
845 __ shldl(EAX, EDX, ECX); | 842 __ shldl(EAX, EDX, ECX); |
846 __ movl(Address(EBX, ESI, TIMES_4, Bigint::kBytesPerDigit), EAX); | 843 __ movl(Address(EBX, ESI, TIMES_4, Bigint::kBytesPerDigit), EAX); |
847 Label last; | 844 Label last; |
848 __ cmpl(ESI, Immediate(0)); | 845 __ cmpl(ESI, Immediate(0)); |
849 __ j(EQUAL, &last, Assembler::kNearJump); | 846 __ j(EQUAL, &last, Assembler::kNearJump); |
850 Label loop; | 847 Label loop; |
851 __ Bind(&loop); | 848 __ Bind(&loop); |
852 __ movl(EAX, EDX); | 849 __ movl(EAX, EDX); |
853 __ movl(EDX, | 850 __ movl(EDX, FieldAddress(EDI, ESI, TIMES_4, |
854 FieldAddress(EDI, ESI, TIMES_4, | 851 TypedData::data_offset() - Bigint::kBytesPerDigit)); |
855 TypedData::data_offset() - Bigint::kBytesPerDigit)); | |
856 __ shldl(EAX, EDX, ECX); | 852 __ shldl(EAX, EDX, ECX); |
857 __ movl(Address(EBX, ESI, TIMES_4, 0), EAX); | 853 __ movl(Address(EBX, ESI, TIMES_4, 0), EAX); |
858 __ decl(ESI); | 854 __ decl(ESI); |
859 __ j(NOT_ZERO, &loop, Assembler::kNearJump); | 855 __ j(NOT_ZERO, &loop, Assembler::kNearJump); |
860 __ Bind(&last); | 856 __ Bind(&last); |
861 __ shldl(EDX, ESI, ECX); // ESI == 0. | 857 __ shldl(EDX, ESI, ECX); // ESI == 0. |
862 __ movl(Address(EBX, 0), EDX); | 858 __ movl(Address(EBX, 0), EDX); |
863 | 859 |
864 // Restore THR and return. | 860 // Restore THR and return. |
865 __ popl(THR); | 861 __ popl(THR); |
866 // Returning Object::null() is not required, since this method is private. | 862 // Returning Object::null() is not required, since this method is private. |
867 __ ret(); | 863 __ ret(); |
868 } | 864 } |
869 | 865 |
870 | 866 |
871 void Intrinsifier::Bigint_rsh(Assembler* assembler) { | 867 void Intrinsifier::Bigint_rsh(Assembler* assembler) { |
872 // static void _rsh(Uint32List x_digits, int x_used, int n, | 868 // static void _rsh(Uint32List x_digits, int x_used, int n, |
873 // Uint32List r_digits) | 869 // Uint32List r_digits) |
874 | 870 |
875 // Preserve THR to free ESI. | 871 // Preserve THR to free ESI. |
876 __ pushl(THR); | 872 __ pushl(THR); |
877 ASSERT(THR == ESI); | 873 ASSERT(THR == ESI); |
878 | 874 |
879 __ movl(EDI, Address(ESP, 5 * kWordSize)); // x_digits | 875 __ movl(EDI, Address(ESP, 5 * kWordSize)); // x_digits |
880 __ movl(ECX, Address(ESP, 3 * kWordSize)); // n is Smi | 876 __ movl(ECX, Address(ESP, 3 * kWordSize)); // n is Smi |
881 __ SmiUntag(ECX); | 877 __ SmiUntag(ECX); |
882 __ movl(EBX, Address(ESP, 2 * kWordSize)); // r_digits | 878 __ movl(EBX, Address(ESP, 2 * kWordSize)); // r_digits |
883 __ movl(EDX, ECX); | 879 __ movl(EDX, ECX); |
884 __ sarl(EDX, Immediate(5)); // EDX = n ~/ _DIGIT_BITS. | 880 __ sarl(EDX, Immediate(5)); // EDX = n ~/ _DIGIT_BITS. |
885 __ movl(ESI, Address(ESP, 4 * kWordSize)); // x_used > 0, Smi. | 881 __ movl(ESI, Address(ESP, 4 * kWordSize)); // x_used > 0, Smi. |
886 __ SmiUntag(ESI); | 882 __ SmiUntag(ESI); |
887 __ decl(ESI); | 883 __ decl(ESI); |
888 // EDI = &x_digits[x_used - 1]. | 884 // EDI = &x_digits[x_used - 1]. |
889 __ leal(EDI, FieldAddress(EDI, ESI, TIMES_4, TypedData::data_offset())); | 885 __ leal(EDI, FieldAddress(EDI, ESI, TIMES_4, TypedData::data_offset())); |
890 __ subl(ESI, EDX); | 886 __ subl(ESI, EDX); |
891 // EBX = &r_digits[x_used - 1 - (n ~/ 32)]. | 887 // EBX = &r_digits[x_used - 1 - (n ~/ 32)]. |
892 __ leal(EBX, FieldAddress(EBX, ESI, TIMES_4, TypedData::data_offset())); | 888 __ leal(EBX, FieldAddress(EBX, ESI, TIMES_4, TypedData::data_offset())); |
893 __ negl(ESI); | 889 __ negl(ESI); |
894 __ movl(EDX, Address(EDI, ESI, TIMES_4, 0)); | 890 __ movl(EDX, Address(EDI, ESI, TIMES_4, 0)); |
(...skipping 23 matching lines...) |
918 // static void _absAdd(Uint32List digits, int used, | 914 // static void _absAdd(Uint32List digits, int used, |
919 // Uint32List a_digits, int a_used, | 915 // Uint32List a_digits, int a_used, |
920 // Uint32List r_digits) | 916 // Uint32List r_digits) |
921 | 917 |
922 // Preserve THR to free ESI. | 918 // Preserve THR to free ESI. |
923 __ pushl(THR); | 919 __ pushl(THR); |
924 ASSERT(THR == ESI); | 920 ASSERT(THR == ESI); |
925 | 921 |
926 __ movl(EDI, Address(ESP, 6 * kWordSize)); // digits | 922 __ movl(EDI, Address(ESP, 6 * kWordSize)); // digits |
927 __ movl(EAX, Address(ESP, 5 * kWordSize)); // used is Smi | 923 __ movl(EAX, Address(ESP, 5 * kWordSize)); // used is Smi |
928 __ SmiUntag(EAX); // used > 0. | 924 __ SmiUntag(EAX); // used > 0. |
929 __ movl(ESI, Address(ESP, 4 * kWordSize)); // a_digits | 925 __ movl(ESI, Address(ESP, 4 * kWordSize)); // a_digits |
930 __ movl(ECX, Address(ESP, 3 * kWordSize)); // a_used is Smi | 926 __ movl(ECX, Address(ESP, 3 * kWordSize)); // a_used is Smi |
931 __ SmiUntag(ECX); // a_used > 0. | 927 __ SmiUntag(ECX); // a_used > 0. |
932 __ movl(EBX, Address(ESP, 2 * kWordSize)); // r_digits | 928 __ movl(EBX, Address(ESP, 2 * kWordSize)); // r_digits |
933 | 929 |
934 // Precompute 'used - a_used' now so that carry flag is not lost later. | 930 // Precompute 'used - a_used' now so that carry flag is not lost later. |
935 __ subl(EAX, ECX); | 931 __ subl(EAX, ECX); |
936 __ incl(EAX); // To account for the extra test between loops. | 932 __ incl(EAX); // To account for the extra test between loops. |
937 __ pushl(EAX); | 933 __ pushl(EAX); |
938 | 934 |
939 __ xorl(EDX, EDX); // EDX = 0, carry flag = 0. | 935 __ xorl(EDX, EDX); // EDX = 0, carry flag = 0. |
940 Label add_loop; | 936 Label add_loop; |
941 __ Bind(&add_loop); | 937 __ Bind(&add_loop); |
942 // Loop a_used times, ECX = a_used, ECX > 0. | 938 // Loop a_used times, ECX = a_used, ECX > 0. |
943 __ movl(EAX, FieldAddress(EDI, EDX, TIMES_4, TypedData::data_offset())); | 939 __ movl(EAX, FieldAddress(EDI, EDX, TIMES_4, TypedData::data_offset())); |
944 __ adcl(EAX, FieldAddress(ESI, EDX, TIMES_4, TypedData::data_offset())); | 940 __ adcl(EAX, FieldAddress(ESI, EDX, TIMES_4, TypedData::data_offset())); |
945 __ movl(FieldAddress(EBX, EDX, TIMES_4, TypedData::data_offset()), EAX); | 941 __ movl(FieldAddress(EBX, EDX, TIMES_4, TypedData::data_offset()), EAX); |
946 __ incl(EDX); // Does not affect carry flag. | 942 __ incl(EDX); // Does not affect carry flag. |
947 __ decl(ECX); // Does not affect carry flag. | 943 __ decl(ECX); // Does not affect carry flag. |
948 __ j(NOT_ZERO, &add_loop, Assembler::kNearJump); | 944 __ j(NOT_ZERO, &add_loop, Assembler::kNearJump); |
949 | 945 |
950 Label last_carry; | 946 Label last_carry; |
951 __ popl(ECX); | 947 __ popl(ECX); |
952 __ decl(ECX); // Does not affect carry flag. | 948 __ decl(ECX); // Does not affect carry flag. |
953 __ j(ZERO, &last_carry, Assembler::kNearJump); // If used - a_used == 0. | 949 __ j(ZERO, &last_carry, Assembler::kNearJump); // If used - a_used == 0. |
954 | 950 |
955 Label carry_loop; | 951 Label carry_loop; |
956 __ Bind(&carry_loop); | 952 __ Bind(&carry_loop); |
957 // Loop used - a_used times, ECX = used - a_used, ECX > 0. | 953 // Loop used - a_used times, ECX = used - a_used, ECX > 0. |
958 __ movl(EAX, FieldAddress(EDI, EDX, TIMES_4, TypedData::data_offset())); | 954 __ movl(EAX, FieldAddress(EDI, EDX, TIMES_4, TypedData::data_offset())); |
959 __ adcl(EAX, Immediate(0)); | 955 __ adcl(EAX, Immediate(0)); |
960 __ movl(FieldAddress(EBX, EDX, TIMES_4, TypedData::data_offset()), EAX); | 956 __ movl(FieldAddress(EBX, EDX, TIMES_4, TypedData::data_offset()), EAX); |
961 __ incl(EDX); // Does not affect carry flag. | 957 __ incl(EDX); // Does not affect carry flag. |
962 __ decl(ECX); // Does not affect carry flag. | 958 __ decl(ECX); // Does not affect carry flag. |
(...skipping 15 matching lines...) |
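For illustration, a minimal C++ sketch of the digit-wise addition that the two loops above implement, assuming 32-bit digits and that r_digits has room for the final carry; the helper name is illustrative, not part of the VM:

  #include <cstddef>
  #include <cstdint>

  // r[0..used) = digits[0..used) + a[0..a_used), with a_used <= used.
  static void AbsAdd(const uint32_t* digits, size_t used,
                     const uint32_t* a, size_t a_used, uint32_t* r) {
    uint64_t carry = 0;
    for (size_t i = 0; i < a_used; ++i) {    // add_loop: adcl over both inputs
      carry += static_cast<uint64_t>(digits[i]) + a[i];
      r[i] = static_cast<uint32_t>(carry);
      carry >>= 32;
    }
    for (size_t i = a_used; i < used; ++i) { // carry_loop: propagate carry only
      carry += digits[i];
      r[i] = static_cast<uint32_t>(carry);
      carry >>= 32;
    }
    r[used] = static_cast<uint32_t>(carry);  // last_carry (elided in the diff)
  }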
978 // static void _absSub(Uint32List digits, int used, | 974 // static void _absSub(Uint32List digits, int used, |
979 // Uint32List a_digits, int a_used, | 975 // Uint32List a_digits, int a_used, |
980 // Uint32List r_digits) | 976 // Uint32List r_digits) |
981 | 977 |
982 // Preserve THR to free ESI. | 978 // Preserve THR to free ESI. |
983 __ pushl(THR); | 979 __ pushl(THR); |
984 ASSERT(THR == ESI); | 980 ASSERT(THR == ESI); |
985 | 981 |
986 __ movl(EDI, Address(ESP, 6 * kWordSize)); // digits | 982 __ movl(EDI, Address(ESP, 6 * kWordSize)); // digits |
987 __ movl(EAX, Address(ESP, 5 * kWordSize)); // used is Smi | 983 __ movl(EAX, Address(ESP, 5 * kWordSize)); // used is Smi |
988 __ SmiUntag(EAX); // used > 0. | 984 __ SmiUntag(EAX); // used > 0. |
989 __ movl(ESI, Address(ESP, 4 * kWordSize)); // a_digits | 985 __ movl(ESI, Address(ESP, 4 * kWordSize)); // a_digits |
990 __ movl(ECX, Address(ESP, 3 * kWordSize)); // a_used is Smi | 986 __ movl(ECX, Address(ESP, 3 * kWordSize)); // a_used is Smi |
991 __ SmiUntag(ECX); // a_used > 0. | 987 __ SmiUntag(ECX); // a_used > 0. |
992 __ movl(EBX, Address(ESP, 2 * kWordSize)); // r_digits | 988 __ movl(EBX, Address(ESP, 2 * kWordSize)); // r_digits |
993 | 989 |
994 // Precompute 'used - a_used' now so that carry flag is not lost later. | 990 // Precompute 'used - a_used' now so that carry flag is not lost later. |
995 __ subl(EAX, ECX); | 991 __ subl(EAX, ECX); |
996 __ incl(EAX); // To account for the extra test between loops. | 992 __ incl(EAX); // To account for the extra test between loops. |
997 __ pushl(EAX); | 993 __ pushl(EAX); |
998 | 994 |
999 __ xorl(EDX, EDX); // EDX = 0, carry flag = 0. | 995 __ xorl(EDX, EDX); // EDX = 0, carry flag = 0. |
1000 Label sub_loop; | 996 Label sub_loop; |
1001 __ Bind(&sub_loop); | 997 __ Bind(&sub_loop); |
1002 // Loop a_used times, ECX = a_used, ECX > 0. | 998 // Loop a_used times, ECX = a_used, ECX > 0. |
1003 __ movl(EAX, FieldAddress(EDI, EDX, TIMES_4, TypedData::data_offset())); | 999 __ movl(EAX, FieldAddress(EDI, EDX, TIMES_4, TypedData::data_offset())); |
1004 __ sbbl(EAX, FieldAddress(ESI, EDX, TIMES_4, TypedData::data_offset())); | 1000 __ sbbl(EAX, FieldAddress(ESI, EDX, TIMES_4, TypedData::data_offset())); |
1005 __ movl(FieldAddress(EBX, EDX, TIMES_4, TypedData::data_offset()), EAX); | 1001 __ movl(FieldAddress(EBX, EDX, TIMES_4, TypedData::data_offset()), EAX); |
1006 __ incl(EDX); // Does not affect carry flag. | 1002 __ incl(EDX); // Does not affect carry flag. |
1007 __ decl(ECX); // Does not affect carry flag. | 1003 __ decl(ECX); // Does not affect carry flag. |
1008 __ j(NOT_ZERO, &sub_loop, Assembler::kNearJump); | 1004 __ j(NOT_ZERO, &sub_loop, Assembler::kNearJump); |
1009 | 1005 |
1010 Label done; | 1006 Label done; |
1011 __ popl(ECX); | 1007 __ popl(ECX); |
1012 __ decl(ECX); // Does not affect carry flag. | 1008 __ decl(ECX); // Does not affect carry flag. |
1013 __ j(ZERO, &done, Assembler::kNearJump); // If used - a_used == 0. | 1009 __ j(ZERO, &done, Assembler::kNearJump); // If used - a_used == 0. |
1014 | 1010 |
1015 Label carry_loop; | 1011 Label carry_loop; |
1016 __ Bind(&carry_loop); | 1012 __ Bind(&carry_loop); |
1017 // Loop used - a_used times, ECX = used - a_used, ECX > 0. | 1013 // Loop used - a_used times, ECX = used - a_used, ECX > 0. |
1018 __ movl(EAX, FieldAddress(EDI, EDX, TIMES_4, TypedData::data_offset())); | 1014 __ movl(EAX, FieldAddress(EDI, EDX, TIMES_4, TypedData::data_offset())); |
1019 __ sbbl(EAX, Immediate(0)); | 1015 __ sbbl(EAX, Immediate(0)); |
1020 __ movl(FieldAddress(EBX, EDX, TIMES_4, TypedData::data_offset()), EAX); | 1016 __ movl(FieldAddress(EBX, EDX, TIMES_4, TypedData::data_offset()), EAX); |
1021 __ incl(EDX); // Does not affect carry flag. | 1017 __ incl(EDX); // Does not affect carry flag. |
1022 __ decl(ECX); // Does not affect carry flag. | 1018 __ decl(ECX); // Does not affect carry flag. |
(...skipping 76 matching lines...) |
1099 // ajp: ESI | 1095 // ajp: ESI |
1100 // c: ECX | 1096 // c: ECX |
1101 // t: EDX:EAX (not live at loop entry) | 1097 // t: EDX:EAX (not live at loop entry) |
1102 // n: ESP[0] | 1098 // n: ESP[0] |
1103 | 1099 |
1104 // uint32_t mi = *mip++ | 1100 // uint32_t mi = *mip++ |
1105 __ movl(EAX, Address(EDI, 0)); | 1101 __ movl(EAX, Address(EDI, 0)); |
1106 __ addl(EDI, Immediate(Bigint::kBytesPerDigit)); | 1102 __ addl(EDI, Immediate(Bigint::kBytesPerDigit)); |
1107 | 1103 |
1108 // uint64_t t = x*mi | 1104 // uint64_t t = x*mi |
1109 __ mull(EBX); // t = EDX:EAX = EAX * EBX | 1105 __ mull(EBX); // t = EDX:EAX = EAX * EBX |
1110 __ addl(EAX, ECX); // t += c | 1106 __ addl(EAX, ECX); // t += c |
1111 __ adcl(EDX, Immediate(0)); | 1107 __ adcl(EDX, Immediate(0)); |
1112 | 1108 |
1113 // uint32_t aj = *ajp; t += aj | 1109 // uint32_t aj = *ajp; t += aj |
1114 __ addl(EAX, Address(ESI, 0)); | 1110 __ addl(EAX, Address(ESI, 0)); |
1115 __ adcl(EDX, Immediate(0)); | 1111 __ adcl(EDX, Immediate(0)); |
1116 | 1112 |
1117 // *ajp++ = low32(t) | 1113 // *ajp++ = low32(t) |
1118 __ movl(Address(ESI, 0), EAX); | 1114 __ movl(Address(ESI, 0), EAX); |
1119 __ addl(ESI, Immediate(Bigint::kBytesPerDigit)); | 1115 __ addl(ESI, Immediate(Bigint::kBytesPerDigit)); |
(...skipping 89 matching lines...) |
1209 | 1205 |
1210 // int n = used - i - 1 | 1206 // int n = used - i - 1 |
1211 __ movl(EAX, Address(ESP, 2 * kWordSize)); // used is Smi | 1207 __ movl(EAX, Address(ESP, 2 * kWordSize)); // used is Smi |
1212 __ subl(EAX, Address(ESP, 4 * kWordSize)); // i is Smi | 1208 __ subl(EAX, Address(ESP, 4 * kWordSize)); // i is Smi |
1213 __ SmiUntag(EAX); | 1209 __ SmiUntag(EAX); |
1214 __ decl(EAX); | 1210 __ decl(EAX); |
1215 __ pushl(EAX); // Save n on stack. | 1211 __ pushl(EAX); // Save n on stack. |
1216 | 1212 |
1217 // uint64_t c = high32(t) | 1213 // uint64_t c = high32(t) |
1218 __ pushl(Immediate(0)); // push high32(c) == 0 | 1214 __ pushl(Immediate(0)); // push high32(c) == 0 |
1219 __ pushl(EDX); // push low32(c) == high32(t) | 1215 __ pushl(EDX); // push low32(c) == high32(t) |
1220 | 1216 |
1221 Address n_addr = Address(ESP, 2 * kWordSize); | 1217 Address n_addr = Address(ESP, 2 * kWordSize); |
1222 Address ch_addr = Address(ESP, 1 * kWordSize); | 1218 Address ch_addr = Address(ESP, 1 * kWordSize); |
1223 Address cl_addr = Address(ESP, 0 * kWordSize); | 1219 Address cl_addr = Address(ESP, 0 * kWordSize); |
1224 | 1220 |
1225 Label loop, done; | 1221 Label loop, done; |
1226 __ Bind(&loop); | 1222 __ Bind(&loop); |
1227 // x: EBX | 1223 // x: EBX |
1228 // xip: EDI | 1224 // xip: EDI |
1229 // ajp: ESI | 1225 // ajp: ESI |
1230 // c: ESP[1]:ESP[0] | 1226 // c: ESP[1]:ESP[0] |
1231 // t: ECX:EDX:EAX (not live at loop entry) | 1227 // t: ECX:EDX:EAX (not live at loop entry) |
1232 // n: ESP[2] | 1228 // n: ESP[2] |
1233 | 1229 |
1234 // while (--n >= 0) | 1230 // while (--n >= 0) |
1235 __ decl(Address(ESP, 2 * kWordSize)); // --n | 1231 __ decl(Address(ESP, 2 * kWordSize)); // --n |
1236 __ j(NEGATIVE, &done, Assembler::kNearJump); | 1232 __ j(NEGATIVE, &done, Assembler::kNearJump); |
1237 | 1233 |
1238 // uint32_t xi = *xip++ | 1234 // uint32_t xi = *xip++ |
1239 __ movl(EAX, Address(EDI, 0)); | 1235 __ movl(EAX, Address(EDI, 0)); |
1240 __ addl(EDI, Immediate(Bigint::kBytesPerDigit)); | 1236 __ addl(EDI, Immediate(Bigint::kBytesPerDigit)); |
1241 | 1237 |
1242 // uint96_t t = ECX:EDX:EAX = 2*x*xi + aj + c | 1238 // uint96_t t = ECX:EDX:EAX = 2*x*xi + aj + c |
1243 __ mull(EBX); // EDX:EAX = EAX * EBX | 1239 __ mull(EBX); // EDX:EAX = EAX * EBX |
1244 __ xorl(ECX, ECX); // ECX = 0 | 1240 __ xorl(ECX, ECX); // ECX = 0 |
1245 __ shldl(ECX, EDX, Immediate(1)); | 1241 __ shldl(ECX, EDX, Immediate(1)); |
1246 __ shldl(EDX, EAX, Immediate(1)); | 1242 __ shldl(EDX, EAX, Immediate(1)); |
1247 __ shll(EAX, Immediate(1)); // ECX:EDX:EAX <<= 1 | 1243 __ shll(EAX, Immediate(1)); // ECX:EDX:EAX <<= 1 |
1248 __ addl(EAX, Address(ESI, 0)); // t += aj | 1244 __ addl(EAX, Address(ESI, 0)); // t += aj |
1249 __ adcl(EDX, Immediate(0)); | 1245 __ adcl(EDX, Immediate(0)); |
1250 __ adcl(ECX, Immediate(0)); | 1246 __ adcl(ECX, Immediate(0)); |
1251 __ addl(EAX, cl_addr); // t += low32(c) | 1247 __ addl(EAX, cl_addr); // t += low32(c) |
1252 __ adcl(EDX, ch_addr); // t += high32(c) << 32 | 1248 __ adcl(EDX, ch_addr); // t += high32(c) << 32 |
1253 __ adcl(ECX, Immediate(0)); | 1249 __ adcl(ECX, Immediate(0)); |
1254 | 1250 |
1255 // *ajp++ = low32(t) | 1251 // *ajp++ = low32(t) |
1256 __ movl(Address(ESI, 0), EAX); | 1252 __ movl(Address(ESI, 0), EAX); |
1257 __ addl(ESI, Immediate(Bigint::kBytesPerDigit)); | 1253 __ addl(ESI, Immediate(Bigint::kBytesPerDigit)); |
(...skipping 66 matching lines...) |
1324 __ j(EQUAL, &return_qd, Assembler::kNearJump); | 1320 __ j(EQUAL, &return_qd, Assembler::kNearJump); |
1325 | 1321 |
1326 // EAX = dl = dp[-1] | 1322 // EAX = dl = dp[-1] |
1327 __ movl(EAX, Address(EBX, -Bigint::kBytesPerDigit)); | 1323 __ movl(EAX, Address(EBX, -Bigint::kBytesPerDigit)); |
1328 | 1324 |
1329 // EAX = qd = dh:dl / yt = EDX:EAX / ECX | 1325 // EAX = qd = dh:dl / yt = EDX:EAX / ECX |
1330 __ divl(ECX); | 1326 __ divl(ECX); |
1331 | 1327 |
1332 __ Bind(&return_qd); | 1328 __ Bind(&return_qd); |
1333 // args[2] = qd | 1329 // args[2] = qd |
1334 __ movl(FieldAddress(EDI, | 1330 __ movl( |
1335 TypedData::data_offset() + 2*Bigint::kBytesPerDigit), | 1331 FieldAddress(EDI, TypedData::data_offset() + 2 * Bigint::kBytesPerDigit), |
1336 EAX); | 1332 EAX); |
1337 | 1333 |
1338 __ movl(EAX, Immediate(Smi::RawValue(1))); // One digit processed. | 1334 __ movl(EAX, Immediate(Smi::RawValue(1))); // One digit processed. |
1339 __ ret(); | 1335 __ ret(); |
1340 } | 1336 } |
1341 | 1337 |
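A hedged C++ sketch of the division step visible above, assuming dh < yt so the 64-by-32 quotient fits in one digit (the dh == yt special case is handled in the elided code); the helper name is illustrative:

  #include <cstdint>

  // qd = dh:dl / yt, exactly what divl computes from EDX:EAX over ECX.
  static uint32_t EstQuotientDigit(uint32_t dh, uint32_t dl, uint32_t yt) {
    const uint64_t dividend = (static_cast<uint64_t>(dh) << 32) | dl;
    return static_cast<uint32_t>(dividend / yt);
  }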
1342 | 1338 |
1343 void Intrinsifier::Montgomery_mulMod(Assembler* assembler) { | 1339 void Intrinsifier::Montgomery_mulMod(Assembler* assembler) { |
1344 // Pseudo code: | 1340 // Pseudo code: |
1345 // static int _mulMod(Uint32List args, Uint32List digits, int i) { | 1341 // static int _mulMod(Uint32List args, Uint32List digits, int i) { |
1346 // uint32_t rho = args[_RHO]; // _RHO == 2. | 1342 // uint32_t rho = args[_RHO]; // _RHO == 2. |
1347 // uint32_t d = digits[i >> 1]; // i is Smi. | 1343 // uint32_t d = digits[i >> 1]; // i is Smi. |
1348 // uint64_t t = rho*d; | 1344 // uint64_t t = rho*d; |
1349 // args[_MU] = t mod DIGIT_BASE; // _MU == 4. | 1345 // args[_MU] = t mod DIGIT_BASE; // _MU == 4. |
1350 // return 1; | 1346 // return 1; |
1351 // } | 1347 // } |
1352 | 1348 |
1353 // EDI = args | 1349 // EDI = args |
1354 __ movl(EDI, Address(ESP, 3 * kWordSize)); // args | 1350 __ movl(EDI, Address(ESP, 3 * kWordSize)); // args |
1355 | 1351 |
1356 // ECX = rho = args[2] | 1352 // ECX = rho = args[2] |
1357 __ movl(ECX, | 1353 __ movl(ECX, FieldAddress( |
1358 FieldAddress(EDI, | 1354 EDI, TypedData::data_offset() + 2 * Bigint::kBytesPerDigit)); |
1359 TypedData::data_offset() + 2*Bigint::kBytesPerDigit)); | |
1360 | 1355 |
1361 // EAX = digits[i >> 1] | 1356 // EAX = digits[i >> 1] |
1362 __ movl(EBX, Address(ESP, 2 * kWordSize)); // digits | 1357 __ movl(EBX, Address(ESP, 2 * kWordSize)); // digits |
1363 __ movl(EAX, Address(ESP, 1 * kWordSize)); // i is Smi | 1358 __ movl(EAX, Address(ESP, 1 * kWordSize)); // i is Smi |
1364 __ movl(EAX, FieldAddress(EBX, EAX, TIMES_2, TypedData::data_offset())); | 1359 __ movl(EAX, FieldAddress(EBX, EAX, TIMES_2, TypedData::data_offset())); |
1365 | 1360 |
1366 // EDX:EAX = t = rho*d | 1361 // EDX:EAX = t = rho*d |
1367 __ mull(ECX); | 1362 __ mull(ECX); |
1368 | 1363 |
1369 // args[4] = t mod DIGIT_BASE = low32(t) | 1364 // args[4] = t mod DIGIT_BASE = low32(t) |
1370 __ movl(FieldAddress(EDI, | 1365 __ movl( |
1371 TypedData::data_offset() + 4*Bigint::kBytesPerDigit), | 1366 FieldAddress(EDI, TypedData::data_offset() + 4 * Bigint::kBytesPerDigit), |
1372 EAX); | 1367 EAX); |
1373 | 1368 |
1374 __ movl(EAX, Immediate(Smi::RawValue(1))); // One digit processed. | 1369 __ movl(EAX, Immediate(Smi::RawValue(1))); // One digit processed. |
1375 __ ret(); | 1370 __ ret(); |
1376 } | 1371 } |
1377 | 1372 |
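The pseudo code above boils down to one 32x32->64 multiply whose low half is kept; a minimal C++ sketch, assuming DIGIT_BASE == 2^32 to match the 32-bit digits used here:

  #include <cstdint>

  // mu = (rho * d) mod 2^32, i.e. low32(t) from the mull above.
  static uint32_t MulMod32(uint32_t rho, uint32_t d) {
    const uint64_t t = static_cast<uint64_t>(rho) * d;  // EDX:EAX = rho * d
    return static_cast<uint32_t>(t);                    // stored into args[_MU]
  }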
1378 | 1373 |
1379 // Check if the last argument is a double, jump to label 'is_smi' if smi | 1374 // Check if the last argument is a double, jump to label 'is_smi' if smi |
1380 // (easy to convert to double), otherwise jump to label 'not_double_smi', | 1375 // (easy to convert to double), otherwise jump to label 'not_double_smi', |
1381 // Returns the last argument in EAX. | 1376 // Returns the last argument in EAX. |
1382 static void TestLastArgumentIsDouble(Assembler* assembler, | 1377 static void TestLastArgumentIsDouble(Assembler* assembler, |
1383 Label* is_smi, | 1378 Label* is_smi, |
1384 Label* not_double_smi) { | 1379 Label* not_double_smi) { |
1385 __ movl(EAX, Address(ESP, + 1 * kWordSize)); | 1380 __ movl(EAX, Address(ESP, +1 * kWordSize)); |
1386 __ testl(EAX, Immediate(kSmiTagMask)); | 1381 __ testl(EAX, Immediate(kSmiTagMask)); |
1387 __ j(ZERO, is_smi, Assembler::kNearJump); // Jump if Smi. | 1382 __ j(ZERO, is_smi, Assembler::kNearJump); // Jump if Smi. |
1388 __ CompareClassId(EAX, kDoubleCid, EBX); | 1383 __ CompareClassId(EAX, kDoubleCid, EBX); |
1389 __ j(NOT_EQUAL, not_double_smi, Assembler::kNearJump); | 1384 __ j(NOT_EQUAL, not_double_smi, Assembler::kNearJump); |
1390 // Fall through if double. | 1385 // Fall through if double. |
1391 } | 1386 } |
1392 | 1387 |
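TestLastArgumentIsDouble relies on the Smi tagging scheme: a tagged Smi has a zero low bit, so a single testl against kSmiTagMask separates Smis from heap objects. A small C++ sketch of that check (the mask value 1 is an assumption matching the tagging used here):

  #include <cstdint>

  static bool IsSmi(intptr_t tagged) {
    const intptr_t kSmiTagMaskSketch = 1;  // assumed; low bit 0 means Smi
    return (tagged & kSmiTagMaskSketch) == 0;
  }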
1393 | 1388 |
1394 // Both arguments on stack, arg0 (left) is a double, arg1 (right) is of unknown | 1389 // Both arguments on stack, arg0 (left) is a double, arg1 (right) is of unknown |
1395 // type. Return true or false object in the register EAX. Any NaN argument | 1390 // type. Return true or false object in the register EAX. Any NaN argument |
1396 // returns false. Any non-double arg1 causes control flow to fall through to the | 1391 // returns false. Any non-double arg1 causes control flow to fall through to the |
1397 // slow case (compiled method body). | 1392 // slow case (compiled method body). |
1398 static void CompareDoubles(Assembler* assembler, Condition true_condition) { | 1393 static void CompareDoubles(Assembler* assembler, Condition true_condition) { |
1399 Label fall_through, is_false, is_true, is_smi, double_op; | 1394 Label fall_through, is_false, is_true, is_smi, double_op; |
1400 TestLastArgumentIsDouble(assembler, &is_smi, &fall_through); | 1395 TestLastArgumentIsDouble(assembler, &is_smi, &fall_through); |
1401 // Both arguments are double, right operand is in EAX. | 1396 // Both arguments are double, right operand is in EAX. |
1402 __ movsd(XMM1, FieldAddress(EAX, Double::value_offset())); | 1397 __ movsd(XMM1, FieldAddress(EAX, Double::value_offset())); |
1403 __ Bind(&double_op); | 1398 __ Bind(&double_op); |
1404 __ movl(EAX, Address(ESP, + 2 * kWordSize)); // Left argument. | 1399 __ movl(EAX, Address(ESP, +2 * kWordSize)); // Left argument. |
1405 __ movsd(XMM0, FieldAddress(EAX, Double::value_offset())); | 1400 __ movsd(XMM0, FieldAddress(EAX, Double::value_offset())); |
1406 __ comisd(XMM0, XMM1); | 1401 __ comisd(XMM0, XMM1); |
1407 __ j(PARITY_EVEN, &is_false, Assembler::kNearJump); // NaN -> false; | 1402 __ j(PARITY_EVEN, &is_false, Assembler::kNearJump); // NaN -> false; |
1408 __ j(true_condition, &is_true, Assembler::kNearJump); | 1403 __ j(true_condition, &is_true, Assembler::kNearJump); |
1409 // Fall through false. | 1404 // Fall through false. |
1410 __ Bind(&is_false); | 1405 __ Bind(&is_false); |
1411 __ LoadObject(EAX, Bool::False()); | 1406 __ LoadObject(EAX, Bool::False()); |
1412 __ ret(); | 1407 __ ret(); |
1413 __ Bind(&is_true); | 1408 __ Bind(&is_true); |
1414 __ LoadObject(EAX, Bool::True()); | 1409 __ LoadObject(EAX, Bool::True()); |
(...skipping 37 matching lines...) |
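CompareDoubles above routes any unordered comisd result (PARITY_EVEN) to 'is_false' before testing the condition, so a NaN operand always produces false; a C++ sketch of the resulting semantics, shown for a less-than style true_condition:

  #include <cmath>

  static bool CompareDoublesLess(double left, double right) {
    if (std::isnan(left) || std::isnan(right)) return false;  // NaN -> false
    return left < right;  // 'true_condition' for the less-than variant
  }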
1452 | 1447 |
1453 | 1448 |
1454 // Expects left argument to be double (receiver). Right argument is unknown. | 1449 // Expects left argument to be double (receiver). Right argument is unknown. |
1455 // Both arguments are on stack. | 1450 // Both arguments are on stack. |
1456 static void DoubleArithmeticOperations(Assembler* assembler, Token::Kind kind) { | 1451 static void DoubleArithmeticOperations(Assembler* assembler, Token::Kind kind) { |
1457 Label fall_through, is_smi, double_op; | 1452 Label fall_through, is_smi, double_op; |
1458 TestLastArgumentIsDouble(assembler, &is_smi, &fall_through); | 1453 TestLastArgumentIsDouble(assembler, &is_smi, &fall_through); |
1459 // Both arguments are double, right operand is in EAX. | 1454 // Both arguments are double, right operand is in EAX. |
1460 __ movsd(XMM1, FieldAddress(EAX, Double::value_offset())); | 1455 __ movsd(XMM1, FieldAddress(EAX, Double::value_offset())); |
1461 __ Bind(&double_op); | 1456 __ Bind(&double_op); |
1462 __ movl(EAX, Address(ESP, + 2 * kWordSize)); // Left argument. | 1457 __ movl(EAX, Address(ESP, +2 * kWordSize)); // Left argument. |
1463 __ movsd(XMM0, FieldAddress(EAX, Double::value_offset())); | 1458 __ movsd(XMM0, FieldAddress(EAX, Double::value_offset())); |
1464 switch (kind) { | 1459 switch (kind) { |
1465 case Token::kADD: __ addsd(XMM0, XMM1); break; | 1460 case Token::kADD: |
1466 case Token::kSUB: __ subsd(XMM0, XMM1); break; | 1461 __ addsd(XMM0, XMM1); |
1467 case Token::kMUL: __ mulsd(XMM0, XMM1); break; | 1462 break; |
1468 case Token::kDIV: __ divsd(XMM0, XMM1); break; | 1463 case Token::kSUB: |
1469 default: UNREACHABLE(); | 1464 __ subsd(XMM0, XMM1); |
| 1465 break; |
| 1466 case Token::kMUL: |
| 1467 __ mulsd(XMM0, XMM1); |
| 1468 break; |
| 1469 case Token::kDIV: |
| 1470 __ divsd(XMM0, XMM1); |
| 1471 break; |
| 1472 default: |
| 1473 UNREACHABLE(); |
1470 } | 1474 } |
1471 const Class& double_class = Class::Handle( | 1475 const Class& double_class = |
1472 Isolate::Current()->object_store()->double_class()); | 1476 Class::Handle(Isolate::Current()->object_store()->double_class()); |
1473 __ TryAllocate(double_class, | 1477 __ TryAllocate(double_class, &fall_through, Assembler::kNearJump, |
1474 &fall_through, | |
1475 Assembler::kNearJump, | |
1476 EAX, // Result register. | 1478 EAX, // Result register. |
1477 EBX); | 1479 EBX); |
1478 __ movsd(FieldAddress(EAX, Double::value_offset()), XMM0); | 1480 __ movsd(FieldAddress(EAX, Double::value_offset()), XMM0); |
1479 __ ret(); | 1481 __ ret(); |
1480 __ Bind(&is_smi); | 1482 __ Bind(&is_smi); |
1481 __ SmiUntag(EAX); | 1483 __ SmiUntag(EAX); |
1482 __ cvtsi2sd(XMM1, EAX); | 1484 __ cvtsi2sd(XMM1, EAX); |
1483 __ jmp(&double_op); | 1485 __ jmp(&double_op); |
1484 __ Bind(&fall_through); | 1486 __ Bind(&fall_through); |
1485 } | 1487 } |
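A rough C++ model of the switch above; the boxing through TryAllocate and the Smi-to-double conversion on the is_smi path are elided, and the enum here is illustrative rather than the VM's Token::Kind:

  enum class ArithKind { kAdd, kSub, kMul, kDiv };

  static double DoubleArithmetic(double left, double right, ArithKind kind) {
    switch (kind) {
      case ArithKind::kAdd: return left + right;  // addsd
      case ArithKind::kSub: return left - right;  // subsd
      case ArithKind::kMul: return left * right;  // mulsd
      case ArithKind::kDiv: return left / right;  // divsd
    }
    return 0.0;  // unreachable for valid kinds
  }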
(...skipping 16 matching lines...) |
1502 | 1504 |
1503 void Intrinsifier::Double_div(Assembler* assembler) { | 1505 void Intrinsifier::Double_div(Assembler* assembler) { |
1504 DoubleArithmeticOperations(assembler, Token::kDIV); | 1506 DoubleArithmeticOperations(assembler, Token::kDIV); |
1505 } | 1507 } |
1506 | 1508 |
1507 | 1509 |
1508 // Left is double, right is integer (Bigint, Mint or Smi). | 1510 // Left is double, right is integer (Bigint, Mint or Smi). |
1509 void Intrinsifier::Double_mulFromInteger(Assembler* assembler) { | 1511 void Intrinsifier::Double_mulFromInteger(Assembler* assembler) { |
1510 Label fall_through; | 1512 Label fall_through; |
1511 // Only smis allowed. | 1513 // Only smis allowed. |
1512 __ movl(EAX, Address(ESP, + 1 * kWordSize)); | 1514 __ movl(EAX, Address(ESP, +1 * kWordSize)); |
1513 __ testl(EAX, Immediate(kSmiTagMask)); | 1515 __ testl(EAX, Immediate(kSmiTagMask)); |
1514 __ j(NOT_ZERO, &fall_through, Assembler::kNearJump); | 1516 __ j(NOT_ZERO, &fall_through, Assembler::kNearJump); |
1515 // Is Smi. | 1517 // Is Smi. |
1516 __ SmiUntag(EAX); | 1518 __ SmiUntag(EAX); |
1517 __ cvtsi2sd(XMM1, EAX); | 1519 __ cvtsi2sd(XMM1, EAX); |
1518 __ movl(EAX, Address(ESP, + 2 * kWordSize)); | 1520 __ movl(EAX, Address(ESP, +2 * kWordSize)); |
1519 __ movsd(XMM0, FieldAddress(EAX, Double::value_offset())); | 1521 __ movsd(XMM0, FieldAddress(EAX, Double::value_offset())); |
1520 __ mulsd(XMM0, XMM1); | 1522 __ mulsd(XMM0, XMM1); |
1521 const Class& double_class = Class::Handle( | 1523 const Class& double_class = |
1522 Isolate::Current()->object_store()->double_class()); | 1524 Class::Handle(Isolate::Current()->object_store()->double_class()); |
1523 __ TryAllocate(double_class, | 1525 __ TryAllocate(double_class, &fall_through, Assembler::kNearJump, |
1524 &fall_through, | |
1525 Assembler::kNearJump, | |
1526 EAX, // Result register. | 1526 EAX, // Result register. |
1527 EBX); | 1527 EBX); |
1528 __ movsd(FieldAddress(EAX, Double::value_offset()), XMM0); | 1528 __ movsd(FieldAddress(EAX, Double::value_offset()), XMM0); |
1529 __ ret(); | 1529 __ ret(); |
1530 __ Bind(&fall_through); | 1530 __ Bind(&fall_through); |
1531 } | 1531 } |
1532 | 1532 |
1533 | 1533 |
1534 void Intrinsifier::DoubleFromInteger(Assembler* assembler) { | 1534 void Intrinsifier::DoubleFromInteger(Assembler* assembler) { |
1535 Label fall_through; | 1535 Label fall_through; |
1536 __ movl(EAX, Address(ESP, +1 * kWordSize)); | 1536 __ movl(EAX, Address(ESP, +1 * kWordSize)); |
1537 __ testl(EAX, Immediate(kSmiTagMask)); | 1537 __ testl(EAX, Immediate(kSmiTagMask)); |
1538 __ j(NOT_ZERO, &fall_through, Assembler::kNearJump); | 1538 __ j(NOT_ZERO, &fall_through, Assembler::kNearJump); |
1539 // Is Smi. | 1539 // Is Smi. |
1540 __ SmiUntag(EAX); | 1540 __ SmiUntag(EAX); |
1541 __ cvtsi2sd(XMM0, EAX); | 1541 __ cvtsi2sd(XMM0, EAX); |
1542 const Class& double_class = Class::Handle( | 1542 const Class& double_class = |
1543 Isolate::Current()->object_store()->double_class()); | 1543 Class::Handle(Isolate::Current()->object_store()->double_class()); |
1544 __ TryAllocate(double_class, | 1544 __ TryAllocate(double_class, &fall_through, Assembler::kNearJump, |
1545 &fall_through, | |
1546 Assembler::kNearJump, | |
1547 EAX, // Result register. | 1545 EAX, // Result register. |
1548 EBX); | 1546 EBX); |
1549 __ movsd(FieldAddress(EAX, Double::value_offset()), XMM0); | 1547 __ movsd(FieldAddress(EAX, Double::value_offset()), XMM0); |
1550 __ ret(); | 1548 __ ret(); |
1551 __ Bind(&fall_through); | 1549 __ Bind(&fall_through); |
1552 } | 1550 } |
1553 | 1551 |
1554 | 1552 |
1555 void Intrinsifier::Double_getIsNaN(Assembler* assembler) { | 1553 void Intrinsifier::Double_getIsNaN(Assembler* assembler) { |
1556 Label is_true; | 1554 Label is_true; |
(...skipping 73 matching lines...) |
1630 | 1628 |
1631 | 1629 |
1632 // Argument type is not known | 1630 // Argument type is not known |
1633 void Intrinsifier::MathSqrt(Assembler* assembler) { | 1631 void Intrinsifier::MathSqrt(Assembler* assembler) { |
1634 Label fall_through, is_smi, double_op; | 1632 Label fall_through, is_smi, double_op; |
1635 TestLastArgumentIsDouble(assembler, &is_smi, &fall_through); | 1633 TestLastArgumentIsDouble(assembler, &is_smi, &fall_through); |
1636 // Argument is double and is in EAX. | 1634 // Argument is double and is in EAX. |
1637 __ movsd(XMM1, FieldAddress(EAX, Double::value_offset())); | 1635 __ movsd(XMM1, FieldAddress(EAX, Double::value_offset())); |
1638 __ Bind(&double_op); | 1636 __ Bind(&double_op); |
1639 __ sqrtsd(XMM0, XMM1); | 1637 __ sqrtsd(XMM0, XMM1); |
1640 const Class& double_class = Class::Handle( | 1638 const Class& double_class = |
1641 Isolate::Current()->object_store()->double_class()); | 1639 Class::Handle(Isolate::Current()->object_store()->double_class()); |
1642 __ TryAllocate(double_class, | 1640 __ TryAllocate(double_class, &fall_through, Assembler::kNearJump, |
1643 &fall_through, | |
1644 Assembler::kNearJump, | |
1645 EAX, // Result register. | 1641 EAX, // Result register. |
1646 EBX); | 1642 EBX); |
1647 __ movsd(FieldAddress(EAX, Double::value_offset()), XMM0); | 1643 __ movsd(FieldAddress(EAX, Double::value_offset()), XMM0); |
1648 __ ret(); | 1644 __ ret(); |
1649 __ Bind(&is_smi); | 1645 __ Bind(&is_smi); |
1650 __ SmiUntag(EAX); | 1646 __ SmiUntag(EAX); |
1651 __ cvtsi2sd(XMM1, EAX); | 1647 __ cvtsi2sd(XMM1, EAX); |
1652 __ jmp(&double_op); | 1648 __ jmp(&double_op); |
1653 __ Bind(&fall_through); | 1649 __ Bind(&fall_through); |
1654 } | 1650 } |
1655 | 1651 |
1656 | 1652 |
1657 // var state = ((_A * (_state[kSTATE_LO])) + _state[kSTATE_HI]) & _MASK_64; | 1653 // var state = ((_A * (_state[kSTATE_LO])) + _state[kSTATE_HI]) & _MASK_64; |
1658 // _state[kSTATE_LO] = state & _MASK_32; | 1654 // _state[kSTATE_LO] = state & _MASK_32; |
1659 // _state[kSTATE_HI] = state >> 32; | 1655 // _state[kSTATE_HI] = state >> 32; |
1660 void Intrinsifier::Random_nextState(Assembler* assembler) { | 1656 void Intrinsifier::Random_nextState(Assembler* assembler) { |
1661 const Library& math_lib = Library::Handle(Library::MathLibrary()); | 1657 const Library& math_lib = Library::Handle(Library::MathLibrary()); |
1662 ASSERT(!math_lib.IsNull()); | 1658 ASSERT(!math_lib.IsNull()); |
1663 const Class& random_class = Class::Handle( | 1659 const Class& random_class = |
1664 math_lib.LookupClassAllowPrivate(Symbols::_Random())); | 1660 Class::Handle(math_lib.LookupClassAllowPrivate(Symbols::_Random())); |
1665 ASSERT(!random_class.IsNull()); | 1661 ASSERT(!random_class.IsNull()); |
1666 const Field& state_field = Field::ZoneHandle( | 1662 const Field& state_field = Field::ZoneHandle( |
1667 random_class.LookupInstanceFieldAllowPrivate(Symbols::_state())); | 1663 random_class.LookupInstanceFieldAllowPrivate(Symbols::_state())); |
1668 ASSERT(!state_field.IsNull()); | 1664 ASSERT(!state_field.IsNull()); |
1669 const Field& random_A_field = Field::ZoneHandle( | 1665 const Field& random_A_field = Field::ZoneHandle( |
1670 random_class.LookupStaticFieldAllowPrivate(Symbols::_A())); | 1666 random_class.LookupStaticFieldAllowPrivate(Symbols::_A())); |
1671 ASSERT(!random_A_field.IsNull()); | 1667 ASSERT(!random_A_field.IsNull()); |
1672 ASSERT(random_A_field.is_const()); | 1668 ASSERT(random_A_field.is_const()); |
1673 const Instance& a_value = Instance::Handle(random_A_field.StaticValue()); | 1669 const Instance& a_value = Instance::Handle(random_A_field.StaticValue()); |
1674 const int64_t a_int_value = Integer::Cast(a_value).AsInt64Value(); | 1670 const int64_t a_int_value = Integer::Cast(a_value).AsInt64Value(); |
1675 // 'a_int_value' is a mask. | 1671 // 'a_int_value' is a mask. |
1676 ASSERT(Utils::IsUint(32, a_int_value)); | 1672 ASSERT(Utils::IsUint(32, a_int_value)); |
1677 int32_t a_int32_value = static_cast<int32_t>(a_int_value); | 1673 int32_t a_int32_value = static_cast<int32_t>(a_int_value); |
1678 // Receiver. | 1674 // Receiver. |
1679 __ movl(EAX, Address(ESP, + 1 * kWordSize)); | 1675 __ movl(EAX, Address(ESP, +1 * kWordSize)); |
1680 // Field '_state'. | 1676 // Field '_state'. |
1681 __ movl(EBX, FieldAddress(EAX, state_field.Offset())); | 1677 __ movl(EBX, FieldAddress(EAX, state_field.Offset())); |
1682 // Addresses of _state[0] and _state[1]. | 1678 // Addresses of _state[0] and _state[1]. |
1683 const intptr_t scale = Instance::ElementSizeFor(kTypedDataUint32ArrayCid); | 1679 const intptr_t scale = Instance::ElementSizeFor(kTypedDataUint32ArrayCid); |
1684 const intptr_t offset = Instance::DataOffsetFor(kTypedDataUint32ArrayCid); | 1680 const intptr_t offset = Instance::DataOffsetFor(kTypedDataUint32ArrayCid); |
1685 Address addr_0 = FieldAddress(EBX, 0 * scale + offset); | 1681 Address addr_0 = FieldAddress(EBX, 0 * scale + offset); |
1686 Address addr_1 = FieldAddress(EBX, 1 * scale + offset); | 1682 Address addr_1 = FieldAddress(EBX, 1 * scale + offset); |
1687 __ movl(EAX, Immediate(a_int32_value)); | 1683 __ movl(EAX, Immediate(a_int32_value)); |
1688 // 64-bit multiply EAX * value -> EDX:EAX. | 1684 // 64-bit multiply EAX * value -> EDX:EAX. |
1689 __ mull(addr_0); | 1685 __ mull(addr_0); |
1690 __ addl(EAX, addr_1); | 1686 __ addl(EAX, addr_1); |
1691 __ adcl(EDX, Immediate(0)); | 1687 __ adcl(EDX, Immediate(0)); |
1692 __ movl(addr_1, EDX); | 1688 __ movl(addr_1, EDX); |
1693 __ movl(addr_0, EAX); | 1689 __ movl(addr_0, EAX); |
1694 __ ret(); | 1690 __ ret(); |
1695 } | 1691 } |
1696 | 1692 |
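The state update above is a 32x32->64 multiply-with-carry step; a C++ sketch that mirrors the pseudo code, with _A passed in as a plain multiplier (its actual value is read from the _Random._A field at intrinsification time):

  #include <cstdint>

  static void RandomNextState(uint32_t a, uint32_t* state_lo, uint32_t* state_hi) {
    const uint64_t state =
        static_cast<uint64_t>(a) * (*state_lo) + (*state_hi);  // mull + addl/adcl
    *state_lo = static_cast<uint32_t>(state);                  // state & _MASK_32
    *state_hi = static_cast<uint32_t>(state >> 32);            // state >> 32
  }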
1697 | 1693 |
1698 // Identity comparison. | 1694 // Identity comparison. |
1699 void Intrinsifier::ObjectEquals(Assembler* assembler) { | 1695 void Intrinsifier::ObjectEquals(Assembler* assembler) { |
1700 Label is_true; | 1696 Label is_true; |
1701 __ movl(EAX, Address(ESP, + 1 * kWordSize)); | 1697 __ movl(EAX, Address(ESP, +1 * kWordSize)); |
1702 __ cmpl(EAX, Address(ESP, + 2 * kWordSize)); | 1698 __ cmpl(EAX, Address(ESP, +2 * kWordSize)); |
1703 __ j(EQUAL, &is_true, Assembler::kNearJump); | 1699 __ j(EQUAL, &is_true, Assembler::kNearJump); |
1704 __ LoadObject(EAX, Bool::False()); | 1700 __ LoadObject(EAX, Bool::False()); |
1705 __ ret(); | 1701 __ ret(); |
1706 __ Bind(&is_true); | 1702 __ Bind(&is_true); |
1707 __ LoadObject(EAX, Bool::True()); | 1703 __ LoadObject(EAX, Bool::True()); |
1708 __ ret(); | 1704 __ ret(); |
1709 } | 1705 } |
1710 | 1706 |
1711 | 1707 |
1712 static void RangeCheck(Assembler* assembler, | 1708 static void RangeCheck(Assembler* assembler, |
1713 Register reg, | 1709 Register reg, |
1714 intptr_t low, | 1710 intptr_t low, |
1715 intptr_t high, | 1711 intptr_t high, |
1716 Condition cc, | 1712 Condition cc, |
1717 Label* target) { | 1713 Label* target) { |
1718 __ subl(reg, Immediate(low)); | 1714 __ subl(reg, Immediate(low)); |
1719 __ cmpl(reg, Immediate(high - low)); | 1715 __ cmpl(reg, Immediate(high - low)); |
1720 __ j(cc, target); | 1716 __ j(cc, target); |
1721 } | 1717 } |
1722 | 1718 |
1723 | 1719 |
1724 const Condition kIfNotInRange = ABOVE; | 1720 const Condition kIfNotInRange = ABOVE; |
1725 const Condition kIfInRange = BELOW_EQUAL; | 1721 const Condition kIfInRange = BELOW_EQUAL; |
1726 | 1722 |
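RangeCheck folds the two-sided test low <= cid <= high into one unsigned comparison: after subtracting low, any value below low wraps around to a large unsigned number, so ABOVE means out of range and BELOW_EQUAL means in range. A C++ sketch of the same trick:

  #include <cstdint>

  static bool CidInRange(intptr_t cid, intptr_t low, intptr_t high) {
    return static_cast<uintptr_t>(cid - low) <=
           static_cast<uintptr_t>(high - low);  // kIfInRange == BELOW_EQUAL
  }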
1727 | 1723 |
1728 static void JumpIfInteger(Assembler* assembler, | 1724 static void JumpIfInteger(Assembler* assembler, Register cid, Label* target) { |
1729 Register cid, | |
1730 Label* target) { | |
1731 RangeCheck(assembler, cid, kSmiCid, kBigintCid, kIfInRange, target); | 1725 RangeCheck(assembler, cid, kSmiCid, kBigintCid, kIfInRange, target); |
1732 } | 1726 } |
1733 | 1727 |
1734 | 1728 |
1735 static void JumpIfNotInteger(Assembler* assembler, | 1729 static void JumpIfNotInteger(Assembler* assembler, |
1736 Register cid, | 1730 Register cid, |
1737 Label* target) { | 1731 Label* target) { |
1738 RangeCheck(assembler, cid, kSmiCid, kBigintCid, kIfNotInRange, target); | 1732 RangeCheck(assembler, cid, kSmiCid, kBigintCid, kIfNotInRange, target); |
1739 } | 1733 } |
1740 | 1734 |
1741 | 1735 |
1742 static void JumpIfString(Assembler* assembler, | 1736 static void JumpIfString(Assembler* assembler, Register cid, Label* target) { |
1743 Register cid, | 1737 RangeCheck(assembler, cid, kOneByteStringCid, kExternalTwoByteStringCid, |
1744 Label* target) { | 1738 kIfInRange, target); |
1745 RangeCheck(assembler, | |
1746 cid, | |
1747 kOneByteStringCid, | |
1748 kExternalTwoByteStringCid, | |
1749 kIfInRange, | |
1750 target); | |
1751 } | 1739 } |
1752 | 1740 |
1753 | 1741 |
1754 static void JumpIfNotString(Assembler* assembler, | 1742 static void JumpIfNotString(Assembler* assembler, Register cid, Label* target) { |
1755 Register cid, | 1743 RangeCheck(assembler, cid, kOneByteStringCid, kExternalTwoByteStringCid, |
1756 Label* target) { | 1744 kIfNotInRange, target); |
1757 RangeCheck(assembler, | |
1758 cid, | |
1759 kOneByteStringCid, | |
1760 kExternalTwoByteStringCid, | |
1761 kIfNotInRange, | |
1762 target); | |
1763 } | 1745 } |
1764 | 1746 |
1765 | 1747 |
1766 // Return type quickly for simple types (not parameterized and not signature). | 1748 // Return type quickly for simple types (not parameterized and not signature). |
1767 void Intrinsifier::ObjectRuntimeType(Assembler* assembler) { | 1749 void Intrinsifier::ObjectRuntimeType(Assembler* assembler) { |
1768 Label fall_through, use_canonical_type, not_double, not_integer; | 1750 Label fall_through, use_canonical_type, not_double, not_integer; |
1769 __ movl(EAX, Address(ESP, + 1 * kWordSize)); | 1751 __ movl(EAX, Address(ESP, +1 * kWordSize)); |
1770 __ LoadClassIdMayBeSmi(EDI, EAX); | 1752 __ LoadClassIdMayBeSmi(EDI, EAX); |
1771 | 1753 |
1772 __ cmpl(EDI, Immediate(kClosureCid)); | 1754 __ cmpl(EDI, Immediate(kClosureCid)); |
1773 __ j(EQUAL, &fall_through); // Instance is a closure. | 1755 __ j(EQUAL, &fall_through); // Instance is a closure. |
1774 | 1756 |
1775 __ cmpl(EDI, Immediate(kNumPredefinedCids)); | 1757 __ cmpl(EDI, Immediate(kNumPredefinedCids)); |
1776 __ j(ABOVE, &use_canonical_type); | 1758 __ j(ABOVE, &use_canonical_type); |
1777 | 1759 |
1778 // If object is an instance of _Double, return double type. | 1760 // If object is an instance of _Double, return double type. |
1779 __ cmpl(EDI, Immediate(kDoubleCid)); | 1761 __ cmpl(EDI, Immediate(kDoubleCid)); |
(...skipping 36 matching lines...) |
1816 __ j(EQUAL, &fall_through, Assembler::kNearJump); // Not yet set. | 1798 __ j(EQUAL, &fall_through, Assembler::kNearJump); // Not yet set. |
1817 __ ret(); | 1799 __ ret(); |
1818 | 1800 |
1819 __ Bind(&fall_through); | 1801 __ Bind(&fall_through); |
1820 } | 1802 } |
1821 | 1803 |
1822 | 1804 |
1823 void Intrinsifier::ObjectHaveSameRuntimeType(Assembler* assembler) { | 1805 void Intrinsifier::ObjectHaveSameRuntimeType(Assembler* assembler) { |
1824 Label fall_through, different_cids, equal, not_equal, not_integer; | 1806 Label fall_through, different_cids, equal, not_equal, not_integer; |
1825 | 1807 |
1826 __ movl(EAX, Address(ESP, + 1 * kWordSize)); | 1808 __ movl(EAX, Address(ESP, +1 * kWordSize)); |
1827 __ LoadClassIdMayBeSmi(EDI, EAX); | 1809 __ LoadClassIdMayBeSmi(EDI, EAX); |
1828 | 1810 |
1829 // Check if left hand side is a closure. Closures are handled in the runtime. | 1811 // Check if left hand side is a closure. Closures are handled in the runtime. |
1830 __ cmpl(EDI, Immediate(kClosureCid)); | 1812 __ cmpl(EDI, Immediate(kClosureCid)); |
1831 __ j(EQUAL, &fall_through); | 1813 __ j(EQUAL, &fall_through); |
1832 | 1814 |
1833 __ movl(EAX, Address(ESP, + 2 * kWordSize)); | 1815 __ movl(EAX, Address(ESP, +2 * kWordSize)); |
1834 __ LoadClassIdMayBeSmi(EBX, EAX); | 1816 __ LoadClassIdMayBeSmi(EBX, EAX); |
1835 | 1817 |
1836 // Check whether class ids match. If class ids don't match, objects can still | 1818 // Check whether class ids match. If class ids don't match, objects can still |
1837 // have the same runtime type (e.g. multiple string implementation classes | 1819 // have the same runtime type (e.g. multiple string implementation classes |
1838 // map to a single String type). | 1820 // map to a single String type). |
1839 __ cmpl(EDI, EBX); | 1821 __ cmpl(EDI, EBX); |
1840 __ j(NOT_EQUAL, &different_cids); | 1822 __ j(NOT_EQUAL, &different_cids); |
1841 | 1823 |
1842 // Objects have the same class and neither is a closure. | 1824 // Objects have the same class and neither is a closure. |
1843 // Check if there are no type arguments. In this case we can return true. | 1825 // Check if there are no type arguments. In this case we can return true. |
(...skipping 35 matching lines...) |
1879 __ Bind(¬_equal); | 1861 __ Bind(¬_equal); |
1880 __ LoadObject(EAX, Bool::False()); | 1862 __ LoadObject(EAX, Bool::False()); |
1881 __ ret(); | 1863 __ ret(); |
1882 | 1864 |
1883 __ Bind(&fall_through); | 1865 __ Bind(&fall_through); |
1884 } | 1866 } |
1885 | 1867 |
1886 | 1868 |
1887 void Intrinsifier::String_getHashCode(Assembler* assembler) { | 1869 void Intrinsifier::String_getHashCode(Assembler* assembler) { |
1888 Label fall_through; | 1870 Label fall_through; |
1889 __ movl(EAX, Address(ESP, + 1 * kWordSize)); // String object. | 1871 __ movl(EAX, Address(ESP, +1 * kWordSize)); // String object. |
1890 __ movl(EAX, FieldAddress(EAX, String::hash_offset())); | 1872 __ movl(EAX, FieldAddress(EAX, String::hash_offset())); |
1891 __ cmpl(EAX, Immediate(0)); | 1873 __ cmpl(EAX, Immediate(0)); |
1892 __ j(EQUAL, &fall_through, Assembler::kNearJump); | 1874 __ j(EQUAL, &fall_through, Assembler::kNearJump); |
1893 __ ret(); | 1875 __ ret(); |
1894 __ Bind(&fall_through); | 1876 __ Bind(&fall_through); |
1895 // Hash not yet computed. | 1877 // Hash not yet computed. |
1896 } | 1878 } |
1897 | 1879 |
1898 | 1880 |
1899 // bool _substringMatches(int start, String other) | 1881 // bool _substringMatches(int start, String other) |
1900 void Intrinsifier::StringBaseSubstringMatches(Assembler* assembler) { | 1882 void Intrinsifier::StringBaseSubstringMatches(Assembler* assembler) { |
1901 // For precompilation, not implemented on IA32. | 1883 // For precompilation, not implemented on IA32. |
1902 } | 1884 } |
1903 | 1885 |
1904 | 1886 |
1905 void Intrinsifier::StringBaseCharAt(Assembler* assembler) { | 1887 void Intrinsifier::StringBaseCharAt(Assembler* assembler) { |
1906 Label fall_through, try_two_byte_string; | 1888 Label fall_through, try_two_byte_string; |
1907 __ movl(EBX, Address(ESP, + 1 * kWordSize)); // Index. | 1889 __ movl(EBX, Address(ESP, +1 * kWordSize)); // Index. |
1908 __ movl(EAX, Address(ESP, + 2 * kWordSize)); // String. | 1890 __ movl(EAX, Address(ESP, +2 * kWordSize)); // String. |
1909 __ testl(EBX, Immediate(kSmiTagMask)); | 1891 __ testl(EBX, Immediate(kSmiTagMask)); |
1910 __ j(NOT_ZERO, &fall_through, Assembler::kNearJump); // Non-smi index. | 1892 __ j(NOT_ZERO, &fall_through, Assembler::kNearJump); // Non-smi index. |
1911 // Range check. | 1893 // Range check. |
1912 __ cmpl(EBX, FieldAddress(EAX, String::length_offset())); | 1894 __ cmpl(EBX, FieldAddress(EAX, String::length_offset())); |
1913 // Runtime throws exception. | 1895 // Runtime throws exception. |
1914 __ j(ABOVE_EQUAL, &fall_through, Assembler::kNearJump); | 1896 __ j(ABOVE_EQUAL, &fall_through, Assembler::kNearJump); |
1915 __ CompareClassId(EAX, kOneByteStringCid, EDI); | 1897 __ CompareClassId(EAX, kOneByteStringCid, EDI); |
1916 __ j(NOT_EQUAL, &try_two_byte_string, Assembler::kNearJump); | 1898 __ j(NOT_EQUAL, &try_two_byte_string, Assembler::kNearJump); |
1917 __ SmiUntag(EBX); | 1899 __ SmiUntag(EBX); |
1918 __ movzxb(EBX, FieldAddress(EAX, EBX, TIMES_1, OneByteString::data_offset())); | 1900 __ movzxb(EBX, FieldAddress(EAX, EBX, TIMES_1, OneByteString::data_offset())); |
1919 __ cmpl(EBX, Immediate(Symbols::kNumberOfOneCharCodeSymbols)); | 1901 __ cmpl(EBX, Immediate(Symbols::kNumberOfOneCharCodeSymbols)); |
1920 __ j(GREATER_EQUAL, &fall_through); | 1902 __ j(GREATER_EQUAL, &fall_through); |
1921 __ movl(EAX, | 1903 __ movl(EAX, |
1922 Immediate(reinterpret_cast<uword>(Symbols::PredefinedAddress()))); | 1904 Immediate(reinterpret_cast<uword>(Symbols::PredefinedAddress()))); |
1923 __ movl(EAX, Address(EAX, | 1905 __ movl(EAX, Address(EAX, EBX, TIMES_4, |
1924 EBX, | |
1925 TIMES_4, | |
1926 Symbols::kNullCharCodeSymbolOffset * kWordSize)); | 1906 Symbols::kNullCharCodeSymbolOffset * kWordSize)); |
1927 __ ret(); | 1907 __ ret(); |
1928 | 1908 |
1929 __ Bind(&try_two_byte_string); | 1909 __ Bind(&try_two_byte_string); |
1930 __ CompareClassId(EAX, kTwoByteStringCid, EDI); | 1910 __ CompareClassId(EAX, kTwoByteStringCid, EDI); |
1931 __ j(NOT_EQUAL, &fall_through, Assembler::kNearJump); | 1911 __ j(NOT_EQUAL, &fall_through, Assembler::kNearJump); |
1932 ASSERT(kSmiTagShift == 1); | 1912 ASSERT(kSmiTagShift == 1); |
1933 __ movzxw(EBX, FieldAddress(EAX, EBX, TIMES_1, TwoByteString::data_offset())); | 1913 __ movzxw(EBX, FieldAddress(EAX, EBX, TIMES_1, TwoByteString::data_offset())); |
1934 __ cmpl(EBX, Immediate(Symbols::kNumberOfOneCharCodeSymbols)); | 1914 __ cmpl(EBX, Immediate(Symbols::kNumberOfOneCharCodeSymbols)); |
1935 __ j(GREATER_EQUAL, &fall_through); | 1915 __ j(GREATER_EQUAL, &fall_through); |
1936 __ movl(EAX, | 1916 __ movl(EAX, |
1937 Immediate(reinterpret_cast<uword>(Symbols::PredefinedAddress()))); | 1917 Immediate(reinterpret_cast<uword>(Symbols::PredefinedAddress()))); |
1938 __ movl(EAX, Address(EAX, | 1918 __ movl(EAX, Address(EAX, EBX, TIMES_4, |
1939 EBX, | |
1940 TIMES_4, | |
1941 Symbols::kNullCharCodeSymbolOffset * kWordSize)); | 1919 Symbols::kNullCharCodeSymbolOffset * kWordSize)); |
1942 __ ret(); | 1920 __ ret(); |
1943 | 1921 |
1944 __ Bind(&fall_through); | 1922 __ Bind(&fall_through); |
1945 } | 1923 } |
1946 | 1924 |
1947 | 1925 |
1948 void Intrinsifier::StringBaseIsEmpty(Assembler* assembler) { | 1926 void Intrinsifier::StringBaseIsEmpty(Assembler* assembler) { |
1949 Label is_true; | 1927 Label is_true; |
1950 // Get length. | 1928 // Get length. |
1951 __ movl(EAX, Address(ESP, + 1 * kWordSize)); // String object. | 1929 __ movl(EAX, Address(ESP, +1 * kWordSize)); // String object. |
1952 __ movl(EAX, FieldAddress(EAX, String::length_offset())); | 1930 __ movl(EAX, FieldAddress(EAX, String::length_offset())); |
1953 __ cmpl(EAX, Immediate(Smi::RawValue(0))); | 1931 __ cmpl(EAX, Immediate(Smi::RawValue(0))); |
1954 __ j(EQUAL, &is_true, Assembler::kNearJump); | 1932 __ j(EQUAL, &is_true, Assembler::kNearJump); |
1955 __ LoadObject(EAX, Bool::False()); | 1933 __ LoadObject(EAX, Bool::False()); |
1956 __ ret(); | 1934 __ ret(); |
1957 __ Bind(&is_true); | 1935 __ Bind(&is_true); |
1958 __ LoadObject(EAX, Bool::True()); | 1936 __ LoadObject(EAX, Bool::True()); |
1959 __ ret(); | 1937 __ ret(); |
1960 } | 1938 } |
1961 | 1939 |
1962 | 1940 |
1963 void Intrinsifier::OneByteString_getHashCode(Assembler* assembler) { | 1941 void Intrinsifier::OneByteString_getHashCode(Assembler* assembler) { |
1964 Label compute_hash; | 1942 Label compute_hash; |
1965 __ movl(EBX, Address(ESP, + 1 * kWordSize)); // OneByteString object. | 1943 __ movl(EBX, Address(ESP, +1 * kWordSize)); // OneByteString object. |
1966 __ movl(EAX, FieldAddress(EBX, String::hash_offset())); | 1944 __ movl(EAX, FieldAddress(EBX, String::hash_offset())); |
1967 __ cmpl(EAX, Immediate(0)); | 1945 __ cmpl(EAX, Immediate(0)); |
1968 __ j(EQUAL, &compute_hash, Assembler::kNearJump); | 1946 __ j(EQUAL, &compute_hash, Assembler::kNearJump); |
1969 __ ret(); | 1947 __ ret(); |
1970 | 1948 |
1971 __ Bind(&compute_hash); | 1949 __ Bind(&compute_hash); |
1972 // Hash not yet computed, use algorithm of class StringHasher. | 1950 // Hash not yet computed, use algorithm of class StringHasher. |
1973 __ movl(ECX, FieldAddress(EBX, String::length_offset())); | 1951 __ movl(ECX, FieldAddress(EBX, String::length_offset())); |
1974 __ SmiUntag(ECX); | 1952 __ SmiUntag(ECX); |
1975 __ xorl(EAX, EAX); | 1953 __ xorl(EAX, EAX); |
(...skipping 33 matching lines...) |
2009 __ shll(EDX, Immediate(3)); | 1987 __ shll(EDX, Immediate(3)); |
2010 __ addl(EAX, EDX); | 1988 __ addl(EAX, EDX); |
2011 __ movl(EDX, EAX); | 1989 __ movl(EDX, EAX); |
2012 __ shrl(EDX, Immediate(11)); | 1990 __ shrl(EDX, Immediate(11)); |
2013 __ xorl(EAX, EDX); | 1991 __ xorl(EAX, EDX); |
2014 __ movl(EDX, EAX); | 1992 __ movl(EDX, EAX); |
2015 __ shll(EDX, Immediate(15)); | 1993 __ shll(EDX, Immediate(15)); |
2016 __ addl(EAX, EDX); | 1994 __ addl(EAX, EDX); |
2017 // hash_ = hash_ & ((static_cast<intptr_t>(1) << bits) - 1); | 1995 // hash_ = hash_ & ((static_cast<intptr_t>(1) << bits) - 1); |
2018 __ andl(EAX, | 1996 __ andl(EAX, |
2019 Immediate(((static_cast<intptr_t>(1) << String::kHashBits) - 1))); | 1997 Immediate(((static_cast<intptr_t>(1) << String::kHashBits) - 1))); |
2020 | 1998 |
2021 // return hash_ == 0 ? 1 : hash_; | 1999 // return hash_ == 0 ? 1 : hash_; |
2022 __ cmpl(EAX, Immediate(0)); | 2000 __ cmpl(EAX, Immediate(0)); |
2023 __ j(NOT_EQUAL, &set_hash_code, Assembler::kNearJump); | 2001 __ j(NOT_EQUAL, &set_hash_code, Assembler::kNearJump); |
2024 __ incl(EAX); | 2002 __ incl(EAX); |
2025 __ Bind(&set_hash_code); | 2003 __ Bind(&set_hash_code); |
2026 __ SmiTag(EAX); | 2004 __ SmiTag(EAX); |
2027 __ StoreIntoSmiField(FieldAddress(EBX, String::hash_offset()), EAX); | 2005 __ StoreIntoSmiField(FieldAddress(EBX, String::hash_offset()), EAX); |
2028 __ ret(); | 2006 __ ret(); |
2029 } | 2007 } |
2030 | 2008 |
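A C++ sketch of the hash finalization visible above (the per-character combine sits in the elided portion of the loop); hash_bits stands in for String::kHashBits:

  #include <cstdint>

  static uint32_t FinalizeStringHash(uint32_t hash, int hash_bits) {
    hash += hash << 3;   // shll 3, addl
    hash ^= hash >> 11;  // shrl 11, xorl
    hash += hash << 15;  // shll 15, addl
    hash &= (static_cast<uint32_t>(1) << hash_bits) - 1;  // mask to hash_bits
    return hash == 0 ? 1 : hash;  // never store 0, which means "not computed"
  }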
2031 | 2009 |
2032 // Allocates one-byte string of length 'end - start'. The content is not | 2010 // Allocates one-byte string of length 'end - start'. The content is not |
2033 // initialized. 'length-reg' contains tagged length. | 2011 // initialized. 'length-reg' contains tagged length. |
2034 // Returns new string as tagged pointer in EAX. | 2012 // Returns new string as tagged pointer in EAX. |
2035 static void TryAllocateOnebyteString(Assembler* assembler, | 2013 static void TryAllocateOnebyteString(Assembler* assembler, |
2036 Label* ok, | 2014 Label* ok, |
2037 Label* failure, | 2015 Label* failure, |
2038 Register length_reg) { | 2016 Register length_reg) { |
2039 NOT_IN_PRODUCT( | 2017 NOT_IN_PRODUCT( |
2040 __ MaybeTraceAllocation(kOneByteStringCid, EAX, failure, false)); | 2018 __ MaybeTraceAllocation(kOneByteStringCid, EAX, failure, false)); |
2041 if (length_reg != EDI) { | 2019 if (length_reg != EDI) { |
2042 __ movl(EDI, length_reg); | 2020 __ movl(EDI, length_reg); |
2043 } | 2021 } |
2044 Label pop_and_fail; | 2022 Label pop_and_fail; |
2045 __ pushl(EDI); // Preserve length. | 2023 __ pushl(EDI); // Preserve length. |
2046 __ SmiUntag(EDI); | 2024 __ SmiUntag(EDI); |
2047 const intptr_t fixed_size = sizeof(RawString) + kObjectAlignment - 1; | 2025 const intptr_t fixed_size = sizeof(RawString) + kObjectAlignment - 1; |
2048 __ leal(EDI, Address(EDI, TIMES_1, fixed_size)); // EDI is untagged. | 2026 __ leal(EDI, Address(EDI, TIMES_1, fixed_size)); // EDI is untagged. |
2049 __ andl(EDI, Immediate(-kObjectAlignment)); | 2027 __ andl(EDI, Immediate(-kObjectAlignment)); |
2050 | 2028 |
(...skipping 37 matching lines...) |
2088 __ xorl(EDI, EDI); | 2066 __ xorl(EDI, EDI); |
2089 __ Bind(&done); | 2067 __ Bind(&done); |
2090 | 2068 |
2091 // Get the class index and insert it into the tags. | 2069 // Get the class index and insert it into the tags. |
2092 __ orl(EDI, Immediate(RawObject::ClassIdTag::encode(cid))); | 2070 __ orl(EDI, Immediate(RawObject::ClassIdTag::encode(cid))); |
2093 __ movl(FieldAddress(EAX, String::tags_offset()), EDI); // Tags. | 2071 __ movl(FieldAddress(EAX, String::tags_offset()), EDI); // Tags. |
2094 } | 2072 } |
2095 | 2073 |
2096 // Set the length field. | 2074 // Set the length field. |
2097 __ popl(EDI); | 2075 __ popl(EDI); |
2098 __ StoreIntoObjectNoBarrier(EAX, | 2076 __ StoreIntoObjectNoBarrier(EAX, FieldAddress(EAX, String::length_offset()), |
2099 FieldAddress(EAX, String::length_offset()), | |
2100 EDI); | 2077 EDI); |
2101 // Clear hash. | 2078 // Clear hash. |
2102 __ ZeroInitSmiField(FieldAddress(EAX, String::hash_offset())); | 2079 __ ZeroInitSmiField(FieldAddress(EAX, String::hash_offset())); |
2103 __ jmp(ok, Assembler::kNearJump); | 2080 __ jmp(ok, Assembler::kNearJump); |
2104 | 2081 |
2105 __ Bind(&pop_and_fail); | 2082 __ Bind(&pop_and_fail); |
2106 __ popl(EDI); | 2083 __ popl(EDI); |
2107 __ jmp(failure); | 2084 __ jmp(failure); |
2108 } | 2085 } |
2109 | 2086 |
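The size computation in TryAllocateOnebyteString rounds the header plus one byte per character up to the allocation alignment; a C++ sketch, assuming the alignment is a power of two:

  #include <cstddef>

  static size_t OneByteStringAllocationSize(size_t length, size_t header_size,
                                            size_t alignment) {
    return (length + header_size + alignment - 1) & ~(alignment - 1);
  }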
2110 | 2087 |
2111 // Arg0: OneByteString (receiver) | 2088 // Arg0: OneByteString (receiver) |
2112 // Arg1: Start index as Smi. | 2089 // Arg1: Start index as Smi. |
2113 // Arg2: End index as Smi. | 2090 // Arg2: End index as Smi. |
2114 // The indexes must be valid. | 2091 // The indexes must be valid. |
2115 void Intrinsifier::OneByteString_substringUnchecked(Assembler* assembler) { | 2092 void Intrinsifier::OneByteString_substringUnchecked(Assembler* assembler) { |
2116 const intptr_t kStringOffset = 3 * kWordSize; | 2093 const intptr_t kStringOffset = 3 * kWordSize; |
2117 const intptr_t kStartIndexOffset = 2 * kWordSize; | 2094 const intptr_t kStartIndexOffset = 2 * kWordSize; |
2118 const intptr_t kEndIndexOffset = 1 * kWordSize; | 2095 const intptr_t kEndIndexOffset = 1 * kWordSize; |
2119 Label fall_through, ok; | 2096 Label fall_through, ok; |
2120 __ movl(EAX, Address(ESP, + kStartIndexOffset)); | 2097 __ movl(EAX, Address(ESP, +kStartIndexOffset)); |
2121 __ movl(EDI, Address(ESP, + kEndIndexOffset)); | 2098 __ movl(EDI, Address(ESP, +kEndIndexOffset)); |
2122 __ orl(EAX, EDI); | 2099 __ orl(EAX, EDI); |
2123 __ testl(EAX, Immediate(kSmiTagMask)); | 2100 __ testl(EAX, Immediate(kSmiTagMask)); |
2124 __ j(NOT_ZERO, &fall_through); // 'start', 'end' not Smi. | 2101 __ j(NOT_ZERO, &fall_through); // 'start', 'end' not Smi. |
2125 | 2102 |
2126 __ subl(EDI, Address(ESP, + kStartIndexOffset)); | 2103 __ subl(EDI, Address(ESP, +kStartIndexOffset)); |
2127 TryAllocateOnebyteString(assembler, &ok, &fall_through, EDI); | 2104 TryAllocateOnebyteString(assembler, &ok, &fall_through, EDI); |
2128 __ Bind(&ok); | 2105 __ Bind(&ok); |
2129 // EAX: new string as tagged pointer. | 2106 // EAX: new string as tagged pointer. |
2130 // Copy string. | 2107 // Copy string. |
2131 __ movl(EDI, Address(ESP, + kStringOffset)); | 2108 __ movl(EDI, Address(ESP, +kStringOffset)); |
2132 __ movl(EBX, Address(ESP, + kStartIndexOffset)); | 2109 __ movl(EBX, Address(ESP, +kStartIndexOffset)); |
2133 __ SmiUntag(EBX); | 2110 __ SmiUntag(EBX); |
2134 __ leal(EDI, FieldAddress(EDI, EBX, TIMES_1, OneByteString::data_offset())); | 2111 __ leal(EDI, FieldAddress(EDI, EBX, TIMES_1, OneByteString::data_offset())); |
2135 // EDI: Start address to copy from (untagged). | 2112 // EDI: Start address to copy from (untagged). |
2136 // EBX: Untagged start index. | 2113 // EBX: Untagged start index. |
2137 __ movl(ECX, Address(ESP, + kEndIndexOffset)); | 2114 __ movl(ECX, Address(ESP, +kEndIndexOffset)); |
2138 __ SmiUntag(ECX); | 2115 __ SmiUntag(ECX); |
2139 __ subl(ECX, EBX); | 2116 __ subl(ECX, EBX); |
2140 __ xorl(EDX, EDX); | 2117 __ xorl(EDX, EDX); |
2141 // EDI: Start address to copy from (untagged). | 2118 // EDI: Start address to copy from (untagged). |
2142 // ECX: Untagged number of bytes to copy. | 2119 // ECX: Untagged number of bytes to copy. |
2143 // EAX: Tagged result string. | 2120 // EAX: Tagged result string. |
2144 // EDX: Loop counter. | 2121 // EDX: Loop counter. |
2145 // EBX: Scratch register. | 2122 // EBX: Scratch register. |
2146 Label loop, check; | 2123 Label loop, check; |
2147 __ jmp(&check, Assembler::kNearJump); | 2124 __ jmp(&check, Assembler::kNearJump); |
2148 __ Bind(&loop); | 2125 __ Bind(&loop); |
2149 __ movzxb(EBX, Address(EDI, EDX, TIMES_1, 0)); | 2126 __ movzxb(EBX, Address(EDI, EDX, TIMES_1, 0)); |
2150 __ movb(FieldAddress(EAX, EDX, TIMES_1, OneByteString::data_offset()), BL); | 2127 __ movb(FieldAddress(EAX, EDX, TIMES_1, OneByteString::data_offset()), BL); |
2151 __ incl(EDX); | 2128 __ incl(EDX); |
2152 __ Bind(&check); | 2129 __ Bind(&check); |
2153 __ cmpl(EDX, ECX); | 2130 __ cmpl(EDX, ECX); |
2154 __ j(LESS, &loop, Assembler::kNearJump); | 2131 __ j(LESS, &loop, Assembler::kNearJump); |
2155 __ ret(); | 2132 __ ret(); |
2156 __ Bind(&fall_through); | 2133 __ Bind(&fall_through); |
2157 } | 2134 } |
2158 | 2135 |
2159 | 2136 |
2160 void Intrinsifier::OneByteStringSetAt(Assembler* assembler) { | 2137 void Intrinsifier::OneByteStringSetAt(Assembler* assembler) { |
2161 __ movl(ECX, Address(ESP, + 1 * kWordSize)); // Value. | 2138 __ movl(ECX, Address(ESP, +1 * kWordSize)); // Value. |
2162 __ movl(EBX, Address(ESP, + 2 * kWordSize)); // Index. | 2139 __ movl(EBX, Address(ESP, +2 * kWordSize)); // Index. |
2163 __ movl(EAX, Address(ESP, + 3 * kWordSize)); // OneByteString. | 2140 __ movl(EAX, Address(ESP, +3 * kWordSize)); // OneByteString. |
2164 __ SmiUntag(EBX); | 2141 __ SmiUntag(EBX); |
2165 __ SmiUntag(ECX); | 2142 __ SmiUntag(ECX); |
2166 __ movb(FieldAddress(EAX, EBX, TIMES_1, OneByteString::data_offset()), CL); | 2143 __ movb(FieldAddress(EAX, EBX, TIMES_1, OneByteString::data_offset()), CL); |
2167 __ ret(); | 2144 __ ret(); |
2168 } | 2145 } |
2169 | 2146 |
2170 | 2147 |
2171 void Intrinsifier::OneByteString_allocate(Assembler* assembler) { | 2148 void Intrinsifier::OneByteString_allocate(Assembler* assembler) { |
2172 __ movl(EDI, Address(ESP, + 1 * kWordSize)); // Length. | 2149 __ movl(EDI, Address(ESP, +1 * kWordSize)); // Length. |
2173 Label fall_through, ok; | 2150 Label fall_through, ok; |
2174 TryAllocateOnebyteString(assembler, &ok, &fall_through, EDI); | 2151 TryAllocateOnebyteString(assembler, &ok, &fall_through, EDI); |
2175 // EDI: Start address to copy from (untagged). | 2152 // EDI: Start address to copy from (untagged). |
2176 | 2153 |
2177 __ Bind(&ok); | 2154 __ Bind(&ok); |
2178 __ ret(); | 2155 __ ret(); |
2179 | 2156 |
2180 __ Bind(&fall_through); | 2157 __ Bind(&fall_through); |
2181 } | 2158 } |
2182 | 2159 |
2183 | 2160 |
2184 // TODO(srdjan): Add combinations (one-byte/two-byte/external strings). | 2161 // TODO(srdjan): Add combinations (one-byte/two-byte/external strings). |
2185 static void StringEquality(Assembler* assembler, intptr_t string_cid) { | 2162 static void StringEquality(Assembler* assembler, intptr_t string_cid) { |
2186 Label fall_through, is_true, is_false, loop; | 2163 Label fall_through, is_true, is_false, loop; |
2187 __ movl(EAX, Address(ESP, + 2 * kWordSize)); // This. | 2164 __ movl(EAX, Address(ESP, +2 * kWordSize)); // This. |
2188 __ movl(EBX, Address(ESP, + 1 * kWordSize)); // Other. | 2165 __ movl(EBX, Address(ESP, +1 * kWordSize)); // Other. |
2189 | 2166 |
2190 // Are identical? | 2167 // Are identical? |
2191 __ cmpl(EAX, EBX); | 2168 __ cmpl(EAX, EBX); |
2192 __ j(EQUAL, &is_true, Assembler::kNearJump); | 2169 __ j(EQUAL, &is_true, Assembler::kNearJump); |
2193 | 2170 |
2194 // Is other OneByteString? | 2171 // Is other OneByteString? |
2195 __ testl(EBX, Immediate(kSmiTagMask)); | 2172 __ testl(EBX, Immediate(kSmiTagMask)); |
2196 __ j(ZERO, &is_false); // Smi | 2173 __ j(ZERO, &is_false); // Smi |
2197 __ CompareClassId(EBX, string_cid, EDI); | 2174 __ CompareClassId(EBX, string_cid, EDI); |
2198 __ j(NOT_EQUAL, &fall_through, Assembler::kNearJump); | 2175 __ j(NOT_EQUAL, &fall_through, Assembler::kNearJump); |
2199 | 2176 |
2200 // Have same length? | 2177 // Have same length? |
2201 __ movl(EDI, FieldAddress(EAX, String::length_offset())); | 2178 __ movl(EDI, FieldAddress(EAX, String::length_offset())); |
2202 __ cmpl(EDI, FieldAddress(EBX, String::length_offset())); | 2179 __ cmpl(EDI, FieldAddress(EBX, String::length_offset())); |
2203 __ j(NOT_EQUAL, &is_false, Assembler::kNearJump); | 2180 __ j(NOT_EQUAL, &is_false, Assembler::kNearJump); |
2204 | 2181 |
2205 // Check contents, no fall-through possible. | 2182 // Check contents, no fall-through possible. |
2206 // TODO(srdjan): write a faster check. | 2183 // TODO(srdjan): write a faster check. |
2207 __ SmiUntag(EDI); | 2184 __ SmiUntag(EDI); |
2208 __ Bind(&loop); | 2185 __ Bind(&loop); |
2209 __ decl(EDI); | 2186 __ decl(EDI); |
2210 __ cmpl(EDI, Immediate(0)); | 2187 __ cmpl(EDI, Immediate(0)); |
2211 __ j(LESS, &is_true, Assembler::kNearJump); | 2188 __ j(LESS, &is_true, Assembler::kNearJump); |
2212 if (string_cid == kOneByteStringCid) { | 2189 if (string_cid == kOneByteStringCid) { |
2213 __ movzxb(ECX, | 2190 __ movzxb(ECX, |
2214 FieldAddress(EAX, EDI, TIMES_1, OneByteString::data_offset())); | 2191 FieldAddress(EAX, EDI, TIMES_1, OneByteString::data_offset())); |
2215 __ movzxb(EDX, | 2192 __ movzxb(EDX, |
2216 FieldAddress(EBX, EDI, TIMES_1, OneByteString::data_offset())); | 2193 FieldAddress(EBX, EDI, TIMES_1, OneByteString::data_offset())); |
2217 } else if (string_cid == kTwoByteStringCid) { | 2194 } else if (string_cid == kTwoByteStringCid) { |
2218 __ movzxw(ECX, | 2195 __ movzxw(ECX, |
2219 FieldAddress(EAX, EDI, TIMES_2, TwoByteString::data_offset())); | 2196 FieldAddress(EAX, EDI, TIMES_2, TwoByteString::data_offset())); |
2220 __ movzxw(EDX, | 2197 __ movzxw(EDX, |
2221 FieldAddress(EBX, EDI, TIMES_2, TwoByteString::data_offset())); | 2198 FieldAddress(EBX, EDI, TIMES_2, TwoByteString::data_offset())); |
2222 } else { | 2199 } else { |
2223 UNIMPLEMENTED(); | 2200 UNIMPLEMENTED(); |
2224 } | 2201 } |
2225 __ cmpl(ECX, EDX); | 2202 __ cmpl(ECX, EDX); |
2226 __ j(NOT_EQUAL, &is_false, Assembler::kNearJump); | 2203 __ j(NOT_EQUAL, &is_false, Assembler::kNearJump); |
2227 __ jmp(&loop, Assembler::kNearJump); | 2204 __ jmp(&loop, Assembler::kNearJump); |
2228 | 2205 |
2229 __ Bind(&is_true); | 2206 __ Bind(&is_true); |
2230 __ LoadObject(EAX, Bool::True()); | 2207 __ LoadObject(EAX, Bool::True()); |
2231 __ ret(); | 2208 __ ret(); |
(...skipping 47 matching lines...) |
2279 } | 2256 } |
2280 | 2257 |
2281 | 2258 |
2282 // On stack: user tag (+1), return-address (+0). | 2259 // On stack: user tag (+1), return-address (+0). |
2283 void Intrinsifier::UserTag_makeCurrent(Assembler* assembler) { | 2260 void Intrinsifier::UserTag_makeCurrent(Assembler* assembler) { |
2284 // EDI: Isolate. | 2261 // EDI: Isolate. |
2285 __ LoadIsolate(EDI); | 2262 __ LoadIsolate(EDI); |
2286 // EAX: Current user tag. | 2263 // EAX: Current user tag. |
2287 __ movl(EAX, Address(EDI, Isolate::current_tag_offset())); | 2264 __ movl(EAX, Address(EDI, Isolate::current_tag_offset())); |
2288 // EAX: UserTag. | 2265 // EAX: UserTag. |
2289 __ movl(EBX, Address(ESP, + 1 * kWordSize)); | 2266 __ movl(EBX, Address(ESP, +1 * kWordSize)); |
2290 // Set Isolate::current_tag_. | 2267 // Set Isolate::current_tag_. |
2291 __ movl(Address(EDI, Isolate::current_tag_offset()), EBX); | 2268 __ movl(Address(EDI, Isolate::current_tag_offset()), EBX); |
2292 // EAX: UserTag's tag. | 2269 // EAX: UserTag's tag. |
2293 __ movl(EBX, FieldAddress(EBX, UserTag::tag_offset())); | 2270 __ movl(EBX, FieldAddress(EBX, UserTag::tag_offset())); |
2294 // Set Isolate::user_tag_. | 2271 // Set Isolate::user_tag_. |
2295 __ movl(Address(EDI, Isolate::user_tag_offset()), EBX); | 2272 __ movl(Address(EDI, Isolate::user_tag_offset()), EBX); |
2296 __ ret(); | 2273 __ ret(); |
2297 } | 2274 } |
2298 | 2275 |
2299 | 2276 |
(...skipping 31 matching lines...) |
2331 __ Bind(&true_label); | 2308 __ Bind(&true_label); |
2332 __ LoadObject(EAX, Bool::True()); | 2309 __ LoadObject(EAX, Bool::True()); |
2333 __ ret(); | 2310 __ ret(); |
2334 } | 2311 } |
2335 | 2312 |
2336 #undef __ | 2313 #undef __ |
2337 | 2314 |
2338 } // namespace dart | 2315 } // namespace dart |
2339 | 2316 |
2340 #endif // defined TARGET_ARCH_IA32 | 2317 #endif // defined TARGET_ARCH_IA32 |