Chromium Code Reviews

Diff: runtime/vm/intrinsifier_ia32.cc

Issue 2974233002: VM: Re-format to use at most one newline between functions (Closed)
Patch Set: Rebase and merge (created 3 years, 5 months ago)

// Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
//
// The intrinsic code below is executed before a method has built its frame.
// The return address is on the stack and the arguments below it.
// Registers EDX (arguments descriptor) and ECX (function) must be preserved.
// Each intrinsification method returns true if the corresponding
// Dart method was intrinsified.

(...skipping 18 matching lines...)

// ECX: IC Data
// EDX: Arguments descriptor
// TOS: Return address
// The ECX, EDX registers can be destroyed only if there is no slow-path, i.e.
// if the intrinsified method always executes a return.
// The EBP register should not be modified, because it is used by the profiler.
// The THR register (see constants_ia32.h) must be preserved.
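
// Smi tagging primer (illustration only, not part of this CL; assumes
// kSmiTag == 0, kSmiTagShift == 1 and kSmiTagMask == 1, as asserted
// throughout this file):
//   tagged = value << 1;         // SmiTag: a clear low bit marks a Smi.
//   value = tagged >> 1;         // SmiUntag: arithmetic shift right.
//   is_smi = (tagged & 1) == 0;  // testl(reg, Immediate(kSmiTagMask)).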

#define __ assembler->

intptr_t Intrinsifier::ParameterSlotFromSp() {
  return 0;
}

void Intrinsifier::IntrinsicCallPrologue(Assembler* assembler) {
  COMPILE_ASSERT(CALLEE_SAVED_TEMP != ARGS_DESC_REG);

  assembler->Comment("IntrinsicCallPrologue");
  assembler->movl(CALLEE_SAVED_TEMP, ARGS_DESC_REG);
}

void Intrinsifier::IntrinsicCallEpilogue(Assembler* assembler) {
  assembler->Comment("IntrinsicCallEpilogue");
  assembler->movl(ARGS_DESC_REG, CALLEE_SAVED_TEMP);
}

static intptr_t ComputeObjectArrayTypeArgumentsOffset() {
  const Library& core_lib = Library::Handle(Library::CoreLibrary());
  const Class& cls =
      Class::Handle(core_lib.LookupClassAllowPrivate(Symbols::_List()));
  ASSERT(!cls.IsNull());
  ASSERT(cls.NumTypeArguments() == 1);
  const intptr_t field_offset = cls.type_arguments_field_offset();
  ASSERT(field_offset != Class::kNoTypeArguments);
  return field_offset;
}

// Intrinsify only for Smi value and index. Non-smi values need a store buffer
// update. Array length is always a Smi.
void Intrinsifier::ObjectArraySetIndexed(Assembler* assembler) {
  Label fall_through;
  if (Isolate::Current()->type_checks()) {
    const intptr_t type_args_field_offset =
        ComputeObjectArrayTypeArgumentsOffset();
    // Inline simple tests (Smi, null), fallthrough if not positive.
    const Immediate& raw_null =
        Immediate(reinterpret_cast<intptr_t>(Object::null()));
(...skipping 35 matching lines...)
  ASSERT(kSmiTagShift == 1);
  // Destroy ECX (ic data) as we will not continue in the function.
  __ movl(ECX, Address(ESP, +1 * kWordSize));  // Value.
  __ StoreIntoObject(EAX, FieldAddress(EAX, EBX, TIMES_2, Array::data_offset()),
                     ECX);
  // Caller is responsible for preserving the value if necessary.
  __ ret();
  __ Bind(&fall_through);
}

// Allocate a GrowableObjectArray using the backing array specified.
// On stack: type argument (+2), data (+1), return-address (+0).
void Intrinsifier::GrowableArray_Allocate(Assembler* assembler) {
  // This snippet of inlined code uses the following registers:
  // EAX, EBX
  // and the newly allocated object is returned in EAX.
  const intptr_t kTypeArgumentsOffset = 2 * kWordSize;
  const intptr_t kArrayOffset = 1 * kWordSize;
  Label fall_through;

(...skipping 14 matching lines...)
  __ StoreIntoObjectNoBarrier(
      EAX, FieldAddress(EAX, GrowableObjectArray::type_arguments_offset()),
      EBX);

  __ ZeroInitSmiField(FieldAddress(EAX, GrowableObjectArray::length_offset()));
  __ ret();  // returns the newly allocated object in EAX.

  __ Bind(&fall_through);
}

// Add an element to a growable array if it doesn't need to grow; otherwise
// call into regular code.
// On stack: growable array (+2), value (+1), return-address (+0).
void Intrinsifier::GrowableArray_add(Assembler* assembler) {
  // In checked mode we need to type-check the incoming argument.
  if (Isolate::Current()->type_checks()) return;

  Label fall_through;
  __ movl(EAX, Address(ESP, +2 * kWordSize));  // Array.
  __ movl(EBX, FieldAddress(EAX, GrowableObjectArray::length_offset()));
  // EBX: length.
  __ movl(EDI, FieldAddress(EAX, GrowableObjectArray::data_offset()));
  // EDI: data.
  // Compare length with capacity.
  __ cmpl(EBX, FieldAddress(EDI, Array::length_offset()));
  __ j(EQUAL, &fall_through);  // Must grow data.
  __ IncrementSmiField(FieldAddress(EAX, GrowableObjectArray::length_offset()),
                       1);
  __ movl(EAX, Address(ESP, +1 * kWordSize));  // Value.
  ASSERT(kSmiTagShift == 1);
  __ StoreIntoObject(EDI, FieldAddress(EDI, EBX, TIMES_2, Array::data_offset()),
                     EAX);
  const Immediate& raw_null =
      Immediate(reinterpret_cast<int32_t>(Object::null()));
  __ movl(EAX, raw_null);
  __ ret();
  __ Bind(&fall_through);
}
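
// Pseudo code for the fast path above (illustration only, not part of this
// CL; checked mode bails out before this point):
//   if (array._length == array._data.length) goto fall_through;  // Grow.
//   array._data[array._length++] = value;
//   return null;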

#define TYPED_ARRAY_ALLOCATION(type_name, cid, max_len, scale_factor)         \
  Label fall_through;                                                         \
  const intptr_t kArrayLengthStackOffset = 1 * kWordSize;                     \
  NOT_IN_PRODUCT(__ MaybeTraceAllocation(cid, EDI, &fall_through, false));    \
  __ movl(EDI, Address(ESP, kArrayLengthStackOffset)); /* Array length. */    \
  /* Check that length is a positive Smi. */                                  \
  /* EDI: requested array length argument. */                                 \
  __ testl(EDI, Immediate(kSmiTagMask));                                      \
  __ j(NOT_ZERO, &fall_through);                                              \
  __ cmpl(EDI, Immediate(0));                                                 \
(...skipping 75 matching lines...)
  __ cmpl(EDI, EBX);                                                          \
  __ j(ABOVE_EQUAL, &done, Assembler::kNearJump);                             \
  __ movl(Address(EDI, 0), ECX);                                              \
  __ addl(EDI, Immediate(kWordSize));                                         \
  __ jmp(&init_loop, Assembler::kNearJump);                                   \
  __ Bind(&done);                                                             \
                                                                              \
  __ ret();                                                                   \
  __ Bind(&fall_through);

static ScaleFactor GetScaleFactor(intptr_t size) {
  switch (size) {
    case 1:
      return TIMES_1;
    case 2:
      return TIMES_2;
    case 4:
      return TIMES_4;
    case 8:
      return TIMES_8;
    case 16:
      return TIMES_16;
  }
  UNREACHABLE();
  return static_cast<ScaleFactor>(0);
}

#define TYPED_DATA_ALLOCATOR(clazz)                                           \
  void Intrinsifier::TypedData_##clazz##_factory(Assembler* assembler) {      \
    intptr_t size = TypedData::ElementSizeInBytes(kTypedData##clazz##Cid);    \
    intptr_t max_len = TypedData::MaxElements(kTypedData##clazz##Cid);        \
    ScaleFactor scale = GetScaleFactor(size);                                 \
    TYPED_ARRAY_ALLOCATION(TypedData, kTypedData##clazz##Cid, max_len, scale);\
  }
CLASS_LIST_TYPED_DATA(TYPED_DATA_ALLOCATOR)
#undef TYPED_DATA_ALLOCATOR

// Tests if the two topmost arguments are smis, jumps to label not_smi if not.
// Topmost argument is in EAX.
static void TestBothArgumentsSmis(Assembler* assembler, Label* not_smi) {
  __ movl(EAX, Address(ESP, +1 * kWordSize));
  __ movl(EBX, Address(ESP, +2 * kWordSize));
  __ orl(EBX, EAX);
  __ testl(EBX, Immediate(kSmiTagMask));
  __ j(NOT_ZERO, not_smi, Assembler::kNearJump);
}
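
// Why one test covers both arguments (illustration only, not part of this
// CL): a Smi has tag bit 0, so the OR of two words has a clear tag bit only
// if both tag bits are clear.
//   bool BothAreSmis(uint32_t a, uint32_t b) {
//     return ((a | b) & kSmiTagMask) == 0;
//   }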

void Intrinsifier::Integer_addFromInteger(Assembler* assembler) {
  Label fall_through;
  TestBothArgumentsSmis(assembler, &fall_through);
  __ addl(EAX, Address(ESP, +2 * kWordSize));
  __ j(OVERFLOW, &fall_through, Assembler::kNearJump);
  // Result is in EAX.
  __ ret();
  __ Bind(&fall_through);
}

void Intrinsifier::Integer_add(Assembler* assembler) {
  Integer_addFromInteger(assembler);
}

void Intrinsifier::Integer_subFromInteger(Assembler* assembler) {
  Label fall_through;
  TestBothArgumentsSmis(assembler, &fall_through);
  __ subl(EAX, Address(ESP, +2 * kWordSize));
  __ j(OVERFLOW, &fall_through, Assembler::kNearJump);
  // Result is in EAX.
  __ ret();
  __ Bind(&fall_through);
}

void Intrinsifier::Integer_sub(Assembler* assembler) {
  Label fall_through;
  TestBothArgumentsSmis(assembler, &fall_through);
  __ movl(EBX, EAX);
  __ movl(EAX, Address(ESP, +2 * kWordSize));
  __ subl(EAX, EBX);
  __ j(OVERFLOW, &fall_through, Assembler::kNearJump);
  // Result is in EAX.
  __ ret();
  __ Bind(&fall_through);
}

void Intrinsifier::Integer_mulFromInteger(Assembler* assembler) {
  Label fall_through;
  TestBothArgumentsSmis(assembler, &fall_through);
  ASSERT(kSmiTag == 0);  // Adjust code below if not the case.
  __ SmiUntag(EAX);
  __ imull(EAX, Address(ESP, +2 * kWordSize));
  __ j(OVERFLOW, &fall_through, Assembler::kNearJump);
  // Result is in EAX.
  __ ret();
  __ Bind(&fall_through);
}

void Intrinsifier::Integer_mul(Assembler* assembler) {
  Integer_mulFromInteger(assembler);
}

// Optimizations:
// - result is 0 if:
//   - left is 0
//   - left equals right
// - result is left if
//   - left > 0 && left < right
// EAX: Tagged left (dividend).
// EBX: Tagged right (divisor).
// Returns:
// EDX: Untagged fallthrough result (remainder to be adjusted), or
(...skipping 19 matching lines...)
  __ xorl(EAX, EAX);
  __ ret();

  __ Bind(&modulo);
  __ SmiUntag(EBX);
  __ SmiUntag(EAX);
  __ cdq();
  __ idivl(EBX);
}

// Implementation:
//   res = left % right;
//   if (res < 0) {
//     if (right < 0) {
//       res = res - right;
//     } else {
//       res = res + right;
//     }
//   }
void Intrinsifier::Integer_moduloFromInteger(Assembler* assembler) {
(...skipping 22 matching lines...)
  __ subl(EAX, EBX);

  __ Bind(&done);
  // The remainder of two smis is always a smi, no overflow check needed.
  __ SmiTag(EAX);
  __ ret();

  __ Bind(&fall_through);
}
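
// The fixup above turns the sign-of-dividend remainder produced by idivl
// into Dart's non-negative modulo (illustration only, not part of this CL):
//   int32_t DartModulo(int32_t left, int32_t right) {
//     int32_t res = left % right;  // Truncated: -5 % 3 == -2.
//     if (res < 0) res += (right < 0) ? -right : right;
//     return res;  // Adjusted: -5 % 3 == 1.
//   }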

void Intrinsifier::Integer_truncDivide(Assembler* assembler) {
  Label fall_through;
  TestBothArgumentsSmis(assembler, &fall_through);
  // EAX: right argument (divisor).
  __ cmpl(EAX, Immediate(0));
  __ j(EQUAL, &fall_through, Assembler::kNearJump);
  __ movl(EBX, EAX);
  __ SmiUntag(EBX);
  __ movl(EAX, Address(ESP, +2 * kWordSize));  // Left argument (dividend).
  __ SmiUntag(EAX);
  __ pushl(EDX);  // Preserve EDX in case of 'fall_through'.
  __ cdq();
  __ idivl(EBX);
  __ popl(EDX);
  // Check the corner case of dividing MIN_SMI by -1, in which case we cannot
  // tag the result.
  __ cmpl(EAX, Immediate(0x40000000));
  __ j(EQUAL, &fall_through);
  __ SmiTag(EAX);
  __ ret();
  __ Bind(&fall_through);
}
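
// Why 0x40000000 is the only quotient checked (illustration only, not part
// of this CL): Smis carry a 31-bit payload, so MIN_SMI == -0x40000000 and
// the single quotient that escapes the Smi range is MIN_SMI ~/ -1:
//   bool QuotientFitsInSmi(int32_t quotient) {
//     return quotient != 0x40000000;  // 0x40000000 > kMaxSmi (0x3FFFFFFF).
//   }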

void Intrinsifier::Integer_negate(Assembler* assembler) {
  Label fall_through;
  __ movl(EAX, Address(ESP, +1 * kWordSize));
  __ testl(EAX, Immediate(kSmiTagMask));
  __ j(NOT_ZERO, &fall_through, Assembler::kNearJump);  // Non-smi value.
  __ negl(EAX);
  __ j(OVERFLOW, &fall_through, Assembler::kNearJump);
  // Result is in EAX.
  __ ret();
  __ Bind(&fall_through);
}

void Intrinsifier::Integer_bitAndFromInteger(Assembler* assembler) {
  Label fall_through;
  TestBothArgumentsSmis(assembler, &fall_through);
  __ movl(EBX, Address(ESP, +2 * kWordSize));
  __ andl(EAX, EBX);
  // Result is in EAX.
  __ ret();
  __ Bind(&fall_through);
}

void Intrinsifier::Integer_bitAnd(Assembler* assembler) {
  Integer_bitAndFromInteger(assembler);
}

void Intrinsifier::Integer_bitOrFromInteger(Assembler* assembler) {
  Label fall_through;
  TestBothArgumentsSmis(assembler, &fall_through);
  __ movl(EBX, Address(ESP, +2 * kWordSize));
  __ orl(EAX, EBX);
  // Result is in EAX.
  __ ret();
  __ Bind(&fall_through);
}

void Intrinsifier::Integer_bitOr(Assembler* assembler) {
  Integer_bitOrFromInteger(assembler);
}

void Intrinsifier::Integer_bitXorFromInteger(Assembler* assembler) {
  Label fall_through;
  TestBothArgumentsSmis(assembler, &fall_through);
  __ movl(EBX, Address(ESP, +2 * kWordSize));
  __ xorl(EAX, EBX);
  // Result is in EAX.
  __ ret();
  __ Bind(&fall_through);
}

void Intrinsifier::Integer_bitXor(Assembler* assembler) {
  Integer_bitXorFromInteger(assembler);
}

void Intrinsifier::Integer_shl(Assembler* assembler) {
  ASSERT(kSmiTagShift == 1);
  ASSERT(kSmiTag == 0);
  Label fall_through, overflow;
  TestBothArgumentsSmis(assembler, &fall_through);
  // Shift value is in EAX. Compare with tagged Smi.
  __ cmpl(EAX, Immediate(Smi::RawValue(Smi::kBits)));
  __ j(ABOVE_EQUAL, &fall_through, Assembler::kNearJump);

  __ SmiUntag(EAX);
(...skipping 28 matching lines...)
  __ TryAllocate(mint_class, &fall_through, Assembler::kNearJump,
                 EAX,  // Result register.
                 ECX);  // temp
  // EBX and EDI are not objects but integer values.
  __ movl(FieldAddress(EAX, Mint::value_offset()), EBX);
  __ movl(FieldAddress(EAX, Mint::value_offset() + kWordSize), EDI);
  __ ret();
  __ Bind(&fall_through);
}

static void Push64SmiOrMint(Assembler* assembler,
                            Register reg,
                            Register tmp,
                            Label* not_smi_or_mint) {
  Label not_smi, done;
  __ testl(reg, Immediate(kSmiTagMask));
  __ j(NOT_ZERO, &not_smi, Assembler::kNearJump);
  __ SmiUntag(reg);
  // Sign extend to 64 bit.
  __ movl(tmp, reg);
  __ sarl(tmp, Immediate(31));
  __ pushl(tmp);
  __ pushl(reg);
  __ jmp(&done);
  __ Bind(&not_smi);
  __ CompareClassId(reg, kMintCid, tmp);
  __ j(NOT_EQUAL, not_smi_or_mint);
  // Mint.
  __ pushl(FieldAddress(reg, Mint::value_offset() + kWordSize));
  __ pushl(FieldAddress(reg, Mint::value_offset()));
  __ Bind(&done);
}

static void CompareIntegers(Assembler* assembler, Condition true_condition) {
  Label try_mint_smi, is_true, is_false, drop_two_fall_through, fall_through;
  TestBothArgumentsSmis(assembler, &try_mint_smi);
  // EAX contains the right argument.
  __ cmpl(Address(ESP, +2 * kWordSize), EAX);
  __ j(true_condition, &is_true, Assembler::kNearJump);
  __ Bind(&is_false);
  __ LoadObject(EAX, Bool::False());
  __ ret();
  __ Bind(&is_true);
(...skipping 38 matching lines...)
  __ cmpl(EAX, EBX);  // cmpl left.LO, right.LO.
  __ j(lo_false_cond, &is_false, Assembler::kNearJump);
  // Else is true.
  __ jmp(&is_true);

  __ Bind(&drop_two_fall_through);
  __ Drop(2);
  __ Bind(&fall_through);
}
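
// The Mint path compares 64-bit values by halves (illustration only, not
// part of this CL): a signed compare of the high words decides unless they
// are equal, in which case an unsigned compare of the low words decides.
//   bool Less64(int32_t l_hi, uint32_t l_lo, int32_t r_hi, uint32_t r_lo) {
//     if (l_hi != r_hi) return l_hi < r_hi;  // Signed.
//     return l_lo < r_lo;                    // Unsigned.
//   }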

void Intrinsifier::Integer_greaterThanFromInt(Assembler* assembler) {
  CompareIntegers(assembler, LESS);
}

void Intrinsifier::Integer_lessThan(Assembler* assembler) {
  Integer_greaterThanFromInt(assembler);
}

void Intrinsifier::Integer_greaterThan(Assembler* assembler) {
  CompareIntegers(assembler, GREATER);
}

void Intrinsifier::Integer_lessEqualThan(Assembler* assembler) {
  CompareIntegers(assembler, LESS_EQUAL);
}

void Intrinsifier::Integer_greaterEqualThan(Assembler* assembler) {
  CompareIntegers(assembler, GREATER_EQUAL);
}

// This is called for Smi, Mint and Bigint receivers. The right argument
// can be Smi, Mint, Bigint or double.
void Intrinsifier::Integer_equalToInteger(Assembler* assembler) {
  Label fall_through, true_label, check_for_mint;
  // For integer receiver '===' check first.
  __ movl(EAX, Address(ESP, +1 * kWordSize));
  __ cmpl(EAX, Address(ESP, +2 * kWordSize));
  __ j(EQUAL, &true_label, Assembler::kNearJump);
  __ movl(EBX, Address(ESP, +2 * kWordSize));
  __ orl(EAX, EBX);
(...skipping 30 matching lines...)
  __ movl(EAX, Address(ESP, +1 * kWordSize));  // Right argument.
  __ testl(EAX, Immediate(kSmiTagMask));
  __ j(NOT_ZERO, &fall_through);
  __ LoadObject(EAX, Bool::False());
  __ ret();
  // TODO(srdjan): Implement Mint == Mint comparison.

  __ Bind(&fall_through);
}

void Intrinsifier::Integer_equal(Assembler* assembler) {
  Integer_equalToInteger(assembler);
}

void Intrinsifier::Integer_sar(Assembler* assembler) {
  Label fall_through, shift_count_ok;
  TestBothArgumentsSmis(assembler, &fall_through);
  // Can destroy ECX since we are not falling through.
  const Immediate& count_limit = Immediate(0x1F);
  // Check that the count is not larger than what the hardware can handle.
  // For shifting right a Smi the result is the same for all numbers
  // >= count_limit.
  __ SmiUntag(EAX);
  // Negative counts throw an exception.
  __ cmpl(EAX, Immediate(0));
  __ j(LESS, &fall_through, Assembler::kNearJump);
  __ cmpl(EAX, count_limit);
  __ j(LESS_EQUAL, &shift_count_ok, Assembler::kNearJump);
  __ movl(EAX, count_limit);
  __ Bind(&shift_count_ok);
  __ movl(ECX, EAX);  // Shift amount must be in ECX.
  __ movl(EAX, Address(ESP, +2 * kWordSize));  // Value.
  __ SmiUntag(EAX);  // Value.
  __ sarl(EAX, ECX);
  __ SmiTag(EAX);
  __ ret();
  __ Bind(&fall_through);
}

// Argument is Smi (receiver).
void Intrinsifier::Smi_bitNegate(Assembler* assembler) {
  __ movl(EAX, Address(ESP, +1 * kWordSize));  // Receiver.
  __ notl(EAX);
  __ andl(EAX, Immediate(~kSmiTagMask));  // Remove inverted smi-tag.
  __ ret();
}
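
// notl also flips the tag bit, so one andl re-tags the result (illustration
// only, not part of this CL): ~(x << 1) & ~1 == (~x) << 1.
//   uint32_t TaggedBitNegate(uint32_t tagged) {
//     return ~tagged & ~1u;  // == SmiTag(~SmiUntag(tagged))
//   }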

void Intrinsifier::Smi_bitLength(Assembler* assembler) {
  ASSERT(kSmiTagShift == 1);
  __ movl(EAX, Address(ESP, +1 * kWordSize));  // Receiver.
  // XOR with sign bit to complement bits if value is negative.
  __ movl(ECX, EAX);
  __ sarl(ECX, Immediate(31));  // All 0 or all 1.
  __ xorl(EAX, ECX);
  // BSR does not write the destination register if source is zero. Put a 1 in
  // the Smi tag bit to ensure BSR writes to the destination register.
  __ orl(EAX, Immediate(kSmiTagMask));
  __ bsrl(EAX, EAX);
  __ SmiTag(EAX);
  __ ret();
}
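
// Equivalent C for the BSR trick (illustration only, not part of this CL):
// for v >= 0, bitLength(v) == bsr(2*v + 1), so running BSR directly on the
// tagged value (with the tag bit forced to 1) needs no untagging.
//   int SmiBitLength(int32_t v) {
//     uint32_t u = (v < 0) ? ~(uint32_t)v : (uint32_t)v;
//     int len = 0;
//     while (u != 0) { u >>= 1; ++len; }  // == bsr(2*u + 1)
//     return len;
//   }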

void Intrinsifier::Smi_bitAndFromSmi(Assembler* assembler) {
  Integer_bitAndFromInteger(assembler);
}

void Intrinsifier::Bigint_lsh(Assembler* assembler) {
  // static void _lsh(Uint32List x_digits, int x_used, int n,
  //                  Uint32List r_digits)

  // Preserve THR to free ESI.
  __ pushl(THR);
  ASSERT(THR == ESI);

  __ movl(EDI, Address(ESP, 5 * kWordSize));  // x_digits
  __ movl(ECX, Address(ESP, 3 * kWordSize));  // n is Smi
(...skipping 24 matching lines...)
  __ Bind(&last);
  __ shldl(EDX, ESI, ECX);  // ESI == 0.
  __ movl(Address(EBX, 0), EDX);

  // Restore THR and return.
  __ popl(THR);
  // Returning Object::null() is not required, since this method is private.
  __ ret();
}

void Intrinsifier::Bigint_rsh(Assembler* assembler) {
  // static void _rsh(Uint32List x_digits, int x_used, int n,
  //                  Uint32List r_digits)

  // Preserve THR to free ESI.
  __ pushl(THR);
  ASSERT(THR == ESI);

  __ movl(EDI, Address(ESP, 5 * kWordSize));  // x_digits
  __ movl(ECX, Address(ESP, 3 * kWordSize));  // n is Smi
(...skipping 25 matching lines...)
  __ Bind(&last);
  __ shrdl(EDX, ESI, ECX);  // ESI == 0.
  __ movl(Address(EBX, 0), EDX);

  // Restore THR and return.
  __ popl(THR);
  // Returning Object::null() is not required, since this method is private.
  __ ret();
}

void Intrinsifier::Bigint_absAdd(Assembler* assembler) {
  // static void _absAdd(Uint32List digits, int used,
  //                     Uint32List a_digits, int a_used,
  //                     Uint32List r_digits)

  // Preserve THR to free ESI.
  __ pushl(THR);
  ASSERT(THR == ESI);

  __ movl(EDI, Address(ESP, 6 * kWordSize));  // digits
(...skipping 39 matching lines...)
  __ movl(EAX, Immediate(0));
  __ adcl(EAX, Immediate(0));
  __ movl(FieldAddress(EBX, EDX, TIMES_4, TypedData::data_offset()), EAX);

  // Restore THR and return.
  __ popl(THR);
  // Returning Object::null() is not required, since this method is private.
  __ ret();
}

void Intrinsifier::Bigint_absSub(Assembler* assembler) {
  // static void _absSub(Uint32List digits, int used,
  //                     Uint32List a_digits, int a_used,
  //                     Uint32List r_digits)

  // Preserve THR to free ESI.
  __ pushl(THR);
  ASSERT(THR == ESI);

  __ movl(EDI, Address(ESP, 6 * kWordSize));  // digits
(...skipping 35 matching lines...)
  __ decl(ECX);  // Does not affect carry flag.
  __ j(NOT_ZERO, &carry_loop, Assembler::kNearJump);

  __ Bind(&done);
  // Restore THR and return.
  __ popl(THR);
  // Returning Object::null() is not required, since this method is private.
  __ ret();
}

void Intrinsifier::Bigint_mulAdd(Assembler* assembler) {
  // Pseudo code:
  // static int _mulAdd(Uint32List x_digits, int xi,
  //                    Uint32List m_digits, int i,
  //                    Uint32List a_digits, int j, int n) {
  //   uint32_t x = x_digits[xi >> 1];  // xi is Smi.
  //   if (x == 0 || n == 0) {
  //     return 1;
  //   }
  //   uint32_t* mip = &m_digits[i >> 1];  // i is Smi.
(...skipping 99 matching lines...)
  __ Bind(&done);
  __ Drop(1);  // n
  // Restore THR and return.
  __ popl(THR);

  __ Bind(&no_op);
  __ movl(EAX, Immediate(Smi::RawValue(1)));  // One digit processed.
  __ ret();
}
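
// The elided inner loop accumulates 32x32->64-bit products (a sketch of the
// pseudo code above, not part of this CL); the sum cannot overflow 64 bits:
//   uint32_t c = 0;
//   do {
//     uint64_t t = (uint64_t)x * *mip++ + *ajp + c;
//     *ajp++ = (uint32_t)t;      // t mod DIGIT_BASE
//     c = (uint32_t)(t >> 32);   // t div DIGIT_BASE
//   } while (--n > 0);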

void Intrinsifier::Bigint_sqrAdd(Assembler* assembler) {
  // Pseudo code:
  // static int _sqrAdd(Uint32List x_digits, int i,
  //                    Uint32List a_digits, int used) {
  //   uint32_t* xip = &x_digits[i >> 1];  // i is Smi.
  //   uint32_t x = *xip++;
  //   if (x == 0) return 1;
  //   uint32_t* ajp = &a_digits[i];  // j == 2*i, i is Smi.
  //   uint32_t aj = *ajp;
  //   uint64_t t = x*x + aj;
(...skipping 112 matching lines...)
  __ movl(Address(ESI, Bigint::kBytesPerDigit), EDX);

  // Restore THR and return.
  __ Drop(3);
  __ popl(THR);
  __ Bind(&x_zero);
  __ movl(EAX, Immediate(Smi::RawValue(1)));  // One digit processed.
  __ ret();
}

void Intrinsifier::Bigint_estQuotientDigit(Assembler* assembler) {
  // Pseudo code:
  // static int _estQuotientDigit(Uint32List args, Uint32List digits, int i) {
  //   uint32_t yt = args[_YT];  // _YT == 1.
  //   uint32_t* dp = &digits[i >> 1];  // i is Smi.
  //   uint32_t dh = dp[0];  // dh == digits[i >> 1].
  //   uint32_t qd;
  //   if (dh == yt) {
  //     qd = DIGIT_MASK;
  //   } else {
(...skipping 36 matching lines...)
  __ Bind(&return_qd);
  // args[2] = qd
  __ movl(
      FieldAddress(EDI, TypedData::data_offset() + 2 * Bigint::kBytesPerDigit),
      EAX);

  __ movl(EAX, Immediate(Smi::RawValue(1)));  // One digit processed.
  __ ret();
}

void Intrinsifier::Montgomery_mulMod(Assembler* assembler) {
  // Pseudo code:
  // static int _mulMod(Uint32List args, Uint32List digits, int i) {
  //   uint32_t rho = args[_RHO];  // _RHO == 2.
  //   uint32_t d = digits[i >> 1];  // i is Smi.
  //   uint64_t t = rho*d;
  //   args[_MU] = t mod DIGIT_BASE;  // _MU == 4.
  //   return 1;
  // }

(...skipping 14 matching lines...)

  // args[4] = t mod DIGIT_BASE = low32(t)
  __ movl(
      FieldAddress(EDI, TypedData::data_offset() + 4 * Bigint::kBytesPerDigit),
      EAX);

  __ movl(EAX, Immediate(Smi::RawValue(1)));  // One digit processed.
  __ ret();
}

// Checks if the last argument is a double, jumps to label 'is_smi' if Smi
// (easy to convert to double), otherwise jumps to label 'not_double_smi'.
// Returns the last argument in EAX.
static void TestLastArgumentIsDouble(Assembler* assembler,
                                     Label* is_smi,
                                     Label* not_double_smi) {
  __ movl(EAX, Address(ESP, +1 * kWordSize));
  __ testl(EAX, Immediate(kSmiTagMask));
  __ j(ZERO, is_smi, Assembler::kNearJump);  // Jump if Smi.
  __ CompareClassId(EAX, kDoubleCid, EBX);
  __ j(NOT_EQUAL, not_double_smi, Assembler::kNearJump);
  // Fall through if double.
}

// Both arguments on stack, arg0 (left) is a double, arg1 (right) is of
// unknown type. Returns the true or false object in register EAX. Any NaN
// argument returns false. Any non-double arg1 causes control flow to fall
// through to the slow case (compiled method body).
static void CompareDoubles(Assembler* assembler, Condition true_condition) {
  Label fall_through, is_false, is_true, is_smi, double_op;
  TestLastArgumentIsDouble(assembler, &is_smi, &fall_through);
  // Both arguments are double, right operand is in EAX.
  __ movsd(XMM1, FieldAddress(EAX, Double::value_offset()));
  __ Bind(&double_op);
  __ movl(EAX, Address(ESP, +2 * kWordSize));  // Left argument.
  __ movsd(XMM0, FieldAddress(EAX, Double::value_offset()));
  __ comisd(XMM0, XMM1);
  __ j(PARITY_EVEN, &is_false, Assembler::kNearJump);  // NaN -> false.
  __ j(true_condition, &is_true, Assembler::kNearJump);
  // Fall through false.
  __ Bind(&is_false);
  __ LoadObject(EAX, Bool::False());
  __ ret();
  __ Bind(&is_true);
  __ LoadObject(EAX, Bool::True());
  __ ret();
  __ Bind(&is_smi);
  __ SmiUntag(EAX);
  __ cvtsi2sd(XMM1, EAX);
  __ jmp(&double_op);
  __ Bind(&fall_through);
}
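
// comisd reports an unordered operand pair through the parity flag
// (illustration only, not part of this CL), so the PARITY_EVEN jump makes
// every comparison involving NaN answer false, as IEEE 754 and Dart require:
//   bool DoubleLess(double left, double right) {
//     return left < right;  // In C as in the intrinsic: false if either
//   }                       // operand is NaN.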

// arg0 is Double, arg1 is unknown.
void Intrinsifier::Double_greaterThan(Assembler* assembler) {
  CompareDoubles(assembler, ABOVE);
}

// arg0 is Double, arg1 is unknown.
void Intrinsifier::Double_greaterEqualThan(Assembler* assembler) {
  CompareDoubles(assembler, ABOVE_EQUAL);
}

// arg0 is Double, arg1 is unknown.
void Intrinsifier::Double_lessThan(Assembler* assembler) {
  CompareDoubles(assembler, BELOW);
}

// arg0 is Double, arg1 is unknown.
void Intrinsifier::Double_equal(Assembler* assembler) {
  CompareDoubles(assembler, EQUAL);
}

// arg0 is Double, arg1 is unknown.
void Intrinsifier::Double_lessEqualThan(Assembler* assembler) {
  CompareDoubles(assembler, BELOW_EQUAL);
}

// Expects left argument to be double (receiver). Right argument is unknown.
// Both arguments are on stack.
static void DoubleArithmeticOperations(Assembler* assembler, Token::Kind kind) {
  Label fall_through, is_smi, double_op;
  TestLastArgumentIsDouble(assembler, &is_smi, &fall_through);
  // Both arguments are double, right operand is in EAX.
  __ movsd(XMM1, FieldAddress(EAX, Double::value_offset()));
  __ Bind(&double_op);
  __ movl(EAX, Address(ESP, +2 * kWordSize));  // Left argument.
  __ movsd(XMM0, FieldAddress(EAX, Double::value_offset()));
(...skipping 20 matching lines...)
                 EBX);
  __ movsd(FieldAddress(EAX, Double::value_offset()), XMM0);
  __ ret();
  __ Bind(&is_smi);
  __ SmiUntag(EAX);
  __ cvtsi2sd(XMM1, EAX);
  __ jmp(&double_op);
  __ Bind(&fall_through);
}

void Intrinsifier::Double_add(Assembler* assembler) {
  DoubleArithmeticOperations(assembler, Token::kADD);
}

void Intrinsifier::Double_mul(Assembler* assembler) {
  DoubleArithmeticOperations(assembler, Token::kMUL);
}

void Intrinsifier::Double_sub(Assembler* assembler) {
  DoubleArithmeticOperations(assembler, Token::kSUB);
}

void Intrinsifier::Double_div(Assembler* assembler) {
  DoubleArithmeticOperations(assembler, Token::kDIV);
}

// Left is double, right is integer (Bigint, Mint or Smi).
void Intrinsifier::Double_mulFromInteger(Assembler* assembler) {
  Label fall_through;
  // Only smis allowed.
  __ movl(EAX, Address(ESP, +1 * kWordSize));
  __ testl(EAX, Immediate(kSmiTagMask));
  __ j(NOT_ZERO, &fall_through, Assembler::kNearJump);
  // Is Smi.
  __ SmiUntag(EAX);
  __ cvtsi2sd(XMM1, EAX);
  __ movl(EAX, Address(ESP, +2 * kWordSize));
  __ movsd(XMM0, FieldAddress(EAX, Double::value_offset()));
  __ mulsd(XMM0, XMM1);
  const Class& double_class =
      Class::Handle(Isolate::Current()->object_store()->double_class());
  __ TryAllocate(double_class, &fall_through, Assembler::kNearJump,
                 EAX,  // Result register.
                 EBX);
  __ movsd(FieldAddress(EAX, Double::value_offset()), XMM0);
  __ ret();
  __ Bind(&fall_through);
}

void Intrinsifier::DoubleFromInteger(Assembler* assembler) {
  Label fall_through;
  __ movl(EAX, Address(ESP, +1 * kWordSize));
  __ testl(EAX, Immediate(kSmiTagMask));
  __ j(NOT_ZERO, &fall_through, Assembler::kNearJump);
  // Is Smi.
  __ SmiUntag(EAX);
  __ cvtsi2sd(XMM0, EAX);
  const Class& double_class =
      Class::Handle(Isolate::Current()->object_store()->double_class());
  __ TryAllocate(double_class, &fall_through, Assembler::kNearJump,
                 EAX,  // Result register.
                 EBX);
  __ movsd(FieldAddress(EAX, Double::value_offset()), XMM0);
  __ ret();
  __ Bind(&fall_through);
}
1554 void Intrinsifier::Double_getIsNaN(Assembler* assembler) { 1490 void Intrinsifier::Double_getIsNaN(Assembler* assembler) {
1555 Label is_true; 1491 Label is_true;
1556 __ movl(EAX, Address(ESP, +1 * kWordSize)); 1492 __ movl(EAX, Address(ESP, +1 * kWordSize));
1557 __ movsd(XMM0, FieldAddress(EAX, Double::value_offset())); 1493 __ movsd(XMM0, FieldAddress(EAX, Double::value_offset()));
1558 __ comisd(XMM0, XMM0); 1494 __ comisd(XMM0, XMM0);
1559 __ j(PARITY_EVEN, &is_true, Assembler::kNearJump); // NaN -> true. 1495 __ j(PARITY_EVEN, &is_true, Assembler::kNearJump); // NaN -> true.
1560 __ LoadObject(EAX, Bool::False()); 1496 __ LoadObject(EAX, Bool::False());
1561 __ ret(); 1497 __ ret();
1562 __ Bind(&is_true); 1498 __ Bind(&is_true);
1563 __ LoadObject(EAX, Bool::True()); 1499 __ LoadObject(EAX, Bool::True());
1564 __ ret(); 1500 __ ret();
1565 } 1501 }
1566 1502
1567
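The comisd self-compare works because an unordered comparison (either operand NaN) sets the parity flag. In portable C++ the same test is the classic self-inequality check; a minimal sketch:

bool GetIsNaN(double x) {
  return x != x;  // Only NaN compares unequal to itself (comisd sets PF).
}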
1568 void Intrinsifier::Double_getIsInfinite(Assembler* assembler) { 1503 void Intrinsifier::Double_getIsInfinite(Assembler* assembler) {
1569 Label not_inf; 1504 Label not_inf;
1570 __ movl(EAX, Address(ESP, +1 * kWordSize)); 1505 __ movl(EAX, Address(ESP, +1 * kWordSize));
1571 __ movl(EBX, FieldAddress(EAX, Double::value_offset())); 1506 __ movl(EBX, FieldAddress(EAX, Double::value_offset()));
1572 1507
1573 // If the low word isn't zero, then it isn't infinity. 1508 // If the low word isn't zero, then it isn't infinity.
1574 __ cmpl(EBX, Immediate(0)); 1509 __ cmpl(EBX, Immediate(0));
1575 __ j(NOT_EQUAL, &not_inf, Assembler::kNearJump); 1510 __ j(NOT_EQUAL, &not_inf, Assembler::kNearJump);
1576 // Check the high word. 1511 // Check the high word.
1577 __ movl(EBX, FieldAddress(EAX, Double::value_offset() + kWordSize)); 1512 __ movl(EBX, FieldAddress(EAX, Double::value_offset() + kWordSize));
1578 // Mask off sign bit. 1513 // Mask off sign bit.
1579 __ andl(EBX, Immediate(0x7FFFFFFF)); 1514 __ andl(EBX, Immediate(0x7FFFFFFF));
1580 // Compare with +infinity. 1515 // Compare with +infinity.
1581 __ cmpl(EBX, Immediate(0x7FF00000)); 1516 __ cmpl(EBX, Immediate(0x7FF00000));
1582 __ j(NOT_EQUAL, &not_inf, Assembler::kNearJump); 1517 __ j(NOT_EQUAL, &not_inf, Assembler::kNearJump);
1583 __ LoadObject(EAX, Bool::True()); 1518 __ LoadObject(EAX, Bool::True());
1584 __ ret(); 1519 __ ret();
1585 1520
1586 __ Bind(&not_inf); 1521 __ Bind(&not_inf);
1587 __ LoadObject(EAX, Bool::False()); 1522 __ LoadObject(EAX, Bool::False());
1588 __ ret(); 1523 __ ret();
1589 } 1524 }
1590 1525
1591
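The word-by-word test above matches the IEEE 754 encoding of infinity: mantissa all zero, exponent all ones. A hedged C++ sketch of the same bit test (helper name illustrative):

#include <cstdint>
#include <cstring>

bool GetIsInfinite(double value) {
  uint64_t bits;
  memcpy(&bits, &value, sizeof(bits));  // Reinterpret the double's bits.
  uint32_t lo = static_cast<uint32_t>(bits);  // Low word must be zero.
  uint32_t hi = static_cast<uint32_t>(bits >> 32) & 0x7FFFFFFF;  // Mask sign.
  return lo == 0 && hi == 0x7FF00000;  // +/-infinity once the sign is masked.
}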
1592 void Intrinsifier::Double_getIsNegative(Assembler* assembler) { 1526 void Intrinsifier::Double_getIsNegative(Assembler* assembler) {
1593 Label is_false, is_true, is_zero; 1527 Label is_false, is_true, is_zero;
1594 __ movl(EAX, Address(ESP, +1 * kWordSize)); 1528 __ movl(EAX, Address(ESP, +1 * kWordSize));
1595 __ movsd(XMM0, FieldAddress(EAX, Double::value_offset())); 1529 __ movsd(XMM0, FieldAddress(EAX, Double::value_offset()));
1596 __ xorpd(XMM1, XMM1); // 0.0 -> XMM1. 1530 __ xorpd(XMM1, XMM1); // 0.0 -> XMM1.
1597 __ comisd(XMM0, XMM1); 1531 __ comisd(XMM0, XMM1);
1598 __ j(PARITY_EVEN, &is_false, Assembler::kNearJump); // NaN -> false. 1532 __ j(PARITY_EVEN, &is_false, Assembler::kNearJump); // NaN -> false.
1599 __ j(EQUAL, &is_zero, Assembler::kNearJump); // Check for negative zero. 1533 __ j(EQUAL, &is_zero, Assembler::kNearJump); // Check for negative zero.
1600 __ j(ABOVE_EQUAL, &is_false, Assembler::kNearJump); // >= 0 -> false. 1534 __ j(ABOVE_EQUAL, &is_false, Assembler::kNearJump); // >= 0 -> false.
1601 __ Bind(&is_true); 1535 __ Bind(&is_true);
1602 __ LoadObject(EAX, Bool::True()); 1536 __ LoadObject(EAX, Bool::True());
1603 __ ret(); 1537 __ ret();
1604 __ Bind(&is_false); 1538 __ Bind(&is_false);
1605 __ LoadObject(EAX, Bool::False()); 1539 __ LoadObject(EAX, Bool::False());
1606 __ ret(); 1540 __ ret();
1607 __ Bind(&is_zero); 1541 __ Bind(&is_zero);
1608 // Check for negative zero (get the sign bit). 1542 // Check for negative zero (get the sign bit).
1609 __ movmskpd(EAX, XMM0); 1543 __ movmskpd(EAX, XMM0);
1610 __ testl(EAX, Immediate(1)); 1544 __ testl(EAX, Immediate(1));
1611 __ j(NOT_ZERO, &is_true, Assembler::kNearJump); 1545 __ j(NOT_ZERO, &is_true, Assembler::kNearJump);
1612 __ jmp(&is_false, Assembler::kNearJump); 1546 __ jmp(&is_false, Assembler::kNearJump);
1613 } 1547 }
1614 1548
1615
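The intrinsic above has two subtle cases: NaN is not negative (the parity jump), and -0.0 is negative even though it compares equal to 0.0, which is why the sign bit is extracted with movmskpd. A sketch of the same semantics in portable C++:

#include <cmath>

bool GetIsNegative(double x) {
  if (std::isnan(x)) return false;       // NaN -> false.
  if (x == 0.0) return std::signbit(x);  // 0.0 == -0.0; only the sign differs.
  return x < 0.0;
}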
1616 void Intrinsifier::DoubleToInteger(Assembler* assembler) { 1549 void Intrinsifier::DoubleToInteger(Assembler* assembler) {
1617 __ movl(EAX, Address(ESP, +1 * kWordSize)); 1550 __ movl(EAX, Address(ESP, +1 * kWordSize));
1618 __ movsd(XMM0, FieldAddress(EAX, Double::value_offset())); 1551 __ movsd(XMM0, FieldAddress(EAX, Double::value_offset()));
1619 __ cvttsd2si(EAX, XMM0); 1552 __ cvttsd2si(EAX, XMM0);
1620 // Overflow is signalled with minint. 1553 // Overflow is signalled with minint.
1621 Label fall_through; 1554 Label fall_through;
1622 // Check for overflow and that it fits into Smi. 1555 // Check for overflow and that it fits into Smi.
1623 __ cmpl(EAX, Immediate(0xC0000000)); 1556 __ cmpl(EAX, Immediate(0xC0000000));
1624 __ j(NEGATIVE, &fall_through, Assembler::kNearJump); 1557 __ j(NEGATIVE, &fall_through, Assembler::kNearJump);
1625 __ SmiTag(EAX); 1558 __ SmiTag(EAX);
1626 __ ret(); 1559 __ ret();
1627 __ Bind(&fall_through); 1560 __ Bind(&fall_through);
1628 } 1561 }
1629 1562
1630
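The single cmpl against 0xC0000000 does double duty: cvttsd2si reports overflow as 0x80000000, and a Smi on IA32 holds only 31 bits, so subtracting 0xC0000000 leaves exactly the values in [-2^30, 2^30 - 1] non-negative. A hedged sketch of the check (helper name illustrative):

#include <cstdint>

bool FitsInSmi(int32_t v) {
  // One unsigned subtraction rejects both the cvttsd2si overflow marker
  // (0x80000000) and every value outside the 31-bit Smi range.
  return (static_cast<uint32_t>(v) - 0xC0000000u) <= 0x7FFFFFFFu;
}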
1631 // Argument type is not known. 1563 // Argument type is not known.
1632 void Intrinsifier::MathSqrt(Assembler* assembler) { 1564 void Intrinsifier::MathSqrt(Assembler* assembler) {
1633 Label fall_through, is_smi, double_op; 1565 Label fall_through, is_smi, double_op;
1634 TestLastArgumentIsDouble(assembler, &is_smi, &fall_through); 1566 TestLastArgumentIsDouble(assembler, &is_smi, &fall_through);
1635 // Argument is double and is in EAX. 1567 // Argument is double and is in EAX.
1636 __ movsd(XMM1, FieldAddress(EAX, Double::value_offset())); 1568 __ movsd(XMM1, FieldAddress(EAX, Double::value_offset()));
1637 __ Bind(&double_op); 1569 __ Bind(&double_op);
1638 __ sqrtsd(XMM0, XMM1); 1570 __ sqrtsd(XMM0, XMM1);
1639 const Class& double_class = 1571 const Class& double_class =
1640 Class::Handle(Isolate::Current()->object_store()->double_class()); 1572 Class::Handle(Isolate::Current()->object_store()->double_class());
1641 __ TryAllocate(double_class, &fall_through, Assembler::kNearJump, 1573 __ TryAllocate(double_class, &fall_through, Assembler::kNearJump,
1642 EAX, // Result register. 1574 EAX, // Result register.
1643 EBX); 1575 EBX);
1644 __ movsd(FieldAddress(EAX, Double::value_offset()), XMM0); 1576 __ movsd(FieldAddress(EAX, Double::value_offset()), XMM0);
1645 __ ret(); 1577 __ ret();
1646 __ Bind(&is_smi); 1578 __ Bind(&is_smi);
1647 __ SmiUntag(EAX); 1579 __ SmiUntag(EAX);
1648 __ cvtsi2sd(XMM1, EAX); 1580 __ cvtsi2sd(XMM1, EAX);
1649 __ jmp(&double_op); 1581 __ jmp(&double_op);
1650 __ Bind(&fall_through); 1582 __ Bind(&fall_through);
1651 } 1583 }
1652 1584
1653
1654 // var state = ((_A * (_state[kSTATE_LO])) + _state[kSTATE_HI]) & _MASK_64; 1585 // var state = ((_A * (_state[kSTATE_LO])) + _state[kSTATE_HI]) & _MASK_64;
1655 // _state[kSTATE_LO] = state & _MASK_32; 1586 // _state[kSTATE_LO] = state & _MASK_32;
1656 // _state[kSTATE_HI] = state >> 32; 1587 // _state[kSTATE_HI] = state >> 32;
1657 void Intrinsifier::Random_nextState(Assembler* assembler) { 1588 void Intrinsifier::Random_nextState(Assembler* assembler) {
1658 const Library& math_lib = Library::Handle(Library::MathLibrary()); 1589 const Library& math_lib = Library::Handle(Library::MathLibrary());
1659 ASSERT(!math_lib.IsNull()); 1590 ASSERT(!math_lib.IsNull());
1660 const Class& random_class = 1591 const Class& random_class =
1661 Class::Handle(math_lib.LookupClassAllowPrivate(Symbols::_Random())); 1592 Class::Handle(math_lib.LookupClassAllowPrivate(Symbols::_Random()));
1662 ASSERT(!random_class.IsNull()); 1593 ASSERT(!random_class.IsNull());
1663 const Field& state_field = Field::ZoneHandle( 1594 const Field& state_field = Field::ZoneHandle(
(...skipping 25 matching lines...)
1689 __ movl(EAX, Immediate(a_int32_value)); 1620 __ movl(EAX, Immediate(a_int32_value));
1690 // 64-bit multiply EAX * value -> EDX:EAX. 1621 // 64-bit multiply EAX * value -> EDX:EAX.
1691 __ mull(addr_0); 1622 __ mull(addr_0);
1692 __ addl(EAX, addr_1); 1623 __ addl(EAX, addr_1);
1693 __ adcl(EDX, Immediate(0)); 1624 __ adcl(EDX, Immediate(0));
1694 __ movl(addr_1, EDX); 1625 __ movl(addr_1, EDX);
1695 __ movl(addr_0, EAX); 1626 __ movl(addr_0, EAX);
1696 __ ret(); 1627 __ ret();
1697 } 1628 }
1698 1629
1699
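The mull/addl/adcl sequence implements the 64-bit state update from the Dart comment above using 32-bit instructions: mull produces the 64-bit product in EDX:EAX, and adcl propagates the carry from the low-word add into the high word. A C++ sketch of the same update (kA stands in for the _Random class's _A constant):

#include <cstdint>

void NextState(uint32_t* state_lo, uint32_t* state_hi, uint32_t kA) {
  uint64_t state = static_cast<uint64_t>(kA) * (*state_lo) + (*state_hi);
  *state_lo = static_cast<uint32_t>(state);        // state & _MASK_32.
  *state_hi = static_cast<uint32_t>(state >> 32);  // state >> 32.
}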
1700 // Identity comparison. 1630 // Identity comparison.
1701 void Intrinsifier::ObjectEquals(Assembler* assembler) { 1631 void Intrinsifier::ObjectEquals(Assembler* assembler) {
1702 Label is_true; 1632 Label is_true;
1703 __ movl(EAX, Address(ESP, +1 * kWordSize)); 1633 __ movl(EAX, Address(ESP, +1 * kWordSize));
1704 __ cmpl(EAX, Address(ESP, +2 * kWordSize)); 1634 __ cmpl(EAX, Address(ESP, +2 * kWordSize));
1705 __ j(EQUAL, &is_true, Assembler::kNearJump); 1635 __ j(EQUAL, &is_true, Assembler::kNearJump);
1706 __ LoadObject(EAX, Bool::False()); 1636 __ LoadObject(EAX, Bool::False());
1707 __ ret(); 1637 __ ret();
1708 __ Bind(&is_true); 1638 __ Bind(&is_true);
1709 __ LoadObject(EAX, Bool::True()); 1639 __ LoadObject(EAX, Bool::True());
1710 __ ret(); 1640 __ ret();
1711 } 1641 }
1712 1642
1713
1714 static void RangeCheck(Assembler* assembler, 1643 static void RangeCheck(Assembler* assembler,
1715 Register reg, 1644 Register reg,
1716 intptr_t low, 1645 intptr_t low,
1717 intptr_t high, 1646 intptr_t high,
1718 Condition cc, 1647 Condition cc,
1719 Label* target) { 1648 Label* target) {
1720 __ subl(reg, Immediate(low)); 1649 __ subl(reg, Immediate(low));
1721 __ cmpl(reg, Immediate(high - low)); 1650 __ cmpl(reg, Immediate(high - low));
1722 __ j(cc, target); 1651 __ j(cc, target);
1723 } 1652 }
1724 1653
1725
1726 const Condition kIfNotInRange = ABOVE; 1654 const Condition kIfNotInRange = ABOVE;
1727 const Condition kIfInRange = BELOW_EQUAL; 1655 const Condition kIfInRange = BELOW_EQUAL;
1728 1656
1729
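RangeCheck relies on the standard unsigned range-check trick: after subtracting 'low', values below the range wrap around to large unsigned numbers, so one unsigned comparison (BELOW_EQUAL / ABOVE) tests both bounds at once. A minimal C++ sketch:

#include <cstdint>

bool InRange(uint32_t cid, uint32_t low, uint32_t high) {
  return (cid - low) <= (high - low);  // Unsigned: one compare, both bounds.
}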
1730 static void JumpIfInteger(Assembler* assembler, Register cid, Label* target) { 1657 static void JumpIfInteger(Assembler* assembler, Register cid, Label* target) {
1731 RangeCheck(assembler, cid, kSmiCid, kBigintCid, kIfInRange, target); 1658 RangeCheck(assembler, cid, kSmiCid, kBigintCid, kIfInRange, target);
1732 } 1659 }
1733 1660
1734
1735 static void JumpIfNotInteger(Assembler* assembler, 1661 static void JumpIfNotInteger(Assembler* assembler,
1736 Register cid, 1662 Register cid,
1737 Label* target) { 1663 Label* target) {
1738 RangeCheck(assembler, cid, kSmiCid, kBigintCid, kIfNotInRange, target); 1664 RangeCheck(assembler, cid, kSmiCid, kBigintCid, kIfNotInRange, target);
1739 } 1665 }
1740 1666
1741
1742 static void JumpIfString(Assembler* assembler, Register cid, Label* target) { 1667 static void JumpIfString(Assembler* assembler, Register cid, Label* target) {
1743 RangeCheck(assembler, cid, kOneByteStringCid, kExternalTwoByteStringCid, 1668 RangeCheck(assembler, cid, kOneByteStringCid, kExternalTwoByteStringCid,
1744 kIfInRange, target); 1669 kIfInRange, target);
1745 } 1670 }
1746 1671
1747
1748 static void JumpIfNotString(Assembler* assembler, Register cid, Label* target) { 1672 static void JumpIfNotString(Assembler* assembler, Register cid, Label* target) {
1749 RangeCheck(assembler, cid, kOneByteStringCid, kExternalTwoByteStringCid, 1673 RangeCheck(assembler, cid, kOneByteStringCid, kExternalTwoByteStringCid,
1750 kIfNotInRange, target); 1674 kIfNotInRange, target);
1751 } 1675 }
1752 1676
1753
1754 // Return type quickly for simple types (not parameterized and not signature). 1677 // Return type quickly for simple types (not parameterized and not signature).
1755 void Intrinsifier::ObjectRuntimeType(Assembler* assembler) { 1678 void Intrinsifier::ObjectRuntimeType(Assembler* assembler) {
1756 Label fall_through, use_canonical_type, not_double, not_integer; 1679 Label fall_through, use_canonical_type, not_double, not_integer;
1757 __ movl(EAX, Address(ESP, +1 * kWordSize)); 1680 __ movl(EAX, Address(ESP, +1 * kWordSize));
1758 __ LoadClassIdMayBeSmi(EDI, EAX); 1681 __ LoadClassIdMayBeSmi(EDI, EAX);
1759 1682
1760 __ cmpl(EDI, Immediate(kClosureCid)); 1683 __ cmpl(EDI, Immediate(kClosureCid));
1761 __ j(EQUAL, &fall_through); // Instance is a closure. 1684 __ j(EQUAL, &fall_through); // Instance is a closure.
1762 1685
1763 __ cmpl(EDI, Immediate(kNumPredefinedCids)); 1686 __ cmpl(EDI, Immediate(kNumPredefinedCids));
(...skipping 36 matching lines...)
1800 __ cmpl(EDI, Immediate(0)); 1723 __ cmpl(EDI, Immediate(0));
1801 __ j(NOT_EQUAL, &fall_through, Assembler::kNearJump); 1724 __ j(NOT_EQUAL, &fall_through, Assembler::kNearJump);
1802 __ movl(EAX, FieldAddress(EBX, Class::canonical_type_offset())); 1725 __ movl(EAX, FieldAddress(EBX, Class::canonical_type_offset()));
1803 __ CompareObject(EAX, Object::null_object()); 1726 __ CompareObject(EAX, Object::null_object());
1804 __ j(EQUAL, &fall_through, Assembler::kNearJump); // Not yet set. 1727 __ j(EQUAL, &fall_through, Assembler::kNearJump); // Not yet set.
1805 __ ret(); 1728 __ ret();
1806 1729
1807 __ Bind(&fall_through); 1730 __ Bind(&fall_through);
1808 } 1731 }
1809 1732
1810
1811 void Intrinsifier::ObjectHaveSameRuntimeType(Assembler* assembler) { 1733 void Intrinsifier::ObjectHaveSameRuntimeType(Assembler* assembler) {
1812 Label fall_through, different_cids, equal, not_equal, not_integer; 1734 Label fall_through, different_cids, equal, not_equal, not_integer;
1813 1735
1814 __ movl(EAX, Address(ESP, +1 * kWordSize)); 1736 __ movl(EAX, Address(ESP, +1 * kWordSize));
1815 __ LoadClassIdMayBeSmi(EDI, EAX); 1737 __ LoadClassIdMayBeSmi(EDI, EAX);
1816 1738
1817 // Check if left hand side is a closure. Closures are handled in the runtime. 1739 // Check if left hand side is a closure. Closures are handled in the runtime.
1818 __ cmpl(EDI, Immediate(kClosureCid)); 1740 __ cmpl(EDI, Immediate(kClosureCid));
1819 __ j(EQUAL, &fall_through); 1741 __ j(EQUAL, &fall_through);
1820 1742
(...skipping 43 matching lines...)
1864 // Strings only have the same runtime type as other strings. 1786 // Strings only have the same runtime type as other strings.
1865 // Fall-through to the not equal case. 1787 // Fall-through to the not equal case.
1866 1788
1867 __ Bind(&not_equal); 1789 __ Bind(&not_equal);
1868 __ LoadObject(EAX, Bool::False()); 1790 __ LoadObject(EAX, Bool::False());
1869 __ ret(); 1791 __ ret();
1870 1792
1871 __ Bind(&fall_through); 1793 __ Bind(&fall_through);
1872 } 1794 }
1873 1795
1874
1875 void Intrinsifier::String_getHashCode(Assembler* assembler) { 1796 void Intrinsifier::String_getHashCode(Assembler* assembler) {
1876 Label fall_through; 1797 Label fall_through;
1877 __ movl(EAX, Address(ESP, +1 * kWordSize)); // String object. 1798 __ movl(EAX, Address(ESP, +1 * kWordSize)); // String object.
1878 __ movl(EAX, FieldAddress(EAX, String::hash_offset())); 1799 __ movl(EAX, FieldAddress(EAX, String::hash_offset()));
1879 __ cmpl(EAX, Immediate(0)); 1800 __ cmpl(EAX, Immediate(0));
1880 __ j(EQUAL, &fall_through, Assembler::kNearJump); 1801 __ j(EQUAL, &fall_through, Assembler::kNearJump);
1881 __ ret(); 1802 __ ret();
1882 __ Bind(&fall_through); 1803 __ Bind(&fall_through);
1883 // Hash not yet computed. 1804 // Hash not yet computed.
1884 } 1805 }
1885 1806
1886
1887 // bool _substringMatches(int start, String other) 1807 // bool _substringMatches(int start, String other)
1888 void Intrinsifier::StringBaseSubstringMatches(Assembler* assembler) { 1808 void Intrinsifier::StringBaseSubstringMatches(Assembler* assembler) {
1889 // For precompilation, not implemented on IA32. 1809 // For precompilation, not implemented on IA32.
1890 } 1810 }
1891 1811
1892
1893 void Intrinsifier::Object_getHash(Assembler* assembler) { 1812 void Intrinsifier::Object_getHash(Assembler* assembler) {
1894 UNREACHABLE(); 1813 UNREACHABLE();
1895 } 1814 }
1896 1815
1897
1898 void Intrinsifier::Object_setHash(Assembler* assembler) { 1816 void Intrinsifier::Object_setHash(Assembler* assembler) {
1899 UNREACHABLE(); 1817 UNREACHABLE();
1900 } 1818 }
1901 1819
1902
1903 void Intrinsifier::StringBaseCharAt(Assembler* assembler) { 1820 void Intrinsifier::StringBaseCharAt(Assembler* assembler) {
1904 Label fall_through, try_two_byte_string; 1821 Label fall_through, try_two_byte_string;
1905 __ movl(EBX, Address(ESP, +1 * kWordSize)); // Index. 1822 __ movl(EBX, Address(ESP, +1 * kWordSize)); // Index.
1906 __ movl(EAX, Address(ESP, +2 * kWordSize)); // String. 1823 __ movl(EAX, Address(ESP, +2 * kWordSize)); // String.
1907 __ testl(EBX, Immediate(kSmiTagMask)); 1824 __ testl(EBX, Immediate(kSmiTagMask));
1908 __ j(NOT_ZERO, &fall_through, Assembler::kNearJump); // Non-smi index. 1825 __ j(NOT_ZERO, &fall_through, Assembler::kNearJump); // Non-smi index.
1909 // Range check. 1826 // Range check.
1910 __ cmpl(EBX, FieldAddress(EAX, String::length_offset())); 1827 __ cmpl(EBX, FieldAddress(EAX, String::length_offset()));
1911 // Runtime throws exception. 1828 // Runtime throws exception.
1912 __ j(ABOVE_EQUAL, &fall_through, Assembler::kNearJump); 1829 __ j(ABOVE_EQUAL, &fall_through, Assembler::kNearJump);
(...skipping 18 matching lines...)
1931 __ j(GREATER_EQUAL, &fall_through); 1848 __ j(GREATER_EQUAL, &fall_through);
1932 __ movl(EAX, 1849 __ movl(EAX,
1933 Immediate(reinterpret_cast<uword>(Symbols::PredefinedAddress()))); 1850 Immediate(reinterpret_cast<uword>(Symbols::PredefinedAddress())));
1934 __ movl(EAX, Address(EAX, EBX, TIMES_4, 1851 __ movl(EAX, Address(EAX, EBX, TIMES_4,
1935 Symbols::kNullCharCodeSymbolOffset * kWordSize)); 1852 Symbols::kNullCharCodeSymbolOffset * kWordSize));
1936 __ ret(); 1853 __ ret();
1937 1854
1938 __ Bind(&fall_through); 1855 __ Bind(&fall_through);
1939 } 1856 }
1940 1857
1941
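The fast path elided above answers single-character lookups from the table of predefined one-character symbols, so no allocation is needed; character codes at or above the table size fall through to the runtime. A hedged sketch of that idea (table and limit are illustrative):

// Returns the canonical one-character string, or nullptr to fall through.
const void* CharAtFastPath(uint32_t char_code, const void* symbol_table[],
                           uint32_t num_predefined_symbols) {
  if (char_code >= num_predefined_symbols) return nullptr;  // Runtime path.
  return symbol_table[char_code];  // Shared, preallocated 1-char symbol.
}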
1942 void Intrinsifier::StringBaseIsEmpty(Assembler* assembler) { 1858 void Intrinsifier::StringBaseIsEmpty(Assembler* assembler) {
1943 Label is_true; 1859 Label is_true;
1944 // Get length. 1860 // Get length.
1945 __ movl(EAX, Address(ESP, +1 * kWordSize)); // String object. 1861 __ movl(EAX, Address(ESP, +1 * kWordSize)); // String object.
1946 __ movl(EAX, FieldAddress(EAX, String::length_offset())); 1862 __ movl(EAX, FieldAddress(EAX, String::length_offset()));
1947 __ cmpl(EAX, Immediate(Smi::RawValue(0))); 1863 __ cmpl(EAX, Immediate(Smi::RawValue(0)));
1948 __ j(EQUAL, &is_true, Assembler::kNearJump); 1864 __ j(EQUAL, &is_true, Assembler::kNearJump);
1949 __ LoadObject(EAX, Bool::False()); 1865 __ LoadObject(EAX, Bool::False());
1950 __ ret(); 1866 __ ret();
1951 __ Bind(&is_true); 1867 __ Bind(&is_true);
1952 __ LoadObject(EAX, Bool::True()); 1868 __ LoadObject(EAX, Bool::True());
1953 __ ret(); 1869 __ ret();
1954 } 1870 }
1955 1871
1956
1957 void Intrinsifier::OneByteString_getHashCode(Assembler* assembler) { 1872 void Intrinsifier::OneByteString_getHashCode(Assembler* assembler) {
1958 Label compute_hash; 1873 Label compute_hash;
1959 __ movl(EBX, Address(ESP, +1 * kWordSize)); // OneByteString object. 1874 __ movl(EBX, Address(ESP, +1 * kWordSize)); // OneByteString object.
1960 __ movl(EAX, FieldAddress(EBX, String::hash_offset())); 1875 __ movl(EAX, FieldAddress(EBX, String::hash_offset()));
1961 __ cmpl(EAX, Immediate(0)); 1876 __ cmpl(EAX, Immediate(0));
1962 __ j(EQUAL, &compute_hash, Assembler::kNearJump); 1877 __ j(EQUAL, &compute_hash, Assembler::kNearJump);
1963 __ ret(); 1878 __ ret();
1964 1879
1965 __ Bind(&compute_hash); 1880 __ Bind(&compute_hash);
1966 // Hash not yet computed; use the algorithm of class StringHasher. 1881 // Hash not yet computed; use the algorithm of class StringHasher.
(...skipping 48 matching lines...)
2015 // return hash_ == 0 ? 1 : hash_; 1930 // return hash_ == 0 ? 1 : hash_;
2016 __ cmpl(EAX, Immediate(0)); 1931 __ cmpl(EAX, Immediate(0));
2017 __ j(NOT_EQUAL, &set_hash_code, Assembler::kNearJump); 1932 __ j(NOT_EQUAL, &set_hash_code, Assembler::kNearJump);
2018 __ incl(EAX); 1933 __ incl(EAX);
2019 __ Bind(&set_hash_code); 1934 __ Bind(&set_hash_code);
2020 __ SmiTag(EAX); 1935 __ SmiTag(EAX);
2021 __ StoreIntoSmiField(FieldAddress(EBX, String::hash_offset()), EAX); 1936 __ StoreIntoSmiField(FieldAddress(EBX, String::hash_offset()), EAX);
2022 __ ret(); 1937 __ ret();
2023 } 1938 }
2024 1939
2025
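The finalization shown above keeps the value 0 reserved as the "hash not yet computed" marker that String_getHashCode and OneByteString_getHashCode test for; a computed hash of 0 is bumped to 1. As a one-line sketch:

#include <cstdint>

uint32_t FinalizeHash(uint32_t hash) {
  return (hash == 0) ? 1 : hash;  // 0 stays reserved for "not yet computed".
}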
2026 // Allocates a one-byte string of length 'end - start'. The content is not 1940 // Allocates a one-byte string of length 'end - start'. The content is not
2027 // initialized. 'length_reg' contains the tagged length. 1941 // initialized. 'length_reg' contains the tagged length.
2028 // Returns new string as tagged pointer in EAX. 1942 // Returns new string as tagged pointer in EAX.
2029 static void TryAllocateOnebyteString(Assembler* assembler, 1943 static void TryAllocateOnebyteString(Assembler* assembler,
2030 Label* ok, 1944 Label* ok,
2031 Label* failure, 1945 Label* failure,
2032 Register length_reg) { 1946 Register length_reg) {
2033 NOT_IN_PRODUCT( 1947 NOT_IN_PRODUCT(
2034 __ MaybeTraceAllocation(kOneByteStringCid, EAX, failure, false)); 1948 __ MaybeTraceAllocation(kOneByteStringCid, EAX, failure, false));
2035 if (length_reg != EDI) { 1949 if (length_reg != EDI) {
(...skipping 59 matching lines...)
2095 EDI); 2009 EDI);
2096 // Clear hash. 2010 // Clear hash.
2097 __ ZeroInitSmiField(FieldAddress(EAX, String::hash_offset())); 2011 __ ZeroInitSmiField(FieldAddress(EAX, String::hash_offset()));
2098 __ jmp(ok, Assembler::kNearJump); 2012 __ jmp(ok, Assembler::kNearJump);
2099 2013
2100 __ Bind(&pop_and_fail); 2014 __ Bind(&pop_and_fail);
2101 __ popl(EDI); 2015 __ popl(EDI);
2102 __ jmp(failure); 2016 __ jmp(failure);
2103 } 2017 }
2104 2018
2105
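The elided body of TryAllocateOnebyteString computes the allocation size as a fixed header plus one byte per character, rounded up to the heap's object alignment, and then bump-allocates from new space. A hedged sketch of just the size computation (names are illustrative, not the VM's API; alignment is assumed to be a power of two):

#include <cstdint>

intptr_t OneByteStringAllocationSize(intptr_t length, intptr_t header_size,
                                     intptr_t object_alignment) {
  intptr_t size = header_size + length;  // One byte per character.
  return (size + object_alignment - 1) & ~(object_alignment - 1);  // Round up.
}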
2106 // Arg0: OneByteString (receiver) 2019 // Arg0: OneByteString (receiver)
2107 // Arg1: Start index as Smi. 2020 // Arg1: Start index as Smi.
2108 // Arg2: End index as Smi. 2021 // Arg2: End index as Smi.
2109 // The indexes must be valid. 2022 // The indexes must be valid.
2110 void Intrinsifier::OneByteString_substringUnchecked(Assembler* assembler) { 2023 void Intrinsifier::OneByteString_substringUnchecked(Assembler* assembler) {
2111 const intptr_t kStringOffset = 3 * kWordSize; 2024 const intptr_t kStringOffset = 3 * kWordSize;
2112 const intptr_t kStartIndexOffset = 2 * kWordSize; 2025 const intptr_t kStartIndexOffset = 2 * kWordSize;
2113 const intptr_t kEndIndexOffset = 1 * kWordSize; 2026 const intptr_t kEndIndexOffset = 1 * kWordSize;
2114 Label fall_through, ok; 2027 Label fall_through, ok;
2115 __ movl(EAX, Address(ESP, +kStartIndexOffset)); 2028 __ movl(EAX, Address(ESP, +kStartIndexOffset));
(...skipping 28 matching lines...)
2144 __ movzxb(EBX, Address(EDI, EDX, TIMES_1, 0)); 2057 __ movzxb(EBX, Address(EDI, EDX, TIMES_1, 0));
2145 __ movb(FieldAddress(EAX, EDX, TIMES_1, OneByteString::data_offset()), BL); 2058 __ movb(FieldAddress(EAX, EDX, TIMES_1, OneByteString::data_offset()), BL);
2146 __ incl(EDX); 2059 __ incl(EDX);
2147 __ Bind(&check); 2060 __ Bind(&check);
2148 __ cmpl(EDX, ECX); 2061 __ cmpl(EDX, ECX);
2149 __ j(LESS, &loop, Assembler::kNearJump); 2062 __ j(LESS, &loop, Assembler::kNearJump);
2150 __ ret(); 2063 __ ret();
2151 __ Bind(&fall_through); 2064 __ Bind(&fall_through);
2152 } 2065 }
2153 2066
2154
2155 void Intrinsifier::OneByteStringSetAt(Assembler* assembler) { 2067 void Intrinsifier::OneByteStringSetAt(Assembler* assembler) {
2156 __ movl(ECX, Address(ESP, +1 * kWordSize)); // Value. 2068 __ movl(ECX, Address(ESP, +1 * kWordSize)); // Value.
2157 __ movl(EBX, Address(ESP, +2 * kWordSize)); // Index. 2069 __ movl(EBX, Address(ESP, +2 * kWordSize)); // Index.
2158 __ movl(EAX, Address(ESP, +3 * kWordSize)); // OneByteString. 2070 __ movl(EAX, Address(ESP, +3 * kWordSize)); // OneByteString.
2159 __ SmiUntag(EBX); 2071 __ SmiUntag(EBX);
2160 __ SmiUntag(ECX); 2072 __ SmiUntag(ECX);
2161 __ movb(FieldAddress(EAX, EBX, TIMES_1, OneByteString::data_offset()), CL); 2073 __ movb(FieldAddress(EAX, EBX, TIMES_1, OneByteString::data_offset()), CL);
2162 __ ret(); 2074 __ ret();
2163 } 2075 }
2164 2076
2165
2166 void Intrinsifier::OneByteString_allocate(Assembler* assembler) { 2077 void Intrinsifier::OneByteString_allocate(Assembler* assembler) {
2167 __ movl(EDI, Address(ESP, +1 * kWordSize)); // Length. 2078 __ movl(EDI, Address(ESP, +1 * kWordSize)); // Length.
2168 Label fall_through, ok; 2079 Label fall_through, ok;
2169 TryAllocateOnebyteString(assembler, &ok, &fall_through, EDI); 2080 TryAllocateOnebyteString(assembler, &ok, &fall_through, EDI);
2170 // EDI: Start address to copy from (untagged). 2081 // EDI: Start address to copy from (untagged).
2171 2082
2172 __ Bind(&ok); 2083 __ Bind(&ok);
2173 __ ret(); 2084 __ ret();
2174 2085
2175 __ Bind(&fall_through); 2086 __ Bind(&fall_through);
2176 } 2087 }
2177 2088
2178
2179 // TODO(srdjan): Add combinations (one-byte/two-byte/external strings). 2089 // TODO(srdjan): Add combinations (one-byte/two-byte/external strings).
2180 static void StringEquality(Assembler* assembler, intptr_t string_cid) { 2090 static void StringEquality(Assembler* assembler, intptr_t string_cid) {
2181 Label fall_through, is_true, is_false, loop; 2091 Label fall_through, is_true, is_false, loop;
2182 __ movl(EAX, Address(ESP, +2 * kWordSize)); // This. 2092 __ movl(EAX, Address(ESP, +2 * kWordSize)); // This.
2183 __ movl(EBX, Address(ESP, +1 * kWordSize)); // Other. 2093 __ movl(EBX, Address(ESP, +1 * kWordSize)); // Other.
2184 2094
2185 // Are identical? 2095 // Are identical?
2186 __ cmpl(EAX, EBX); 2096 __ cmpl(EAX, EBX);
2187 __ j(EQUAL, &is_true, Assembler::kNearJump); 2097 __ j(EQUAL, &is_true, Assembler::kNearJump);
2188 2098
(...skipping 36 matching lines...)
2225 __ LoadObject(EAX, Bool::True()); 2135 __ LoadObject(EAX, Bool::True());
2226 __ ret(); 2136 __ ret();
2227 2137
2228 __ Bind(&is_false); 2138 __ Bind(&is_false);
2229 __ LoadObject(EAX, Bool::False()); 2139 __ LoadObject(EAX, Bool::False());
2230 __ ret(); 2140 __ ret();
2231 2141
2232 __ Bind(&fall_through); 2142 __ Bind(&fall_through);
2233 } 2143 }
2234 2144
2235
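StringEquality's elided middle checks lengths and class ids before the character loop; the identity compare at the top catches the common same-object case without touching the string contents. A hedged C++ sketch of the overall strategy:

#include <cstdint>

bool StringEquals(const uint8_t* a, intptr_t a_len,
                  const uint8_t* b, intptr_t b_len) {
  if (a == b) return true;  // Identical objects are trivially equal.
  if (a_len != b_len) return false;
  for (intptr_t i = 0; i < a_len; i++) {
    if (a[i] != b[i]) return false;
  }
  return true;
}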
2236 void Intrinsifier::OneByteString_equality(Assembler* assembler) { 2145 void Intrinsifier::OneByteString_equality(Assembler* assembler) {
2237 StringEquality(assembler, kOneByteStringCid); 2146 StringEquality(assembler, kOneByteStringCid);
2238 } 2147 }
2239 2148
2240
2241 void Intrinsifier::TwoByteString_equality(Assembler* assembler) { 2149 void Intrinsifier::TwoByteString_equality(Assembler* assembler) {
2242 StringEquality(assembler, kTwoByteStringCid); 2150 StringEquality(assembler, kTwoByteStringCid);
2243 } 2151 }
2244 2152
2245
2246 void Intrinsifier::IntrinsifyRegExpExecuteMatch(Assembler* assembler, 2153 void Intrinsifier::IntrinsifyRegExpExecuteMatch(Assembler* assembler,
2247 bool sticky) { 2154 bool sticky) {
2248 if (FLAG_interpret_irregexp) return; 2155 if (FLAG_interpret_irregexp) return;
2249 2156
2250 static const intptr_t kRegExpParamOffset = 3 * kWordSize; 2157 static const intptr_t kRegExpParamOffset = 3 * kWordSize;
2251 static const intptr_t kStringParamOffset = 2 * kWordSize; 2158 static const intptr_t kStringParamOffset = 2 * kWordSize;
2252 // start_index smi is located at offset 1. 2159 // start_index smi is located at offset 1.
2253 2160
2254 // Incoming registers: 2161 // Incoming registers:
2255 // EAX: Function. (Will be loaded with the specialized matcher function.) 2162 // EAX: Function. (Will be loaded with the specialized matcher function.)
2256 // ECX: Unknown. (Must be GC safe on tail call.) 2163 // ECX: Unknown. (Must be GC safe on tail call.)
2257 // EDX: Arguments descriptor. (Will be preserved.) 2164 // EDX: Arguments descriptor. (Will be preserved.)
2258 2165
2259 // Load the specialized function pointer into EAX. Leverage the fact that 2166 // Load the specialized function pointer into EAX. Leverage the fact that
2260 // the string CIDs as well as the stored function pointers are in sequence. 2167 // the string CIDs as well as the stored function pointers are in sequence.
2261 __ movl(EBX, Address(ESP, kRegExpParamOffset)); 2168 __ movl(EBX, Address(ESP, kRegExpParamOffset));
2262 __ movl(EDI, Address(ESP, kStringParamOffset)); 2169 __ movl(EDI, Address(ESP, kStringParamOffset));
2263 __ LoadClassId(EDI, EDI); 2170 __ LoadClassId(EDI, EDI);
2264 __ SubImmediate(EDI, Immediate(kOneByteStringCid)); 2171 __ SubImmediate(EDI, Immediate(kOneByteStringCid));
2265 __ movl(EAX, FieldAddress(EBX, EDI, TIMES_4, RegExp::function_offset( 2172 __ movl(EAX,
2266 kOneByteStringCid, sticky))); 2173 FieldAddress(EBX, EDI, TIMES_4,
2174 RegExp::function_offset(kOneByteStringCid, sticky)));
2267 2175
2268 // Registers are now set up for the lazy compile stub. It expects the function 2176 // Registers are now set up for the lazy compile stub. It expects the function
2269 // in EAX, the argument descriptor in EDX, and IC-Data in ECX. 2177 // in EAX, the argument descriptor in EDX, and IC-Data in ECX.
2270 __ xorl(ECX, ECX); 2178 __ xorl(ECX, ECX);
2271 2179
2272 // Tail-call the function. 2180 // Tail-call the function.
2273 __ movl(EDI, FieldAddress(EAX, Function::entry_point_offset())); 2181 __ movl(EDI, FieldAddress(EAX, Function::entry_point_offset()));
2274 __ jmp(EDI); 2182 __ jmp(EDI);
2275 } 2183 }
2276 2184
2277
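Because the string class ids and the RegExp object's per-string-kind matcher slots are laid out in the same order, the scaled FieldAddress load above amounts to indexing a function table by (cid - kOneByteStringCid). A hedged sketch of the selection (table layout illustrative):

typedef void (*Matcher)();

Matcher SelectMatcher(Matcher matchers_by_cid[], intptr_t string_cid,
                      intptr_t k_one_byte_string_cid) {
  // CIDs and matcher slots share an order, so one indexed load suffices.
  return matchers_by_cid[string_cid - k_one_byte_string_cid];
}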
2278 // On stack: user tag (+1), return-address (+0). 2185 // On stack: user tag (+1), return-address (+0).
2279 void Intrinsifier::UserTag_makeCurrent(Assembler* assembler) { 2186 void Intrinsifier::UserTag_makeCurrent(Assembler* assembler) {
2280 // EDI: Isolate. 2187 // EDI: Isolate.
2281 __ LoadIsolate(EDI); 2188 __ LoadIsolate(EDI);
2282 // EAX: Current user tag. 2189 // EAX: Current user tag.
2283 __ movl(EAX, Address(EDI, Isolate::current_tag_offset())); 2190 __ movl(EAX, Address(EDI, Isolate::current_tag_offset()));
2284 // EAX: UserTag. 2191 // EAX: UserTag.
2285 __ movl(EBX, Address(ESP, +1 * kWordSize)); 2192 __ movl(EBX, Address(ESP, +1 * kWordSize));
2286 // Set Isolate::current_tag_. 2193 // Set Isolate::current_tag_.
2287 __ movl(Address(EDI, Isolate::current_tag_offset()), EBX); 2194 __ movl(Address(EDI, Isolate::current_tag_offset()), EBX);
2288 // EAX: UserTag's tag. 2195 // EAX: UserTag's tag.
2289 __ movl(EBX, FieldAddress(EBX, UserTag::tag_offset())); 2196 __ movl(EBX, FieldAddress(EBX, UserTag::tag_offset()));
2290 // Set Isolate::user_tag_. 2197 // Set Isolate::user_tag_.
2291 __ movl(Address(EDI, Isolate::user_tag_offset()), EBX); 2198 __ movl(Address(EDI, Isolate::user_tag_offset()), EBX);
2292 __ ret(); 2199 __ ret();
2293 } 2200 }
2294 2201
2295
2296 void Intrinsifier::UserTag_defaultTag(Assembler* assembler) { 2202 void Intrinsifier::UserTag_defaultTag(Assembler* assembler) {
2297 __ LoadIsolate(EAX); 2203 __ LoadIsolate(EAX);
2298 __ movl(EAX, Address(EAX, Isolate::default_tag_offset())); 2204 __ movl(EAX, Address(EAX, Isolate::default_tag_offset()));
2299 __ ret(); 2205 __ ret();
2300 } 2206 }
2301 2207
2302
2303 void Intrinsifier::Profiler_getCurrentTag(Assembler* assembler) { 2208 void Intrinsifier::Profiler_getCurrentTag(Assembler* assembler) {
2304 __ LoadIsolate(EAX); 2209 __ LoadIsolate(EAX);
2305 __ movl(EAX, Address(EAX, Isolate::current_tag_offset())); 2210 __ movl(EAX, Address(EAX, Isolate::current_tag_offset()));
2306 __ ret(); 2211 __ ret();
2307 } 2212 }
2308 2213
2309
2310 void Intrinsifier::Timeline_isDartStreamEnabled(Assembler* assembler) { 2214 void Intrinsifier::Timeline_isDartStreamEnabled(Assembler* assembler) {
2311 if (!FLAG_support_timeline) { 2215 if (!FLAG_support_timeline) {
2312 __ LoadObject(EAX, Bool::False()); 2216 __ LoadObject(EAX, Bool::False());
2313 __ ret(); 2217 __ ret();
2314 return; 2218 return;
2315 } 2219 }
2316 Label true_label; 2220 Label true_label;
2317 // Load TimelineStream*. 2221 // Load TimelineStream*.
2318 __ movl(EAX, Address(THR, Thread::dart_stream_offset())); 2222 __ movl(EAX, Address(THR, Thread::dart_stream_offset()));
2319 // Load uintptr_t from TimelineStream*. 2223 // Load uintptr_t from TimelineStream*.
2320 __ movl(EAX, Address(EAX, TimelineStream::enabled_offset())); 2224 __ movl(EAX, Address(EAX, TimelineStream::enabled_offset()));
2321 __ cmpl(EAX, Immediate(0)); 2225 __ cmpl(EAX, Immediate(0));
2322 __ j(NOT_ZERO, &true_label, Assembler::kNearJump); 2226 __ j(NOT_ZERO, &true_label, Assembler::kNearJump);
2323 // Not enabled. 2227 // Not enabled.
2324 __ LoadObject(EAX, Bool::False()); 2228 __ LoadObject(EAX, Bool::False());
2325 __ ret(); 2229 __ ret();
2326 // Enabled. 2230 // Enabled.
2327 __ Bind(&true_label); 2231 __ Bind(&true_label);
2328 __ LoadObject(EAX, Bool::True()); 2232 __ LoadObject(EAX, Bool::True());
2329 __ ret(); 2233 __ ret();
2330 } 2234 }
2331 2235
2332
2333 void Intrinsifier::ClearAsyncThreadStackTrace(Assembler* assembler) { 2236 void Intrinsifier::ClearAsyncThreadStackTrace(Assembler* assembler) {
2334 __ LoadObject(EAX, Object::null_object()); 2237 __ LoadObject(EAX, Object::null_object());
2335 __ movl(Address(THR, Thread::async_stack_trace_offset()), EAX); 2238 __ movl(Address(THR, Thread::async_stack_trace_offset()), EAX);
2336 __ ret(); 2239 __ ret();
2337 } 2240 }
2338 2241
2339
2340 void Intrinsifier::SetAsyncThreadStackTrace(Assembler* assembler) { 2242 void Intrinsifier::SetAsyncThreadStackTrace(Assembler* assembler) {
2341 __ movl(Address(THR, Thread::async_stack_trace_offset()), EAX); 2243 __ movl(Address(THR, Thread::async_stack_trace_offset()), EAX);
2342 __ LoadObject(EAX, Object::null_object()); 2244 __ LoadObject(EAX, Object::null_object());
2343 __ ret(); 2245 __ ret();
2344 } 2246 }
2345 2247
2346 #undef __ 2248 #undef __
2347 2249
2348 } // namespace dart 2250 } // namespace dart
2349 2251
2350 #endif // defined TARGET_ARCH_IA32 2252 #endif // defined TARGET_ARCH_IA32