Chromium Code Reviews

Diff: runtime/vm/intrinsifier_x64.cc

Issue 2974233002: VM: Re-format to use at most one newline between functions (Closed)
Patch Set: Rebase and merge. Created 3 years, 5 months ago.
// Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.

#include "vm/globals.h"  // Needed here to get TARGET_ARCH_X64.
#if defined(TARGET_ARCH_X64)

#include "vm/intrinsifier.h"

#include "vm/assembler.h"
(...skipping 10 matching lines...)
// When entering intrinsics code:
// R10: Arguments descriptor
// TOS: Return address
// The R10 register can be destroyed only if there is no slow-path, i.e.
// if the intrinsified method always executes a return.
// The RBP register should not be modified, because it is used by the profiler.
// The PP and THR registers (see constants_x64.h) must be preserved.

#define __ assembler->

intptr_t Intrinsifier::ParameterSlotFromSp() {
  return 0;
}

static bool IsABIPreservedRegister(Register reg) {
  return ((1 << reg) & CallingConventions::kCalleeSaveCpuRegisters) != 0;
}

void Intrinsifier::IntrinsicCallPrologue(Assembler* assembler) {
  ASSERT(IsABIPreservedRegister(CODE_REG));
  ASSERT(!IsABIPreservedRegister(ARGS_DESC_REG));
  ASSERT(IsABIPreservedRegister(CALLEE_SAVED_TEMP));
  ASSERT(CALLEE_SAVED_TEMP != CODE_REG);
  ASSERT(CALLEE_SAVED_TEMP != ARGS_DESC_REG);

  assembler->Comment("IntrinsicCallPrologue");
  assembler->movq(CALLEE_SAVED_TEMP, ARGS_DESC_REG);
}

void Intrinsifier::IntrinsicCallEpilogue(Assembler* assembler) {
  assembler->Comment("IntrinsicCallEpilogue");
  assembler->movq(ARGS_DESC_REG, CALLEE_SAVED_TEMP);
}

void Intrinsifier::ObjectArraySetIndexed(Assembler* assembler) {
  if (Isolate::Current()->type_checks()) {
    return;
  }

  Label fall_through;
  __ movq(RDX, Address(RSP, +1 * kWordSize));  // Value.
  __ movq(RCX, Address(RSP, +2 * kWordSize));  // Index.
  __ movq(RAX, Address(RSP, +3 * kWordSize));  // Array.
  __ testq(RCX, Immediate(kSmiTagMask));
  __ j(NOT_ZERO, &fall_through);
  // Range check.
  __ cmpq(RCX, FieldAddress(RAX, Array::length_offset()));
  // Runtime throws exception.
  __ j(ABOVE_EQUAL, &fall_through);
  // Note that RCX is a Smi, i.e., times 2.
  ASSERT(kSmiTagShift == 1);
  // Destroy RCX (ic data) as we will not continue in the function.
  __ StoreIntoObject(RAX, FieldAddress(RAX, RCX, TIMES_4, Array::data_offset()),
                     RDX);
  // Caller is responsible for preserving the value if necessary.
  __ ret();
  __ Bind(&fall_through);
}

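A note on the TIMES_4 scaling above: object-array slots are 8 bytes on x64, but the index register still holds a tagged Smi, which is already the value doubled. A minimal C++ sketch of the arithmetic, assuming the x64 Smi encoding asserted above (kSmiTag == 0, kSmiTagShift == 1); the helper names are illustrative, not the VM's:

#include <cstdint>

constexpr intptr_t kTagShift = 1;  // Assumed: matches ASSERT(kSmiTagShift == 1).

intptr_t SmiTag(intptr_t value) { return value << kTagShift; }  // 2 * value.
intptr_t SmiUntag(intptr_t raw) { return raw >> kTagShift; }

// A tagged index is 2 * index, so scaling it by 4 (TIMES_4) gives 8 * index,
// exactly the byte offset of an 8-byte array slot.
intptr_t SlotByteOffset(intptr_t tagged_index) {
  return tagged_index * 4;  // == SmiUntag(tagged_index) * 8
}
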
// Allocate a GrowableObjectArray using the backing array specified.
// On stack: type argument (+2), data (+1), return-address (+0).
void Intrinsifier::GrowableArray_Allocate(Assembler* assembler) {
  // This snippet of inlined code uses the following registers:
  // RAX, RCX, R13
  // and the newly allocated object is returned in RAX.
  const intptr_t kTypeArgumentsOffset = 2 * kWordSize;
  const intptr_t kArrayOffset = 1 * kWordSize;
  Label fall_through;

(...skipping 15 matching lines...)
      RAX, FieldAddress(RAX, GrowableObjectArray::type_arguments_offset()),
      RCX);

  // Set the length field in the growable array object to 0.
  __ ZeroInitSmiField(FieldAddress(RAX, GrowableObjectArray::length_offset()));
  __ ret();  // returns the newly allocated object in RAX.

  __ Bind(&fall_through);
}

// Add an element to growable array if it doesn't need to grow, otherwise
// call into regular code.
// On stack: growable array (+2), value (+1), return-address (+0).
void Intrinsifier::GrowableArray_add(Assembler* assembler) {
  // In checked mode we need to check the incoming argument.
  if (Isolate::Current()->type_checks()) return;
  Label fall_through;
  __ movq(RAX, Address(RSP, +2 * kWordSize));  // Array.
  __ movq(RCX, FieldAddress(RAX, GrowableObjectArray::length_offset()));
  // RCX: length.
  __ movq(RDX, FieldAddress(RAX, GrowableObjectArray::data_offset()));
  // RDX: data.
  // Compare length with capacity.
  __ cmpq(RCX, FieldAddress(RDX, Array::length_offset()));
  __ j(EQUAL, &fall_through);  // Must grow data.
  // len = len + 1;
  __ IncrementSmiField(FieldAddress(RAX, GrowableObjectArray::length_offset()),
                       1);
  __ movq(RAX, Address(RSP, +1 * kWordSize));  // Value.
  ASSERT(kSmiTagShift == 1);
  __ StoreIntoObject(RDX, FieldAddress(RDX, RCX, TIMES_4, Array::data_offset()),
                     RAX);
  __ LoadObject(RAX, Object::null_object());
  __ ret();
  __ Bind(&fall_through);
}

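The fast path above, restated as a hedged C++ sketch; the struct layout here is a stand-in for the VM's real GrowableObjectArray and Array objects, not their actual definitions:

#include <cstdint>

struct BackingArray {
  intptr_t length;  // Capacity of the backing store.
  void* slots[1];   // Element storage (variable-length in reality).
};

struct GrowableArrayLike {
  intptr_t length;  // Elements in use.
  BackingArray* data;
};

// Returns false when length == capacity, mirroring the jump to fall_through
// (the slow path grows the backing array); otherwise stores and bumps length.
bool AddFastPath(GrowableArrayLike* array, void* value) {
  if (array->length == array->data->length) return false;  // Must grow.
  array->data->slots[array->length++] = value;  // Real code adds a write barrier.
  return true;  // Intrinsic returns null to the caller.
}
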
#define TYPED_ARRAY_ALLOCATION(type_name, cid, max_len, scale_factor)         \
  Label fall_through;                                                         \
  const intptr_t kArrayLengthStackOffset = 1 * kWordSize;                     \
  NOT_IN_PRODUCT(__ MaybeTraceAllocation(cid, &fall_through, false));         \
  __ movq(RDI, Address(RSP, kArrayLengthStackOffset)); /* Array length. */    \
  /* Check that length is a positive Smi. */                                  \
  /* RDI: requested array length argument. */                                 \
  __ testq(RDI, Immediate(kSmiTagMask));                                      \
  __ j(NOT_ZERO, &fall_through);                                              \
  __ cmpq(RDI, Immediate(0));                                                 \
(...skipping 75 matching lines...)
  __ cmpq(RDI, RCX);                                                          \
  __ j(ABOVE_EQUAL, &done, Assembler::kNearJump);                             \
  __ movq(Address(RDI, 0), RBX);                                              \
  __ addq(RDI, Immediate(kWordSize));                                         \
  __ jmp(&init_loop, Assembler::kNearJump);                                   \
  __ Bind(&done);                                                             \
                                                                              \
  __ ret();                                                                   \
  __ Bind(&fall_through);

static ScaleFactor GetScaleFactor(intptr_t size) {
  switch (size) {
    case 1:
      return TIMES_1;
    case 2:
      return TIMES_2;
    case 4:
      return TIMES_4;
    case 8:
      return TIMES_8;
    case 16:
      return TIMES_16;
  }
  UNREACHABLE();
  return static_cast<ScaleFactor>(0);
}

#define TYPED_DATA_ALLOCATOR(clazz)                                           \
  void Intrinsifier::TypedData_##clazz##_factory(Assembler* assembler) {      \
    intptr_t size = TypedData::ElementSizeInBytes(kTypedData##clazz##Cid);    \
    intptr_t max_len = TypedData::MaxElements(kTypedData##clazz##Cid);        \
    ScaleFactor scale = GetScaleFactor(size);                                 \
    TYPED_ARRAY_ALLOCATION(TypedData, kTypedData##clazz##Cid, max_len, scale); \
  }
CLASS_LIST_TYPED_DATA(TYPED_DATA_ALLOCATOR)
#undef TYPED_DATA_ALLOCATOR

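For readers unfamiliar with the X-macro pattern used by CLASS_LIST_TYPED_DATA: the list macro invokes its argument once per class name, stamping out one factory per typed-data type. A toy expansion with a hypothetical two-entry list:

// Hypothetical list; the real CLASS_LIST_TYPED_DATA enumerates all
// typed-data classes (Int8Array, Uint8Array, Float64Array, ...).
#define CLASS_LIST_EXAMPLE(V) \
  V(Int8Array)                \
  V(Float64Array)

#define DECLARE_FACTORY(clazz) void TypedData_##clazz##_factory();
CLASS_LIST_EXAMPLE(DECLARE_FACTORY)  // Declares one factory per entry.
#undef DECLARE_FACTORY
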
// Tests whether the two topmost arguments are Smis; jumps to label not_smi
// if not. Topmost argument is in RAX.
static void TestBothArgumentsSmis(Assembler* assembler, Label* not_smi) {
  __ movq(RAX, Address(RSP, +1 * kWordSize));
  __ movq(RCX, Address(RSP, +2 * kWordSize));
  __ orq(RCX, RAX);
  __ testq(RCX, Immediate(kSmiTagMask));
  __ j(NOT_ZERO, not_smi);
}

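Why a single test suffices: a Smi carries tag bit 0, so the OR of two words has a clear tag bit exactly when both operands do. A sketch, assuming the tag constants above:

#include <cstdint>

constexpr uintptr_t kTagMask = 0x1;  // Assumed value of kSmiTagMask.

bool BothAreSmis(uintptr_t a, uintptr_t b) {
  return ((a | b) & kTagMask) == 0;  // One test and branch instead of two.
}
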
void Intrinsifier::Integer_addFromInteger(Assembler* assembler) {
  Label fall_through;
  TestBothArgumentsSmis(assembler, &fall_through);
  // RAX contains right argument.
  __ addq(RAX, Address(RSP, +2 * kWordSize));
  __ j(OVERFLOW, &fall_through, Assembler::kNearJump);
  // Result is in RAX.
  __ ret();
  __ Bind(&fall_through);
}

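Tagged Smi addition needs no untagging: with v stored as 2*v, 2*a + 2*b == 2*(a + b) is already a valid tagged Smi; multiplication (Integer_mulFromInteger below) untags one operand first so the product carries a single factor of two. A sketch using GCC/Clang overflow builtins to mirror the OVERFLOW checks (compiler-specific, an assumption of this sketch):

#include <cstdint>

// Returns false on overflow, mirroring the jump to fall_through.
bool TaggedAdd(int64_t a_tagged, int64_t b_tagged, int64_t* out) {
  return !__builtin_add_overflow(a_tagged, b_tagged, out);
}

bool TaggedMul(int64_t a_tagged, int64_t b_tagged, int64_t* out) {
  // (2*a) * b == 2*(a*b): untag one side, as SmiUntag before imulq does.
  return !__builtin_mul_overflow(a_tagged >> 1, b_tagged, out);
}
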
void Intrinsifier::Integer_add(Assembler* assembler) {
  Integer_addFromInteger(assembler);
}

void Intrinsifier::Integer_subFromInteger(Assembler* assembler) {
  Label fall_through;
  TestBothArgumentsSmis(assembler, &fall_through);
  // RAX contains right argument, which is the actual minuend of subtraction.
  __ subq(RAX, Address(RSP, +2 * kWordSize));
  __ j(OVERFLOW, &fall_through, Assembler::kNearJump);
  // Result is in RAX.
  __ ret();
  __ Bind(&fall_through);
}

void Intrinsifier::Integer_sub(Assembler* assembler) {
  Label fall_through;
  TestBothArgumentsSmis(assembler, &fall_through);
  // RAX contains right argument, which is the actual subtrahend of subtraction.
  __ movq(RCX, RAX);
  __ movq(RAX, Address(RSP, +2 * kWordSize));
  __ subq(RAX, RCX);
  __ j(OVERFLOW, &fall_through, Assembler::kNearJump);
  // Result is in RAX.
  __ ret();
  __ Bind(&fall_through);
}

void Intrinsifier::Integer_mulFromInteger(Assembler* assembler) {
  Label fall_through;
  TestBothArgumentsSmis(assembler, &fall_through);
  // RAX is the right argument.
  ASSERT(kSmiTag == 0);  // Adjust code below if not the case.
  __ SmiUntag(RAX);
  __ imulq(RAX, Address(RSP, +2 * kWordSize));
  __ j(OVERFLOW, &fall_through, Assembler::kNearJump);
  // Result is in RAX.
  __ ret();
  __ Bind(&fall_through);
}

void Intrinsifier::Integer_mul(Assembler* assembler) {
  Integer_mulFromInteger(assembler);
}

// Optimizations:
// - result is 0 if:
//   - left is 0
//   - left equals right
// - result is left if
//   - left > 0 && left < right
// RAX: Tagged left (dividend).
// RCX: Tagged right (divisor).
// Returns:
// RAX: Untagged fallthrough result (remainder to be adjusted), or
(...skipping 43 matching lines...)
  // Divide using 64bit idiv.
  __ Bind(&not_32bit);
  __ SmiUntag(RAX);
  __ SmiUntag(RCX);
  __ cqo();
  __ idivq(RCX);
  __ movq(RAX, RDX);
  __ Bind(&done);
}

// Implementation:
//   res = left % right;
//   if (res < 0) {
//     if (right < 0) {
//       res = res - right;
//     } else {
//       res = res + right;
//     }
//   }
void Intrinsifier::Integer_moduloFromInteger(Assembler* assembler) {
(...skipping 22 matching lines...)
  __ ret();

  __ Bind(&subtract);
  __ subq(RAX, RCX);
  __ SmiTag(RAX);
  __ ret();

  __ Bind(&fall_through);
}

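The pseudocode above converts C-style truncated remainder (whose sign follows the dividend) into Dart's non-negative modulo. A direct C++ rendering of the same adjustment:

#include <cstdint>

int64_t DartModulo(int64_t left, int64_t right) {
  int64_t res = left % right;  // May be negative when left is negative.
  if (res < 0) {
    res += (right < 0) ? -right : right;  // Add |right|; result is >= 0.
  }
  return res;
}
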
void Intrinsifier::Integer_truncDivide(Assembler* assembler) {
  Label fall_through, not_32bit;
  TestBothArgumentsSmis(assembler, &fall_through);
  // RAX: right argument (divisor).
  __ cmpq(RAX, Immediate(0));
  __ j(EQUAL, &fall_through, Assembler::kNearJump);
  __ movq(RCX, RAX);
  __ movq(RAX, Address(RSP, +2 * kWordSize));  // Left argument (dividend).

  // Check if both operands fit into 32 bits, as idiv with 64-bit operands
(...skipping 26 matching lines...)
  __ popq(RDX);
  // Check the corner case of dividing MIN_SMI by -1, in which case we
  // cannot tag the result.
  __ cmpq(RAX, Immediate(0x4000000000000000));
  __ j(EQUAL, &fall_through);
  __ SmiTag(RAX);
  __ ret();
  __ Bind(&fall_through);
}

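Why 0x4000000000000000 is the one value checked: a 63-bit Smi spans -(2^62) .. 2^62 - 1, and the only truncating quotient that escapes this range is MIN_SMI / -1 == 2^62. A sketch of the range check, assuming that encoding:

#include <cstdint>

constexpr int64_t kSmiMin = -(INT64_C(1) << 62);
constexpr int64_t kSmiMax = (INT64_C(1) << 62) - 1;

// True iff the truncating quotient can be re-tagged as a Smi.
bool QuotientFitsInSmi(int64_t left, int64_t right) {
  return !(left == kSmiMin && right == -1);  // Only 2^62 > kSmiMax overflows.
}
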
void Intrinsifier::Integer_negate(Assembler* assembler) {
  Label fall_through;
  __ movq(RAX, Address(RSP, +1 * kWordSize));
  __ testq(RAX, Immediate(kSmiTagMask));
  __ j(NOT_ZERO, &fall_through, Assembler::kNearJump);  // Non-smi value.
  __ negq(RAX);
  __ j(OVERFLOW, &fall_through, Assembler::kNearJump);
  // Result is in RAX.
  __ ret();
  __ Bind(&fall_through);
}

void Intrinsifier::Integer_bitAndFromInteger(Assembler* assembler) {
  Label fall_through;
  TestBothArgumentsSmis(assembler, &fall_through);
  // RAX is the right argument.
  __ andq(RAX, Address(RSP, +2 * kWordSize));
  // Result is in RAX.
  __ ret();
  __ Bind(&fall_through);
}

void Intrinsifier::Integer_bitAnd(Assembler* assembler) {
  Integer_bitAndFromInteger(assembler);
}

void Intrinsifier::Integer_bitOrFromInteger(Assembler* assembler) {
  Label fall_through;
  TestBothArgumentsSmis(assembler, &fall_through);
  // RAX is the right argument.
  __ orq(RAX, Address(RSP, +2 * kWordSize));
  // Result is in RAX.
  __ ret();
  __ Bind(&fall_through);
}

void Intrinsifier::Integer_bitOr(Assembler* assembler) {
  Integer_bitOrFromInteger(assembler);
}

void Intrinsifier::Integer_bitXorFromInteger(Assembler* assembler) {
  Label fall_through;
  TestBothArgumentsSmis(assembler, &fall_through);
  // RAX is the right argument.
  __ xorq(RAX, Address(RSP, +2 * kWordSize));
  // Result is in RAX.
  __ ret();
  __ Bind(&fall_through);
}

void Intrinsifier::Integer_bitXor(Assembler* assembler) {
  Integer_bitXorFromInteger(assembler);
}

void Intrinsifier::Integer_shl(Assembler* assembler) {
  ASSERT(kSmiTagShift == 1);
  ASSERT(kSmiTag == 0);
  Label fall_through, overflow;
  TestBothArgumentsSmis(assembler, &fall_through);
  // Shift value is in RAX. Compare with tagged Smi.
  __ cmpq(RAX, Immediate(Smi::RawValue(Smi::kBits)));
  __ j(ABOVE_EQUAL, &fall_through, Assembler::kNearJump);

  __ SmiUntag(RAX);
(...skipping 11 matching lines...)

  // RAX is a correctly tagged Smi.
  __ ret();

  __ Bind(&overflow);
  // Mint is rarely used on x64 (only for integers requiring 64 bits instead
  // of the 63 bits a Smi can represent).
  __ Bind(&fall_through);
}

static void CompareIntegers(Assembler* assembler, Condition true_condition) {
  Label fall_through, true_label;
  TestBothArgumentsSmis(assembler, &fall_through);
  // RAX contains the right argument.
  __ cmpq(Address(RSP, +2 * kWordSize), RAX);
  __ j(true_condition, &true_label, Assembler::kNearJump);
  __ LoadObject(RAX, Bool::False());
  __ ret();
  __ Bind(&true_label);
  __ LoadObject(RAX, Bool::True());
  __ ret();
  __ Bind(&fall_through);
}

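CompareIntegers can compare tagged values directly because tagging is monotonic: for Smis a and b, a < b iff 2*a < 2*b. A one-line sketch:

#include <cstdint>

bool TaggedLess(int64_t a_tagged, int64_t b_tagged) {
  return a_tagged < b_tagged;  // Same ordering as the untagged values.
}
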
void Intrinsifier::Integer_lessThan(Assembler* assembler) {
  CompareIntegers(assembler, LESS);
}

void Intrinsifier::Integer_greaterThanFromInt(Assembler* assembler) {
  CompareIntegers(assembler, LESS);
}

void Intrinsifier::Integer_greaterThan(Assembler* assembler) {
  CompareIntegers(assembler, GREATER);
}

void Intrinsifier::Integer_lessEqualThan(Assembler* assembler) {
  CompareIntegers(assembler, LESS_EQUAL);
}

void Intrinsifier::Integer_greaterEqualThan(Assembler* assembler) {
  CompareIntegers(assembler, GREATER_EQUAL);
}

// This is called for Smi, Mint and Bigint receivers. The right argument
// can be Smi, Mint, Bigint or double.
void Intrinsifier::Integer_equalToInteger(Assembler* assembler) {
  Label fall_through, true_label, check_for_mint;
  const intptr_t kReceiverOffset = 2;
  const intptr_t kArgumentOffset = 1;

  // For integer receiver '===' check first.
  __ movq(RAX, Address(RSP, +kArgumentOffset * kWordSize));
  __ movq(RCX, Address(RSP, +kReceiverOffset * kWordSize));
(...skipping 34 matching lines...)
  __ testq(RAX, Immediate(kSmiTagMask));
  __ j(NOT_ZERO, &fall_through);
  // Smi == Mint -> false.
  __ LoadObject(RAX, Bool::False());
  __ ret();
  // TODO(srdjan): Implement Mint == Mint comparison.

  __ Bind(&fall_through);
}

void Intrinsifier::Integer_equal(Assembler* assembler) {
  Integer_equalToInteger(assembler);
}

void Intrinsifier::Integer_sar(Assembler* assembler) {
  Label fall_through, shift_count_ok;
  TestBothArgumentsSmis(assembler, &fall_through);
  const Immediate& count_limit = Immediate(0x3F);
  // Check that the count is not larger than what the hardware can handle.
  // For shifting right a Smi the result is the same for all numbers
  // >= count_limit.
  __ SmiUntag(RAX);
  // Negative counts throw exception.
  __ cmpq(RAX, Immediate(0));
  __ j(LESS, &fall_through, Assembler::kNearJump);
  __ cmpq(RAX, count_limit);
  __ j(LESS_EQUAL, &shift_count_ok, Assembler::kNearJump);
  __ movq(RAX, count_limit);
  __ Bind(&shift_count_ok);
  __ movq(RCX, RAX);  // Shift amount must be in RCX.
  __ movq(RAX, Address(RSP, +2 * kWordSize));  // Value.
  __ SmiUntag(RAX);  // Value.
  __ sarq(RAX, RCX);
  __ SmiTag(RAX);
  __ ret();
  __ Bind(&fall_through);
}

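Clamping the count to 63 (count_limit) is sound for arithmetic right shifts: shifting any 64-bit value by more than 63 would yield the same 0 or -1 that shifting by 63 does, and hardware sarq would otherwise use only the low 6 bits of the count. A sketch of the clamped shift:

#include <cstdint>

int64_t ArithmeticShiftRight(int64_t value, int64_t count) {
  // Negative counts were already routed to the slow path (they throw).
  if (count > 63) count = 63;  // Saturate: the result is 0 or -1 either way.
  return value >> count;       // Arithmetic shift on two's-complement values.
}
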
// Argument is Smi (receiver).
void Intrinsifier::Smi_bitNegate(Assembler* assembler) {
  __ movq(RAX, Address(RSP, +1 * kWordSize));  // Receiver.
  __ notq(RAX);
  __ andq(RAX, Immediate(~kSmiTagMask));  // Remove inverted smi-tag.
  __ ret();
}

void Intrinsifier::Smi_bitLength(Assembler* assembler) {
  ASSERT(kSmiTagShift == 1);
  __ movq(RAX, Address(RSP, +1 * kWordSize));  // Receiver.
  // XOR with sign bit to complement bits if value is negative.
  __ movq(RCX, RAX);
  __ sarq(RCX, Immediate(63));  // All 0 or all 1.
  __ xorq(RAX, RCX);
  // BSR does not write the destination register if source is zero. Put a 1 in
  // the Smi tag bit to ensure BSR writes to destination register.
  __ orq(RAX, Immediate(kSmiTagMask));
  __ bsrq(RAX, RAX);
  __ SmiTag(RAX);
  __ ret();
}

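The bitLength sequence above, restated in C++ (a sketch, not the VM's code): negative values are complemented through their sign mask, and forcing the tag bit guarantees BSR always finds a set bit. For a tagged input 2*v, the resulting bit index equals v's bit length (the builtin is GCC/Clang-specific):

#include <cstdint>

int64_t SmiBitLength(int64_t tagged) {         // tagged == 2*v, tag bit 0.
  int64_t sign = tagged >> 63;                 // All zeros or all ones.
  uint64_t x = (uint64_t)(tagged ^ sign) | 1;  // Complement negatives; set bit 0.
  return 63 - __builtin_clzll(x);              // Index of highest set bit (BSR).
}
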
void Intrinsifier::Smi_bitAndFromSmi(Assembler* assembler) {
  Integer_bitAndFromInteger(assembler);
}

void Intrinsifier::Bigint_lsh(Assembler* assembler) {
  // static void _lsh(Uint32List x_digits, int x_used, int n,
  //                  Uint32List r_digits)

  __ movq(RDI, Address(RSP, 4 * kWordSize));  // x_digits
  __ movq(R8, Address(RSP, 3 * kWordSize));   // x_used is Smi
  __ subq(R8, Immediate(2));  // x_used > 0, Smi. R8 = x_used - 1, round up.
  __ sarq(R8, Immediate(2));  // R8 + 1 = number of digit pairs to read.
  __ movq(RCX, Address(RSP, 2 * kWordSize));  // n is Smi
  __ SmiUntag(RCX);
  __ movq(RBX, Address(RSP, 1 * kWordSize));  // r_digits
  __ movq(RSI, RCX);
  __ sarq(RSI, Immediate(6));  // RSI = n ~/ (2*_DIGIT_BITS).
  __ leaq(RBX, FieldAddress(RBX, RSI, TIMES_8, TypedData::data_offset()));
  __ xorq(RAX, RAX);  // RAX = 0.
  __ movq(RDX, FieldAddress(RDI, R8, TIMES_8, TypedData::data_offset()));
  __ shldq(RAX, RDX, RCX);
  __ movq(Address(RBX, R8, TIMES_8, 2 * Bigint::kBytesPerDigit), RAX);
  Label last;
  __ cmpq(R8, Immediate(0));
  __ j(EQUAL, &last, Assembler::kNearJump);
  Label loop;
  __ Bind(&loop);
  __ movq(RAX, RDX);
  __ movq(RDX,
          FieldAddress(RDI, R8, TIMES_8,
                       TypedData::data_offset() - 2 * Bigint::kBytesPerDigit));
  __ shldq(RAX, RDX, RCX);
  __ movq(Address(RBX, R8, TIMES_8, 0), RAX);
  __ decq(R8);
  __ j(NOT_ZERO, &loop, Assembler::kNearJump);
  __ Bind(&last);
  __ shldq(RDX, R8, RCX);  // R8 == 0.
  __ movq(Address(RBX, 0), RDX);
  // Returning Object::null() is not required, since this method is private.
  __ ret();
}

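The SHLD loop above walks digit pairs from most to least significant, each output word combining bits from two adjacent input words. The same structure in portable C++, with the word-granularity offset (n ~/ 64) omitted for brevity; shift_bits must stay in 1..63, since shifting a 64-bit word by 64 is undefined in C++:

#include <cstdint>

// r must have room for n_words + 1 output words.
void ShiftLeftWords(const uint64_t* x, int n_words, int shift_bits,
                    uint64_t* r) {
  r[n_words] = x[n_words - 1] >> (64 - shift_bits);  // Carry-out word.
  for (int i = n_words - 1; i > 0; --i) {
    r[i] = (x[i] << shift_bits) | (x[i - 1] >> (64 - shift_bits));  // SHLD.
  }
  r[0] = x[0] << shift_bits;
}
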
void Intrinsifier::Bigint_rsh(Assembler* assembler) {
  // static void _rsh(Uint32List x_digits, int x_used, int n,
  //                  Uint32List r_digits)

  __ movq(RDI, Address(RSP, 4 * kWordSize));  // x_digits
  __ movq(RCX, Address(RSP, 2 * kWordSize));  // n is Smi
  __ SmiUntag(RCX);
  __ movq(RBX, Address(RSP, 1 * kWordSize));  // r_digits
  __ movq(RDX, RCX);
  __ sarq(RDX, Immediate(6));  // RDX = n ~/ (2*_DIGIT_BITS).
(...skipping 16 matching lines...)
  __ movq(Address(RBX, RSI, TIMES_8, 0), RAX);
  __ incq(RSI);
  __ j(NOT_ZERO, &loop, Assembler::kNearJump);
  __ Bind(&last);
  __ shrdq(RDX, RSI, RCX);  // RSI == 0.
  __ movq(Address(RBX, 0), RDX);
  // Returning Object::null() is not required, since this method is private.
  __ ret();
}

void Intrinsifier::Bigint_absAdd(Assembler* assembler) {
  // static void _absAdd(Uint32List digits, int used,
  //                     Uint32List a_digits, int a_used,
  //                     Uint32List r_digits)

  __ movq(RDI, Address(RSP, 5 * kWordSize));  // digits
  __ movq(R8, Address(RSP, 4 * kWordSize));   // used is Smi
  __ addq(R8, Immediate(2));  // used > 0, Smi. R8 = used + 1, round up.
  __ sarq(R8, Immediate(2));  // R8 = number of digit pairs to process.
  __ movq(RSI, Address(RSP, 3 * kWordSize));  // a_digits
(...skipping 35 matching lines...)
  Label done;
  __ j(NOT_CARRY, &done);
  __ movq(FieldAddress(RBX, RDX, TIMES_8, TypedData::data_offset()),
          Immediate(1));

  __ Bind(&done);
  // Returning Object::null() is not required, since this method is private.
  __ ret();
}

void Intrinsifier::Bigint_absSub(Assembler* assembler) {
  // static void _absSub(Uint32List digits, int used,
  //                     Uint32List a_digits, int a_used,
  //                     Uint32List r_digits)

  __ movq(RDI, Address(RSP, 5 * kWordSize));  // digits
  __ movq(R8, Address(RSP, 4 * kWordSize));   // used is Smi
  __ addq(R8, Immediate(2));  // used > 0, Smi. R8 = used + 1, round up.
  __ sarq(R8, Immediate(2));  // R8 = number of digit pairs to process.
  __ movq(RSI, Address(RSP, 3 * kWordSize));  // a_digits
(...skipping 29 matching lines...)
  __ movq(FieldAddress(RBX, RDX, TIMES_8, TypedData::data_offset()), RAX);
  __ incq(RDX);  // Does not affect carry flag.
  __ decq(R8);   // Does not affect carry flag.
  __ j(NOT_ZERO, &carry_loop, Assembler::kNearJump);

  __ Bind(&done);
  // Returning Object::null() is not required, since this method is private.
  __ ret();
}

void Intrinsifier::Bigint_mulAdd(Assembler* assembler) {
  // Pseudo code:
  // static int _mulAdd(Uint32List x_digits, int xi,
  //                    Uint32List m_digits, int i,
  //                    Uint32List a_digits, int j, int n) {
  //   uint64_t x = x_digits[xi >> 1 .. (xi >> 1) + 1];  // xi is Smi and even.
  //   if (x == 0 || n == 0) {
  //     return 2;
  //   }
  //   uint64_t* mip = &m_digits[i >> 1];  // i is Smi and even.
(...skipping 87 matching lines...)
  __ Bind(&propagate_carry_loop);
  __ addq(RSI, Immediate(2 * Bigint::kBytesPerDigit));
  __ incq(Address(RSI, 0));  // c == 0 or 1
  __ j(CARRY, &propagate_carry_loop, Assembler::kNearJump);

  __ Bind(&done);
  __ movq(RAX, Immediate(Smi::RawValue(2)));  // Two digits processed.
  __ ret();
}

void Intrinsifier::Bigint_sqrAdd(Assembler* assembler) {
  // Pseudo code:
  // static int _sqrAdd(Uint32List x_digits, int i,
  //                    Uint32List a_digits, int used) {
  //   uint64_t* xip = &x_digits[i >> 1];  // i is Smi and even.
  //   uint64_t x = *xip++;
  //   if (x == 0) return 2;
  //   uint64_t* ajp = &a_digits[i];  // j == 2*i, i is Smi.
  //   uint64_t aj = *ajp;
  //   uint128_t t = x*x + aj;
(...skipping 99 matching lines...)
  //   *ajp++ = low64(t)
  //   *ajp = high64(t)
  __ movq(Address(RSI, 0), R12);
  __ movq(Address(RSI, 2 * Bigint::kBytesPerDigit), R13);

  __ Bind(&x_zero);
  __ movq(RAX, Immediate(Smi::RawValue(2)));  // Two digits processed.
  __ ret();
}

void Intrinsifier::Bigint_estQuotientDigit(Assembler* assembler) {
  // Pseudo code:
  // static int _estQuotientDigit(Uint32List args, Uint32List digits, int i) {
  //   uint64_t yt = args[_YT_LO .. _YT];  // _YT_LO == 0, _YT == 1.
  //   uint64_t* dp = &digits[(i >> 1) - 1];  // i is Smi.
  //   uint64_t dh = dp[0];  // dh == digits[(i >> 1) - 1 .. i >> 1].
  //   uint64_t qd;
  //   if (dh == yt) {
  //     qd = (DIGIT_MASK << 32) | DIGIT_MASK;
  //   } else {
(...skipping 36 matching lines...)
  __ Bind(&return_qd);
  // args[2..3] = qd
  __ movq(
      FieldAddress(RDI, TypedData::data_offset() + 2 * Bigint::kBytesPerDigit),
      RAX);

  __ movq(RAX, Immediate(Smi::RawValue(2)));  // Two digits processed.
  __ ret();
}

void Intrinsifier::Montgomery_mulMod(Assembler* assembler) {
  // Pseudo code:
  // static int _mulMod(Uint32List args, Uint32List digits, int i) {
  //   uint64_t rho = args[_RHO .. _RHO_HI];  // _RHO == 2, _RHO_HI == 3.
  //   uint64_t d = digits[i >> 1 .. (i >> 1) + 1];  // i is Smi and even.
  //   uint128_t t = rho*d;
  //   args[_MU .. _MU_HI] = t mod DIGIT_BASE^2;  // _MU == 4, _MU_HI == 5.
  //   return 2;
  // }

(...skipping 14 matching lines...)

  // args[4 .. 5] = t mod DIGIT_BASE^2 = low64(t)
  __ movq(
      FieldAddress(RDI, TypedData::data_offset() + 4 * Bigint::kBytesPerDigit),
      RAX);

  __ movq(RAX, Immediate(Smi::RawValue(2)));  // Two digits processed.
  __ ret();
}

// Check if the last argument is a double; jump to label 'is_smi' if it is a
// Smi (easy to convert to double), otherwise jump to label 'not_double_smi'.
// Returns the last argument in RAX.
static void TestLastArgumentIsDouble(Assembler* assembler,
                                     Label* is_smi,
                                     Label* not_double_smi) {
  __ movq(RAX, Address(RSP, +1 * kWordSize));
  __ testq(RAX, Immediate(kSmiTagMask));
  __ j(ZERO, is_smi);  // Jump if Smi.
  __ CompareClassId(RAX, kDoubleCid);
  __ j(NOT_EQUAL, not_double_smi);
  // Fall through if double.
}

// Both arguments on stack, left argument is a double, right argument is of
// unknown type. Return true or false object in RAX. Any NaN argument
// returns false. Any non-double argument causes control flow to fall through
// to the slow case (compiled method body).
static void CompareDoubles(Assembler* assembler, Condition true_condition) {
  Label fall_through, is_false, is_true, is_smi, double_op;
  TestLastArgumentIsDouble(assembler, &is_smi, &fall_through);
  // Both arguments are double, right operand is in RAX.
  __ movsd(XMM1, FieldAddress(RAX, Double::value_offset()));
  __ Bind(&double_op);
  __ movq(RAX, Address(RSP, +2 * kWordSize));  // Left argument.
  __ movsd(XMM0, FieldAddress(RAX, Double::value_offset()));
  __ comisd(XMM0, XMM1);
  __ j(PARITY_EVEN, &is_false, Assembler::kNearJump);  // NaN -> false.
  __ j(true_condition, &is_true, Assembler::kNearJump);
  // Fall through false.
  __ Bind(&is_false);
  __ LoadObject(RAX, Bool::False());
  __ ret();
  __ Bind(&is_true);
  __ LoadObject(RAX, Bool::True());
  __ ret();
  __ Bind(&is_smi);
  __ SmiUntag(RAX);
  __ cvtsi2sdq(XMM1, RAX);
  __ jmp(&double_op);
  __ Bind(&fall_through);
}

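The PARITY_EVEN branch encodes IEEE 754 semantics: comisd raises the parity flag for unordered (NaN) operands, and every ordered comparison involving a NaN must answer false. The C++ equivalent is simply:

bool DoubleLess(double left, double right) {
  return left < right;  // false whenever either operand is NaN (unordered).
}
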
void Intrinsifier::Double_greaterThan(Assembler* assembler) {
  CompareDoubles(assembler, ABOVE);
}

void Intrinsifier::Double_greaterEqualThan(Assembler* assembler) {
  CompareDoubles(assembler, ABOVE_EQUAL);
}

void Intrinsifier::Double_lessThan(Assembler* assembler) {
  CompareDoubles(assembler, BELOW);
}

void Intrinsifier::Double_equal(Assembler* assembler) {
  CompareDoubles(assembler, EQUAL);
}

void Intrinsifier::Double_lessEqualThan(Assembler* assembler) {
  CompareDoubles(assembler, BELOW_EQUAL);
}

// Expects left argument to be double (receiver). Right argument is unknown.
// Both arguments are on stack.
static void DoubleArithmeticOperations(Assembler* assembler, Token::Kind kind) {
  Label fall_through, is_smi, double_op;
  TestLastArgumentIsDouble(assembler, &is_smi, &fall_through);
  // Both arguments are double, right operand is in RAX.
  __ movsd(XMM1, FieldAddress(RAX, Double::value_offset()));
  __ Bind(&double_op);
  __ movq(RAX, Address(RSP, +2 * kWordSize));  // Left argument.
  __ movsd(XMM0, FieldAddress(RAX, Double::value_offset()));
(...skipping 20 matching lines...)
                 R13);
  __ movsd(FieldAddress(RAX, Double::value_offset()), XMM0);
  __ ret();
  __ Bind(&is_smi);
  __ SmiUntag(RAX);
  __ cvtsi2sdq(XMM1, RAX);
  __ jmp(&double_op);
  __ Bind(&fall_through);
}

void Intrinsifier::Double_add(Assembler* assembler) {
  DoubleArithmeticOperations(assembler, Token::kADD);
}

void Intrinsifier::Double_mul(Assembler* assembler) {
  DoubleArithmeticOperations(assembler, Token::kMUL);
}

void Intrinsifier::Double_sub(Assembler* assembler) {
  DoubleArithmeticOperations(assembler, Token::kSUB);
}

void Intrinsifier::Double_div(Assembler* assembler) {
  DoubleArithmeticOperations(assembler, Token::kDIV);
}

void Intrinsifier::Double_mulFromInteger(Assembler* assembler) {
  Label fall_through;
  // Only smis allowed.
  __ movq(RAX, Address(RSP, +1 * kWordSize));
  __ testq(RAX, Immediate(kSmiTagMask));
  __ j(NOT_ZERO, &fall_through);
  // Is Smi.
  __ SmiUntag(RAX);
  __ cvtsi2sdq(XMM1, RAX);
  __ movq(RAX, Address(RSP, +2 * kWordSize));
  __ movsd(XMM0, FieldAddress(RAX, Double::value_offset()));
  __ mulsd(XMM0, XMM1);
  const Class& double_class =
      Class::Handle(Isolate::Current()->object_store()->double_class());
  __ TryAllocate(double_class, &fall_through, Assembler::kFarJump,
                 RAX,  // Result register.
                 R13);
  __ movsd(FieldAddress(RAX, Double::value_offset()), XMM0);
  __ ret();
  __ Bind(&fall_through);
}

1404
1405 // Left is double, right is integer (Bigint, Mint or Smi). 1344 // Left is double, right is integer (Bigint, Mint or Smi).
1406 void Intrinsifier::DoubleFromInteger(Assembler* assembler) { 1345 void Intrinsifier::DoubleFromInteger(Assembler* assembler) {
1407 Label fall_through; 1346 Label fall_through;
1408 __ movq(RAX, Address(RSP, +1 * kWordSize)); 1347 __ movq(RAX, Address(RSP, +1 * kWordSize));
1409 __ testq(RAX, Immediate(kSmiTagMask)); 1348 __ testq(RAX, Immediate(kSmiTagMask));
1410 __ j(NOT_ZERO, &fall_through); 1349 __ j(NOT_ZERO, &fall_through);
1411 // Is Smi. 1350 // Is Smi.
1412 __ SmiUntag(RAX); 1351 __ SmiUntag(RAX);
1413 __ cvtsi2sdq(XMM0, RAX); 1352 __ cvtsi2sdq(XMM0, RAX);
1414 const Class& double_class = 1353 const Class& double_class =
1415 Class::Handle(Isolate::Current()->object_store()->double_class()); 1354 Class::Handle(Isolate::Current()->object_store()->double_class());
1416 __ TryAllocate(double_class, &fall_through, Assembler::kFarJump, 1355 __ TryAllocate(double_class, &fall_through, Assembler::kFarJump,
1417 RAX, // Result register. 1356 RAX, // Result register.
1418 R13); 1357 R13);
1419 __ movsd(FieldAddress(RAX, Double::value_offset()), XMM0); 1358 __ movsd(FieldAddress(RAX, Double::value_offset()), XMM0);
1420 __ ret(); 1359 __ ret();
1421 __ Bind(&fall_through); 1360 __ Bind(&fall_through);
1422 } 1361 }
1423 1362
1424
1425 void Intrinsifier::Double_getIsNaN(Assembler* assembler) { 1363 void Intrinsifier::Double_getIsNaN(Assembler* assembler) {
1426 Label is_true; 1364 Label is_true;
1427 __ movq(RAX, Address(RSP, +1 * kWordSize)); 1365 __ movq(RAX, Address(RSP, +1 * kWordSize));
1428 __ movsd(XMM0, FieldAddress(RAX, Double::value_offset())); 1366 __ movsd(XMM0, FieldAddress(RAX, Double::value_offset()));
1429 __ comisd(XMM0, XMM0); 1367 __ comisd(XMM0, XMM0);
1430 __ j(PARITY_EVEN, &is_true, Assembler::kNearJump); // NaN -> true; 1368 __ j(PARITY_EVEN, &is_true, Assembler::kNearJump); // NaN -> true;
1431 __ LoadObject(RAX, Bool::False()); 1369 __ LoadObject(RAX, Bool::False());
1432 __ ret(); 1370 __ ret();
1433 __ Bind(&is_true); 1371 __ Bind(&is_true);
1434 __ LoadObject(RAX, Bool::True()); 1372 __ LoadObject(RAX, Bool::True());
1435 __ ret(); 1373 __ ret();
1436 } 1374 }
1437 1375
1438
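Editor's note: comisd of a value against itself is unordered exactly when the value is NaN, which is what the PARITY_EVEN branch above detects at the flags level. The portable equivalent is the usual self-comparison; a sketch, not the VM's code:

    // NaN is the only double that compares unequal to itself.
    static bool IsNaN(double d) {
      return d != d;
    }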
1439 void Intrinsifier::Double_getIsInfinite(Assembler* assembler) { 1376 void Intrinsifier::Double_getIsInfinite(Assembler* assembler) {
1440 Label is_inf, done; 1377 Label is_inf, done;
1441 __ movq(RAX, Address(RSP, +1 * kWordSize)); 1378 __ movq(RAX, Address(RSP, +1 * kWordSize));
1442 __ movq(RAX, FieldAddress(RAX, Double::value_offset())); 1379 __ movq(RAX, FieldAddress(RAX, Double::value_offset()));
1443 // Mask off the sign. 1380 // Mask off the sign.
1444 __ AndImmediate(RAX, Immediate(0x7FFFFFFFFFFFFFFFLL)); 1381 __ AndImmediate(RAX, Immediate(0x7FFFFFFFFFFFFFFFLL));
1445 // Compare with +infinity. 1382 // Compare with +infinity.
1446 __ CompareImmediate(RAX, Immediate(0x7FF0000000000000LL)); 1383 __ CompareImmediate(RAX, Immediate(0x7FF0000000000000LL));
1447 __ j(EQUAL, &is_inf, Assembler::kNearJump); 1384 __ j(EQUAL, &is_inf, Assembler::kNearJump);
1448 __ LoadObject(RAX, Bool::False()); 1385 __ LoadObject(RAX, Bool::False());
1449 __ jmp(&done); 1386 __ jmp(&done);
1450 1387
1451 __ Bind(&is_inf); 1388 __ Bind(&is_inf);
1452 __ LoadObject(RAX, Bool::True()); 1389 __ LoadObject(RAX, Bool::True());
1453 1390
1454 __ Bind(&done); 1391 __ Bind(&done);
1455 __ ret(); 1392 __ ret();
1456 } 1393 }
1457 1394
1458
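Editor's note: the infinity test is purely bit-level: clear the sign bit, then compare against 0x7FF0000000000000, the IEEE-754 encoding of +infinity (exponent all ones, mantissa zero). A self-contained C++ sketch of the same test:

    #include <cstdint>
    #include <cstring>

    // Mask off bit 63 (the AndImmediate above), then compare with the
    // bit pattern of +infinity (the CompareImmediate above).
    static bool IsInfinite(double d) {
      uint64_t bits;
      std::memcpy(&bits, &d, sizeof(bits));
      bits &= 0x7FFFFFFFFFFFFFFFULL;
      return bits == 0x7FF0000000000000ULL;
    }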
1459 void Intrinsifier::Double_getIsNegative(Assembler* assembler) { 1395 void Intrinsifier::Double_getIsNegative(Assembler* assembler) {
1460 Label is_false, is_true, is_zero; 1396 Label is_false, is_true, is_zero;
1461 __ movq(RAX, Address(RSP, +1 * kWordSize)); 1397 __ movq(RAX, Address(RSP, +1 * kWordSize));
1462 __ movsd(XMM0, FieldAddress(RAX, Double::value_offset())); 1398 __ movsd(XMM0, FieldAddress(RAX, Double::value_offset()));
1463 __ xorpd(XMM1, XMM1); // 0.0 -> XMM1. 1399 __ xorpd(XMM1, XMM1); // 0.0 -> XMM1.
1464 __ comisd(XMM0, XMM1); 1400 __ comisd(XMM0, XMM1);
1465 __ j(PARITY_EVEN, &is_false, Assembler::kNearJump); // NaN -> false. 1401 __ j(PARITY_EVEN, &is_false, Assembler::kNearJump); // NaN -> false.
1466 __ j(EQUAL, &is_zero, Assembler::kNearJump); // Check for negative zero. 1402 __ j(EQUAL, &is_zero, Assembler::kNearJump); // Check for negative zero.
1467 __ j(ABOVE_EQUAL, &is_false, Assembler::kNearJump); // >= 0 -> false. 1403 __ j(ABOVE_EQUAL, &is_false, Assembler::kNearJump); // >= 0 -> false.
1468 __ Bind(&is_true); 1404 __ Bind(&is_true);
1469 __ LoadObject(RAX, Bool::True()); 1405 __ LoadObject(RAX, Bool::True());
1470 __ ret(); 1406 __ ret();
1471 __ Bind(&is_false); 1407 __ Bind(&is_false);
1472 __ LoadObject(RAX, Bool::False()); 1408 __ LoadObject(RAX, Bool::False());
1473 __ ret(); 1409 __ ret();
1474 __ Bind(&is_zero); 1410 __ Bind(&is_zero);
1475 // Check for negative zero (get the sign bit). 1411 // Check for negative zero (get the sign bit).
1476 __ movmskpd(RAX, XMM0); 1412 __ movmskpd(RAX, XMM0);
1477 __ testq(RAX, Immediate(1)); 1413 __ testq(RAX, Immediate(1));
1478 __ j(NOT_ZERO, &is_true, Assembler::kNearJump); 1414 __ j(NOT_ZERO, &is_true, Assembler::kNearJump);
1479 __ jmp(&is_false, Assembler::kNearJump); 1415 __ jmp(&is_false, Assembler::kNearJump);
1480 } 1416 }
1481 1417
1482
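Editor's note: the intrinsic distinguishes three cases: NaN (unordered, parity set) is never negative, values strictly below zero are, and an exact zero needs a second look at the sign bit so that -0.0 counts as negative; movmskpd extracts that bit. A hedged sketch of the same decision tree:

    #include <cstdint>
    #include <cstring>

    static bool IsNegative(double d) {
      if (d != d) return false;  // NaN -> false (PARITY_EVEN branch).
      if (d < 0.0) return true;  // Strictly negative.
      if (d == 0.0) {            // Zero: check the sign bit for -0.0.
        uint64_t bits;
        std::memcpy(&bits, &d, sizeof(bits));
        return (bits >> 63) != 0;  // movmskpd + testq above.
      }
      return false;              // Strictly positive.
    }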
1483 void Intrinsifier::DoubleToInteger(Assembler* assembler) { 1418 void Intrinsifier::DoubleToInteger(Assembler* assembler) {
1484 __ movq(RAX, Address(RSP, +1 * kWordSize)); 1419 __ movq(RAX, Address(RSP, +1 * kWordSize));
1485 __ movsd(XMM0, FieldAddress(RAX, Double::value_offset())); 1420 __ movsd(XMM0, FieldAddress(RAX, Double::value_offset()));
1486 __ cvttsd2siq(RAX, XMM0); 1421 __ cvttsd2siq(RAX, XMM0);
1487 // Overflow is signalled with minint. 1422 // Overflow is signalled with minint.
1488 Label fall_through; 1423 Label fall_through;
1489 // Check for overflow and that it fits into Smi. 1424 // Check for overflow and that it fits into Smi.
1490 __ movq(RCX, RAX); 1425 __ movq(RCX, RAX);
1491 __ shlq(RCX, Immediate(1)); 1426 __ shlq(RCX, Immediate(1));
1492 __ j(OVERFLOW, &fall_through, Assembler::kNearJump); 1427 __ j(OVERFLOW, &fall_through, Assembler::kNearJump);
1493 __ SmiTag(RAX); 1428 __ SmiTag(RAX);
1494 __ ret(); 1429 __ ret();
1495 __ Bind(&fall_through); 1430 __ Bind(&fall_through);
1496 } 1431 }
1497 1432
1498
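Editor's note: cvttsd2siq reports overflow by producing INT64_MIN ("minint"), and the shlq by one both performs the smi tag and sets OF when the top two bits disagree, so a single branch rejects every result that is not a valid smi, including the overflow marker. A sketch of the fits-in-smi test, assuming the one-bit tag:

    #include <cstdint>

    // A value fits in a smi iff tagging (shift left by one) is lossless.
    // INT64_MIN, the cvttsd2siq overflow marker, fails this test too.
    static bool FitsInSmi(int64_t v) {
      int64_t tagged = static_cast<int64_t>(static_cast<uint64_t>(v) << 1);
      return (tagged >> 1) == v;
    }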
1499 void Intrinsifier::MathSqrt(Assembler* assembler) { 1433 void Intrinsifier::MathSqrt(Assembler* assembler) {
1500 Label fall_through, is_smi, double_op; 1434 Label fall_through, is_smi, double_op;
1501 TestLastArgumentIsDouble(assembler, &is_smi, &fall_through); 1435 TestLastArgumentIsDouble(assembler, &is_smi, &fall_through);
1502 // Argument is double and is in RAX. 1436 // Argument is double and is in RAX.
1503 __ movsd(XMM1, FieldAddress(RAX, Double::value_offset())); 1437 __ movsd(XMM1, FieldAddress(RAX, Double::value_offset()));
1504 __ Bind(&double_op); 1438 __ Bind(&double_op);
1505 __ sqrtsd(XMM0, XMM1); 1439 __ sqrtsd(XMM0, XMM1);
1506 const Class& double_class = 1440 const Class& double_class =
1507 Class::Handle(Isolate::Current()->object_store()->double_class()); 1441 Class::Handle(Isolate::Current()->object_store()->double_class());
1508 __ TryAllocate(double_class, &fall_through, Assembler::kFarJump, 1442 __ TryAllocate(double_class, &fall_through, Assembler::kFarJump,
1509 RAX, // Result register. 1443 RAX, // Result register.
1510 R13); 1444 R13);
1511 __ movsd(FieldAddress(RAX, Double::value_offset()), XMM0); 1445 __ movsd(FieldAddress(RAX, Double::value_offset()), XMM0);
1512 __ ret(); 1446 __ ret();
1513 __ Bind(&is_smi); 1447 __ Bind(&is_smi);
1514 __ SmiUntag(RAX); 1448 __ SmiUntag(RAX);
1515 __ cvtsi2sdq(XMM1, RAX); 1449 __ cvtsi2sdq(XMM1, RAX);
1516 __ jmp(&double_op); 1450 __ jmp(&double_op);
1517 __ Bind(&fall_through); 1451 __ Bind(&fall_through);
1518 } 1452 }
1519 1453
1520
1521 // var state = ((_A * (_state[kSTATE_LO])) + _state[kSTATE_HI]) & _MASK_64; 1454 // var state = ((_A * (_state[kSTATE_LO])) + _state[kSTATE_HI]) & _MASK_64;
1522 // _state[kSTATE_LO] = state & _MASK_32; 1455 // _state[kSTATE_LO] = state & _MASK_32;
1523 // _state[kSTATE_HI] = state >> 32; 1456 // _state[kSTATE_HI] = state >> 32;
1524 void Intrinsifier::Random_nextState(Assembler* assembler) { 1457 void Intrinsifier::Random_nextState(Assembler* assembler) {
1525 const Library& math_lib = Library::Handle(Library::MathLibrary()); 1458 const Library& math_lib = Library::Handle(Library::MathLibrary());
1526 ASSERT(!math_lib.IsNull()); 1459 ASSERT(!math_lib.IsNull());
1527 const Class& random_class = 1460 const Class& random_class =
1528 Class::Handle(math_lib.LookupClassAllowPrivate(Symbols::_Random())); 1461 Class::Handle(math_lib.LookupClassAllowPrivate(Symbols::_Random()));
1529 ASSERT(!random_class.IsNull()); 1462 ASSERT(!random_class.IsNull());
1530 const Field& state_field = Field::ZoneHandle( 1463 const Field& state_field = Field::ZoneHandle(
(...skipping 39 matching lines...)
1570 __ movq(RAX, Address(RSP, +kArgumentOffset * kWordSize)); 1503 __ movq(RAX, Address(RSP, +kArgumentOffset * kWordSize));
1571 __ cmpq(RAX, Address(RSP, +kReceiverOffset * kWordSize)); 1504 __ cmpq(RAX, Address(RSP, +kReceiverOffset * kWordSize));
1572 __ j(EQUAL, &is_true, Assembler::kNearJump); 1505 __ j(EQUAL, &is_true, Assembler::kNearJump);
1573 __ LoadObject(RAX, Bool::False()); 1506 __ LoadObject(RAX, Bool::False());
1574 __ ret(); 1507 __ ret();
1575 __ Bind(&is_true); 1508 __ Bind(&is_true);
1576 __ LoadObject(RAX, Bool::True()); 1509 __ LoadObject(RAX, Bool::True());
1577 __ ret(); 1510 __ ret();
1578 } 1511 }
1579 1512
1580
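Editor's note: the pseudocode above Random_nextState describes one multiply-with-carry style step: scale the low 32-bit state word, add the high word, then split the 64-bit result back into the two halves. A hedged C++ sketch of that update; the multiplier below is a hypothetical stand-in for the library's private _A, not a value taken from this CL:

    #include <cstdint>

    static const uint64_t kA = 0xffffda61ULL;  // hypothetical stand-in for _A

    static void NextState(uint32_t state[2]) {
      const int kStateLo = 0, kStateHi = 1;
      // ((_A * _state[kSTATE_LO]) + _state[kSTATE_HI]) & _MASK_64
      uint64_t s = kA * state[kStateLo] + state[kStateHi];
      state[kStateLo] = static_cast<uint32_t>(s);        // state & _MASK_32
      state[kStateHi] = static_cast<uint32_t>(s >> 32);  // state >> 32
    }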
1581 static void RangeCheck(Assembler* assembler, 1513 static void RangeCheck(Assembler* assembler,
1582 Register reg, 1514 Register reg,
1583 intptr_t low, 1515 intptr_t low,
1584 intptr_t high, 1516 intptr_t high,
1585 Condition cc, 1517 Condition cc,
1586 Label* target) { 1518 Label* target) {
1587 __ subq(reg, Immediate(low)); 1519 __ subq(reg, Immediate(low));
1588 __ cmpq(reg, Immediate(high - low)); 1520 __ cmpq(reg, Immediate(high - low));
1589 __ j(cc, target); 1521 __ j(cc, target);
1590 } 1522 }
1591 1523
1592
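Editor's note: RangeCheck relies on the standard unsigned-compare trick: after subtracting low, every in-range value lands in [0, high - low], and every out-of-range value, including those below low, wraps to a larger unsigned number, so one unsigned comparison tests both bounds (hence the ABOVE / BELOW_EQUAL conditions defined next). Sketch:

    #include <cstdint>

    // One unsigned comparison replaces two bound checks; values below
    // 'low' wrap around and fail the <= test just like values above
    // 'high'.
    static bool InRange(intptr_t value, intptr_t low, intptr_t high) {
      return static_cast<uintptr_t>(value - low) <=
             static_cast<uintptr_t>(high - low);
    }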
1593 const Condition kIfNotInRange = ABOVE; 1524 const Condition kIfNotInRange = ABOVE;
1594 const Condition kIfInRange = BELOW_EQUAL; 1525 const Condition kIfInRange = BELOW_EQUAL;
1595 1526
1596
1597 static void JumpIfInteger(Assembler* assembler, Register cid, Label* target) { 1527 static void JumpIfInteger(Assembler* assembler, Register cid, Label* target) {
1598 RangeCheck(assembler, cid, kSmiCid, kBigintCid, kIfInRange, target); 1528 RangeCheck(assembler, cid, kSmiCid, kBigintCid, kIfInRange, target);
1599 } 1529 }
1600 1530
1601
1602 static void JumpIfNotInteger(Assembler* assembler, 1531 static void JumpIfNotInteger(Assembler* assembler,
1603 Register cid, 1532 Register cid,
1604 Label* target) { 1533 Label* target) {
1605 RangeCheck(assembler, cid, kSmiCid, kBigintCid, kIfNotInRange, target); 1534 RangeCheck(assembler, cid, kSmiCid, kBigintCid, kIfNotInRange, target);
1606 } 1535 }
1607 1536
1608
1609 static void JumpIfString(Assembler* assembler, Register cid, Label* target) { 1537 static void JumpIfString(Assembler* assembler, Register cid, Label* target) {
1610 RangeCheck(assembler, cid, kOneByteStringCid, kExternalTwoByteStringCid, 1538 RangeCheck(assembler, cid, kOneByteStringCid, kExternalTwoByteStringCid,
1611 kIfInRange, target); 1539 kIfInRange, target);
1612 } 1540 }
1613 1541
1614
1615 static void JumpIfNotString(Assembler* assembler, Register cid, Label* target) { 1542 static void JumpIfNotString(Assembler* assembler, Register cid, Label* target) {
1616 RangeCheck(assembler, cid, kOneByteStringCid, kExternalTwoByteStringCid, 1543 RangeCheck(assembler, cid, kOneByteStringCid, kExternalTwoByteStringCid,
1617 kIfNotInRange, target); 1544 kIfNotInRange, target);
1618 } 1545 }
1619 1546
1620
1621 // Return type quickly for simple types (not parameterized and not signature). 1547 // Return type quickly for simple types (not parameterized and not signature).
1622 void Intrinsifier::ObjectRuntimeType(Assembler* assembler) { 1548 void Intrinsifier::ObjectRuntimeType(Assembler* assembler) {
1623 Label fall_through, use_canonical_type, not_integer, not_double; 1549 Label fall_through, use_canonical_type, not_integer, not_double;
1624 __ movq(RAX, Address(RSP, +1 * kWordSize)); 1550 __ movq(RAX, Address(RSP, +1 * kWordSize));
1625 __ LoadClassIdMayBeSmi(RCX, RAX); 1551 __ LoadClassIdMayBeSmi(RCX, RAX);
1626 1552
1627 // RCX: untagged cid of instance (RAX). 1553 // RCX: untagged cid of instance (RAX).
1628 __ cmpq(RCX, Immediate(kClosureCid)); 1554 __ cmpq(RCX, Immediate(kClosureCid));
1629 __ j(EQUAL, &fall_through); // Instance is a closure. 1555 __ j(EQUAL, &fall_through); // Instance is a closure.
1630 1556
(...skipping 37 matching lines...)
1668 __ cmpq(RCX, Immediate(0)); 1594 __ cmpq(RCX, Immediate(0));
1669 __ j(NOT_EQUAL, &fall_through, Assembler::kNearJump); 1595 __ j(NOT_EQUAL, &fall_through, Assembler::kNearJump);
1670 __ movq(RAX, FieldAddress(RDI, Class::canonical_type_offset())); 1596 __ movq(RAX, FieldAddress(RDI, Class::canonical_type_offset()));
1671 __ CompareObject(RAX, Object::null_object()); 1597 __ CompareObject(RAX, Object::null_object());
1672 __ j(EQUAL, &fall_through, Assembler::kNearJump); // Not yet set. 1598 __ j(EQUAL, &fall_through, Assembler::kNearJump); // Not yet set.
1673 __ ret(); 1599 __ ret();
1674 1600
1675 __ Bind(&fall_through); 1601 __ Bind(&fall_through);
1676 } 1602 }
1677 1603
1678
1679 void Intrinsifier::ObjectHaveSameRuntimeType(Assembler* assembler) { 1604 void Intrinsifier::ObjectHaveSameRuntimeType(Assembler* assembler) {
1680 Label fall_through, different_cids, equal, not_equal, not_integer; 1605 Label fall_through, different_cids, equal, not_equal, not_integer;
1681 1606
1682 __ movq(RAX, Address(RSP, +1 * kWordSize)); 1607 __ movq(RAX, Address(RSP, +1 * kWordSize));
1683 __ LoadClassIdMayBeSmi(RCX, RAX); 1608 __ LoadClassIdMayBeSmi(RCX, RAX);
1684 1609
1685 // Check if left hand side is a closure. Closures are handled in the runtime. 1610 // Check if left hand side is a closure. Closures are handled in the runtime.
1686 __ cmpq(RCX, Immediate(kClosureCid)); 1611 __ cmpq(RCX, Immediate(kClosureCid));
1687 __ j(EQUAL, &fall_through); 1612 __ j(EQUAL, &fall_through);
1688 1613
(...skipping 43 matching lines...)
1732 // Strings only have the same runtime type as other strings. 1657 // Strings only have the same runtime type as other strings.
1733 // Fall-through to the not equal case. 1658 // Fall-through to the not equal case.
1734 1659
1735 __ Bind(&not_equal); 1660 __ Bind(&not_equal);
1736 __ LoadObject(RAX, Bool::False()); 1661 __ LoadObject(RAX, Bool::False());
1737 __ ret(); 1662 __ ret();
1738 1663
1739 __ Bind(&fall_through); 1664 __ Bind(&fall_through);
1740 } 1665 }
1741 1666
1742
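Editor's note: with closures sent to the runtime and equal cids handled first, the elided middle reduces "same runtime type" to cid classification: two different cids only agree when both are integer cids or both are string cids (the RangeCheck helpers above). A hedged sketch of that decision, with the generic-class subtleties of the elided code left out:

    #include <cstdint>

    // Predicates are assumed to mirror JumpIfInteger / JumpIfString.
    static bool HaveSameRuntimeType(intptr_t cid_a, intptr_t cid_b,
                                    bool (*is_integer)(intptr_t),
                                    bool (*is_string)(intptr_t)) {
      if (cid_a == cid_b) return true;  // Non-generic classes only.
      if (is_integer(cid_a)) return is_integer(cid_b);
      if (is_string(cid_a)) return is_string(cid_b);
      return false;
    }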
1743 void Intrinsifier::String_getHashCode(Assembler* assembler) { 1667 void Intrinsifier::String_getHashCode(Assembler* assembler) {
1744 Label fall_through; 1668 Label fall_through;
1745 __ movq(RAX, Address(RSP, +1 * kWordSize)); // String object. 1669 __ movq(RAX, Address(RSP, +1 * kWordSize)); // String object.
1746 __ movl(RAX, FieldAddress(RAX, String::hash_offset())); 1670 __ movl(RAX, FieldAddress(RAX, String::hash_offset()));
1747 ASSERT(kSmiTag == 0); 1671 ASSERT(kSmiTag == 0);
1748 ASSERT(kSmiTagShift == 1); 1672 ASSERT(kSmiTagShift == 1);
1749 __ addq(RAX, RAX); // Smi tag RAX, setting Z flag. 1673 __ addq(RAX, RAX); // Smi tag RAX, setting Z flag.
1750 __ j(ZERO, &fall_through, Assembler::kNearJump); 1674 __ j(ZERO, &fall_through, Assembler::kNearJump);
1751 __ ret(); 1675 __ ret();
1752 __ Bind(&fall_through); 1676 __ Bind(&fall_through);
1753 // Hash not yet computed. 1677 // Hash not yet computed.
1754 } 1678 }
1755 1679
1756
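Editor's note: the addq RAX, RAX doubles the loaded 32-bit hash, which is precisely smi tagging (shift left one, tag bit zero), and sets ZF when the stored hash is still zero, i.e. not yet computed, so one instruction both tags the result and feeds the fall-through test. Sketch:

    #include <cstdint>

    // Returns false when the hash was never computed (stored as 0) and
    // the intrinsic must fall through to the general implementation.
    static bool TryGetTaggedHash(uint32_t stored_hash, intptr_t* out_smi) {
      intptr_t tagged = static_cast<intptr_t>(stored_hash) * 2;  // SmiTag
      if (tagged == 0) return false;
      *out_smi = tagged;
      return true;
    }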
1757 void Intrinsifier::Object_getHash(Assembler* assembler) { 1680 void Intrinsifier::Object_getHash(Assembler* assembler) {
1758 __ movq(RAX, Address(RSP, +1 * kWordSize)); // Object. 1681 __ movq(RAX, Address(RSP, +1 * kWordSize)); // Object.
1759 __ movl(RAX, FieldAddress(RAX, String::hash_offset())); 1682 __ movl(RAX, FieldAddress(RAX, String::hash_offset()));
1760 __ SmiTag(RAX); 1683 __ SmiTag(RAX);
1761 __ ret(); 1684 __ ret();
1762 } 1685 }
1763 1686
1764
1765 void Intrinsifier::Object_setHash(Assembler* assembler) { 1687 void Intrinsifier::Object_setHash(Assembler* assembler) {
1766 __ movq(RAX, Address(RSP, +2 * kWordSize)); // Object. 1688 __ movq(RAX, Address(RSP, +2 * kWordSize)); // Object.
1767 __ movq(RDX, Address(RSP, +1 * kWordSize)); // Value. 1689 __ movq(RDX, Address(RSP, +1 * kWordSize)); // Value.
1768 __ SmiUntag(RDX); 1690 __ SmiUntag(RDX);
1769 __ movl(FieldAddress(RAX, String::hash_offset()), RDX); 1691 __ movl(FieldAddress(RAX, String::hash_offset()), RDX);
1770 __ ret(); 1692 __ ret();
1771 } 1693 }
1772 1694
1773
1774 void GenerateSubstringMatchesSpecialization(Assembler* assembler, 1695 void GenerateSubstringMatchesSpecialization(Assembler* assembler,
1775 intptr_t receiver_cid, 1696 intptr_t receiver_cid,
1776 intptr_t other_cid, 1697 intptr_t other_cid,
1777 Label* return_true, 1698 Label* return_true,
1778 Label* return_false) { 1699 Label* return_false) {
1779 __ movq(R8, FieldAddress(RAX, String::length_offset())); 1700 __ movq(R8, FieldAddress(RAX, String::length_offset()));
1780 __ movq(R9, FieldAddress(RCX, String::length_offset())); 1701 __ movq(R9, FieldAddress(RCX, String::length_offset()));
1781 1702
1782 // if (other.length == 0) return true; 1703 // if (other.length == 0) return true;
1783 __ testq(R9, R9); 1704 __ testq(R9, R9);
(...skipping 42 matching lines...)
1826 __ j(NOT_EQUAL, return_false); 1747 __ j(NOT_EQUAL, return_false);
1827 1748
1828 // i++, while (i < len) 1749 // i++, while (i < len)
1829 __ addq(R11, Immediate(1)); 1750 __ addq(R11, Immediate(1));
1830 __ cmpq(R11, R9); 1751 __ cmpq(R11, R9);
1831 __ j(LESS, &loop, Assembler::kNearJump); 1752 __ j(LESS, &loop, Assembler::kNearJump);
1832 1753
1833 __ jmp(return_true); 1754 __ jmp(return_true);
1834 } 1755 }
1835 1756
1836
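Editor's note: stripped of the cid-specific element loads, the emitted specialization is a plain element-wise comparison of other against the receiver offset by start, with the empty-other early return shown above. A hedged C++ equivalent; the receiver bound check is assumed to match the elided lines:

    #include <cstddef>

    template <typename RecvChar, typename OtherChar>
    static bool SubstringMatches(const RecvChar* receiver,
                                 size_t receiver_len, size_t start,
                                 const OtherChar* other, size_t other_len) {
      if (other_len == 0) return true;  // if (other.length == 0) return true;
      if (start + other_len > receiver_len) return false;
      for (size_t i = 0; i < other_len; i++) {
        if (receiver[start + i] != other[i]) return false;
      }
      return true;  // i++, while (i < len) ... then return true.
    }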
1837 // bool _substringMatches(int start, String other) 1757 // bool _substringMatches(int start, String other)
1838 // This intrinsic handles a OneByteString or TwoByteString receiver with a 1758 // This intrinsic handles a OneByteString or TwoByteString receiver with a
1839 // OneByteString other. 1759 // OneByteString other.
1840 void Intrinsifier::StringBaseSubstringMatches(Assembler* assembler) { 1760 void Intrinsifier::StringBaseSubstringMatches(Assembler* assembler) {
1841 Label fall_through, return_true, return_false, try_two_byte; 1761 Label fall_through, return_true, return_false, try_two_byte;
1842 __ movq(RAX, Address(RSP, +3 * kWordSize)); // receiver 1762 __ movq(RAX, Address(RSP, +3 * kWordSize)); // receiver
1843 __ movq(RBX, Address(RSP, +2 * kWordSize)); // start 1763 __ movq(RBX, Address(RSP, +2 * kWordSize)); // start
1844 __ movq(RCX, Address(RSP, +1 * kWordSize)); // other 1764 __ movq(RCX, Address(RSP, +1 * kWordSize)); // other
1845 1765
1846 __ testq(RBX, Immediate(kSmiTagMask)); 1766 __ testq(RBX, Immediate(kSmiTagMask));
(...skipping 21 matching lines...)
1868 __ LoadObject(RAX, Bool::True()); 1788 __ LoadObject(RAX, Bool::True());
1869 __ ret(); 1789 __ ret();
1870 1790
1871 __ Bind(&return_false); 1791 __ Bind(&return_false);
1872 __ LoadObject(RAX, Bool::False()); 1792 __ LoadObject(RAX, Bool::False());
1873 __ ret(); 1793 __ ret();
1874 1794
1875 __ Bind(&fall_through); 1795 __ Bind(&fall_through);
1876 } 1796 }
1877 1797
1878
1879 void Intrinsifier::StringBaseCharAt(Assembler* assembler) { 1798 void Intrinsifier::StringBaseCharAt(Assembler* assembler) {
1880 Label fall_through, try_two_byte_string; 1799 Label fall_through, try_two_byte_string;
1881 __ movq(RCX, Address(RSP, +1 * kWordSize)); // Index. 1800 __ movq(RCX, Address(RSP, +1 * kWordSize)); // Index.
1882 __ movq(RAX, Address(RSP, +2 * kWordSize)); // String. 1801 __ movq(RAX, Address(RSP, +2 * kWordSize)); // String.
1883 __ testq(RCX, Immediate(kSmiTagMask)); 1802 __ testq(RCX, Immediate(kSmiTagMask));
1884 __ j(NOT_ZERO, &fall_through); // Non-smi index. 1803 __ j(NOT_ZERO, &fall_through); // Non-smi index.
1885 // Range check. 1804 // Range check.
1886 __ cmpq(RCX, FieldAddress(RAX, String::length_offset())); 1805 __ cmpq(RCX, FieldAddress(RAX, String::length_offset()));
1887 // Runtime throws exception. 1806 // Runtime throws exception.
1888 __ j(ABOVE_EQUAL, &fall_through); 1807 __ j(ABOVE_EQUAL, &fall_through);
(...skipping 16 matching lines...)
1905 __ cmpq(RCX, Immediate(Symbols::kNumberOfOneCharCodeSymbols)); 1824 __ cmpq(RCX, Immediate(Symbols::kNumberOfOneCharCodeSymbols));
1906 __ j(GREATER_EQUAL, &fall_through); 1825 __ j(GREATER_EQUAL, &fall_through);
1907 __ movq(RAX, Address(THR, Thread::predefined_symbols_address_offset())); 1826 __ movq(RAX, Address(THR, Thread::predefined_symbols_address_offset()));
1908 __ movq(RAX, Address(RAX, RCX, TIMES_8, 1827 __ movq(RAX, Address(RAX, RCX, TIMES_8,
1909 Symbols::kNullCharCodeSymbolOffset * kWordSize)); 1828 Symbols::kNullCharCodeSymbolOffset * kWordSize));
1910 __ ret(); 1829 __ ret();
1911 1830
1912 __ Bind(&fall_through); 1831 __ Bind(&fall_through);
1913 } 1832 }
1914 1833
1915
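Editor's note: the fast path avoids allocation entirely: a single-character string whose char code is below kNumberOfOneCharCodeSymbols is fetched from the thread's table of predefined one-character symbols. A sketch of that lookup; the table pointer stands in for the address loaded via Thread::predefined_symbols_address_offset():

    #include <cstdint>

    // Returns the canonical one-character string, or nullptr when the
    // char code is too large and the intrinsic must fall through.
    static const void* OneCharSymbol(const void* const* symbol_table,
                                     uint16_t char_code,
                                     intptr_t num_one_char_symbols) {
      if (char_code >= num_one_char_symbols) return nullptr;
      return symbol_table[char_code];
    }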
1916 void Intrinsifier::StringBaseIsEmpty(Assembler* assembler) { 1834 void Intrinsifier::StringBaseIsEmpty(Assembler* assembler) {
1917 Label is_true; 1835 Label is_true;
1918 // Get length. 1836 // Get length.
1919 __ movq(RAX, Address(RSP, +1 * kWordSize)); // String object. 1837 __ movq(RAX, Address(RSP, +1 * kWordSize)); // String object.
1920 __ movq(RAX, FieldAddress(RAX, String::length_offset())); 1838 __ movq(RAX, FieldAddress(RAX, String::length_offset()));
1921 __ cmpq(RAX, Immediate(Smi::RawValue(0))); 1839 __ cmpq(RAX, Immediate(Smi::RawValue(0)));
1922 __ j(EQUAL, &is_true, Assembler::kNearJump); 1840 __ j(EQUAL, &is_true, Assembler::kNearJump);
1923 __ LoadObject(RAX, Bool::False()); 1841 __ LoadObject(RAX, Bool::False());
1924 __ ret(); 1842 __ ret();
1925 __ Bind(&is_true); 1843 __ Bind(&is_true);
1926 __ LoadObject(RAX, Bool::True()); 1844 __ LoadObject(RAX, Bool::True());
1927 __ ret(); 1845 __ ret();
1928 } 1846 }
1929 1847
1930
1931 void Intrinsifier::OneByteString_getHashCode(Assembler* assembler) { 1848 void Intrinsifier::OneByteString_getHashCode(Assembler* assembler) {
1932 Label compute_hash; 1849 Label compute_hash;
1933 __ movq(RBX, Address(RSP, +1 * kWordSize)); // OneByteString object. 1850 __ movq(RBX, Address(RSP, +1 * kWordSize)); // OneByteString object.
1934 __ movl(RAX, FieldAddress(RBX, String::hash_offset())); 1851 __ movl(RAX, FieldAddress(RBX, String::hash_offset()));
1935 __ cmpq(RAX, Immediate(0)); 1852 __ cmpq(RAX, Immediate(0));
1936 __ j(EQUAL, &compute_hash, Assembler::kNearJump); 1853 __ j(EQUAL, &compute_hash, Assembler::kNearJump);
1937 __ SmiTag(RAX); 1854 __ SmiTag(RAX);
1938 __ ret(); 1855 __ ret();
1939 1856
1940 __ Bind(&compute_hash); 1857 __ Bind(&compute_hash);
(...skipping 49 matching lines...)
1990 // return hash_ == 0 ? 1 : hash_; 1907 // return hash_ == 0 ? 1 : hash_;
1991 __ cmpq(RAX, Immediate(0)); 1908 __ cmpq(RAX, Immediate(0));
1992 __ j(NOT_EQUAL, &set_hash_code, Assembler::kNearJump); 1909 __ j(NOT_EQUAL, &set_hash_code, Assembler::kNearJump);
1993 __ incq(RAX); 1910 __ incq(RAX);
1994 __ Bind(&set_hash_code); 1911 __ Bind(&set_hash_code);
1995 __ movl(FieldAddress(RBX, String::hash_offset()), RAX); 1912 __ movl(FieldAddress(RBX, String::hash_offset()), RAX);
1996 __ SmiTag(RAX); 1913 __ SmiTag(RAX);
1997 __ ret(); 1914 __ ret();
1998 } 1915 }
1999 1916
2000
2001 // Allocates one-byte string of length 'end - start'. The content is not 1917 // Allocates one-byte string of length 'end - start'. The content is not
2002 // initialized. 'length-reg' contains tagged length. 1918 // initialized. 'length-reg' contains tagged length.
2003 // Returns new string as tagged pointer in RAX. 1919 // Returns new string as tagged pointer in RAX.
2004 static void TryAllocateOnebyteString(Assembler* assembler, 1920 static void TryAllocateOnebyteString(Assembler* assembler,
2005 Label* ok, 1921 Label* ok,
2006 Label* failure, 1922 Label* failure,
2007 Register length_reg) { 1923 Register length_reg) {
2008 NOT_IN_PRODUCT(__ MaybeTraceAllocation(kOneByteStringCid, failure, false)); 1924 NOT_IN_PRODUCT(__ MaybeTraceAllocation(kOneByteStringCid, failure, false));
2009 if (length_reg != RDI) { 1925 if (length_reg != RDI) {
2010 __ movq(RDI, length_reg); 1926 __ movq(RDI, length_reg);
2011 } 1927 }
2012 Label pop_and_fail, not_zero_length; 1928 Label pop_and_fail, not_zero_length;
2013 __ pushq(RDI); // Preserve length. 1929 __ pushq(RDI); // Preserve length.
2014 __ sarq(RDI, Immediate(kSmiTagShift)); // Untag length. 1930 __ sarq(RDI, Immediate(kSmiTagShift)); // Untag length.
2015 // If the length is 0 then we have to make the allocated size a bit bigger, 1931 // If the length is 0 then we have to make the allocated size a bit bigger,
2016 // otherwise the string takes up less space than an ExternalOneByteString, 1932 // otherwise the string takes up less space than an ExternalOneByteString,
2017 // and cannot be externalized. TODO(erikcorry): We should probably just 1933 // and cannot be externalized. TODO(erikcorry): We should probably just
2018 // return a static zero length string here instead. 1934 // return a static zero length string here instead.
2019 __ j(NOT_ZERO, &not_zero_length); 1935 __ j(NOT_ZERO, &not_zero_length);
2020 __ addq(RDI, Immediate(1)); 1936 __ addq(RDI, Immediate(1));
2021 __ Bind(&not_zero_length); 1937 __ Bind(&not_zero_length);
2022 const intptr_t fixed_size_plus_alignment_padding = 1938 const intptr_t fixed_size_plus_alignment_padding =
2023 sizeof(RawString) + kObjectAlignment - 1; 1939 sizeof(RawString) + kObjectAlignment - 1;
(...skipping 48 matching lines...)
2072 __ popq(RDI); 1988 __ popq(RDI);
2073 __ StoreIntoObjectNoBarrier(RAX, FieldAddress(RAX, String::length_offset()), 1989 __ StoreIntoObjectNoBarrier(RAX, FieldAddress(RAX, String::length_offset()),
2074 RDI); 1990 RDI);
2075 __ jmp(ok, Assembler::kNearJump); 1991 __ jmp(ok, Assembler::kNearJump);
2076 1992
2077 __ Bind(&pop_and_fail); 1993 __ Bind(&pop_and_fail);
2078 __ popq(RDI); 1994 __ popq(RDI);
2079 __ jmp(failure); 1995 __ jmp(failure);
2080 } 1996 }
2081 1997
2082
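Editor's note: fixed_size_plus_alignment_padding is the first half of the usual round-up idiom: add the header size plus (alignment - 1) to the payload length, then mask off the low bits (the masking sits in the elided lines). A sketch with placeholder values, assuming the alignment is a power of two:

    #include <cstddef>

    // size = (header + length + align - 1) & ~(align - 1); 'align' must
    // be a power of two. The header size here is illustrative, not the
    // VM's real sizeof(RawString).
    static size_t AllocationSize(size_t header, size_t length, size_t align) {
      return (header + length + align - 1) & ~(align - 1);
    }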
2083 // Arg0: OneByteString (receiver). 1998 // Arg0: OneByteString (receiver).
2084 // Arg1: Start index as Smi. 1999 // Arg1: Start index as Smi.
2085 // Arg2: End index as Smi. 2000 // Arg2: End index as Smi.
2086 // The indexes must be valid. 2001 // The indexes must be valid.
2087 void Intrinsifier::OneByteString_substringUnchecked(Assembler* assembler) { 2002 void Intrinsifier::OneByteString_substringUnchecked(Assembler* assembler) {
2088 const intptr_t kStringOffset = 3 * kWordSize; 2003 const intptr_t kStringOffset = 3 * kWordSize;
2089 const intptr_t kStartIndexOffset = 2 * kWordSize; 2004 const intptr_t kStartIndexOffset = 2 * kWordSize;
2090 const intptr_t kEndIndexOffset = 1 * kWordSize; 2005 const intptr_t kEndIndexOffset = 1 * kWordSize;
2091 Label fall_through, ok; 2006 Label fall_through, ok;
2092 __ movq(RSI, Address(RSP, +kStartIndexOffset)); 2007 __ movq(RSI, Address(RSP, +kStartIndexOffset));
(...skipping 28 matching lines...)
2121 __ movzxb(RBX, Address(RSI, RDX, TIMES_1, 0)); 2036 __ movzxb(RBX, Address(RSI, RDX, TIMES_1, 0));
2122 __ movb(FieldAddress(RAX, RDX, TIMES_1, OneByteString::data_offset()), RBX); 2037 __ movb(FieldAddress(RAX, RDX, TIMES_1, OneByteString::data_offset()), RBX);
2123 __ incq(RDX); 2038 __ incq(RDX);
2124 __ Bind(&check); 2039 __ Bind(&check);
2125 __ cmpq(RDX, RCX); 2040 __ cmpq(RDX, RCX);
2126 __ j(LESS, &loop, Assembler::kNearJump); 2041 __ j(LESS, &loop, Assembler::kNearJump);
2127 __ ret(); 2042 __ ret();
2128 __ Bind(&fall_through); 2043 __ Bind(&fall_through);
2129 } 2044 }
2130 2045
2131
2132 void Intrinsifier::OneByteStringSetAt(Assembler* assembler) { 2046 void Intrinsifier::OneByteStringSetAt(Assembler* assembler) {
2133 __ movq(RCX, Address(RSP, +1 * kWordSize)); // Value. 2047 __ movq(RCX, Address(RSP, +1 * kWordSize)); // Value.
2134 __ movq(RBX, Address(RSP, +2 * kWordSize)); // Index. 2048 __ movq(RBX, Address(RSP, +2 * kWordSize)); // Index.
2135 __ movq(RAX, Address(RSP, +3 * kWordSize)); // OneByteString. 2049 __ movq(RAX, Address(RSP, +3 * kWordSize)); // OneByteString.
2136 __ SmiUntag(RBX); 2050 __ SmiUntag(RBX);
2137 __ SmiUntag(RCX); 2051 __ SmiUntag(RCX);
2138 __ movb(FieldAddress(RAX, RBX, TIMES_1, OneByteString::data_offset()), RCX); 2052 __ movb(FieldAddress(RAX, RBX, TIMES_1, OneByteString::data_offset()), RCX);
2139 __ ret(); 2053 __ ret();
2140 } 2054 }
2141 2055
2142
2143 void Intrinsifier::OneByteString_allocate(Assembler* assembler) { 2056 void Intrinsifier::OneByteString_allocate(Assembler* assembler) {
2144 __ movq(RDI, Address(RSP, +1 * kWordSize)); // Length. 2057 __ movq(RDI, Address(RSP, +1 * kWordSize)); // Length.
2145 Label fall_through, ok; 2058 Label fall_through, ok;
2146 TryAllocateOnebyteString(assembler, &ok, &fall_through, RDI); 2059 TryAllocateOnebyteString(assembler, &ok, &fall_through, RDI);
2147 // RDI: Start address to copy from (untagged). 2060 // RDI: Start address to copy from (untagged).
2148 2061
2149 __ Bind(&ok); 2062 __ Bind(&ok);
2150 __ ret(); 2063 __ ret();
2151 2064
2152 __ Bind(&fall_through); 2065 __ Bind(&fall_through);
2153 } 2066 }
2154 2067
2155
2156 // TODO(srdjan): Add combinations (one-byte/two-byte/external strings). 2068 // TODO(srdjan): Add combinations (one-byte/two-byte/external strings).
2157 static void StringEquality(Assembler* assembler, intptr_t string_cid) { 2069 static void StringEquality(Assembler* assembler, intptr_t string_cid) {
2158 Label fall_through, is_true, is_false, loop; 2070 Label fall_through, is_true, is_false, loop;
2159 __ movq(RAX, Address(RSP, +2 * kWordSize)); // This. 2071 __ movq(RAX, Address(RSP, +2 * kWordSize)); // This.
2160 __ movq(RCX, Address(RSP, +1 * kWordSize)); // Other. 2072 __ movq(RCX, Address(RSP, +1 * kWordSize)); // Other.
2161 2073
2162 // Are identical? 2074 // Are identical?
2163 __ cmpq(RAX, RCX); 2075 __ cmpq(RAX, RCX);
2164 __ j(EQUAL, &is_true, Assembler::kNearJump); 2076 __ j(EQUAL, &is_true, Assembler::kNearJump);
2165 2077
(...skipping 36 matching lines...)
2202 __ LoadObject(RAX, Bool::True()); 2114 __ LoadObject(RAX, Bool::True());
2203 __ ret(); 2115 __ ret();
2204 2116
2205 __ Bind(&is_false); 2117 __ Bind(&is_false);
2206 __ LoadObject(RAX, Bool::False()); 2118 __ LoadObject(RAX, Bool::False());
2207 __ ret(); 2119 __ ret();
2208 2120
2209 __ Bind(&fall_through); 2121 __ Bind(&fall_through);
2210 } 2122 }
2211 2123
2212
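Editor's note: StringEquality checks in increasing cost order: object identity first, then (in the elided middle) a smi / class-id check and a length compare on the other string, and only then the element loop for the given cid. A hedged sketch of that ordering once both operands are known to be strings of the same cid, with raw buffers standing in for string objects:

    #include <cstddef>

    template <typename Char>
    static bool StringsEqual(const Char* a, size_t a_len,
                             const Char* b, size_t b_len) {
      if (a == b) return true;          // "Are identical?"
      if (a_len != b_len) return false; // Length compare (elided above).
      for (size_t i = 0; i < a_len; i++) {
        if (a[i] != b[i]) return false;
      }
      return true;
    }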
2213 void Intrinsifier::OneByteString_equality(Assembler* assembler) { 2124 void Intrinsifier::OneByteString_equality(Assembler* assembler) {
2214 StringEquality(assembler, kOneByteStringCid); 2125 StringEquality(assembler, kOneByteStringCid);
2215 } 2126 }
2216 2127
2217
2218 void Intrinsifier::TwoByteString_equality(Assembler* assembler) { 2128 void Intrinsifier::TwoByteString_equality(Assembler* assembler) {
2219 StringEquality(assembler, kTwoByteStringCid); 2129 StringEquality(assembler, kTwoByteStringCid);
2220 } 2130 }
2221 2131
2222
2223 void Intrinsifier::IntrinsifyRegExpExecuteMatch(Assembler* assembler, 2132 void Intrinsifier::IntrinsifyRegExpExecuteMatch(Assembler* assembler,
2224 bool sticky) { 2133 bool sticky) {
2225 if (FLAG_interpret_irregexp) return; 2134 if (FLAG_interpret_irregexp) return;
2226 2135
2227 static const intptr_t kRegExpParamOffset = 3 * kWordSize; 2136 static const intptr_t kRegExpParamOffset = 3 * kWordSize;
2228 static const intptr_t kStringParamOffset = 2 * kWordSize; 2137 static const intptr_t kStringParamOffset = 2 * kWordSize;
2229 // start_index smi is located at offset 1. 2138 // start_index smi is located at offset 1.
2230 2139
2231 // Incoming registers: 2140 // Incoming registers:
2232 // RAX: Function. (Will be loaded with the specialized matcher function.) 2141 // RAX: Function. (Will be loaded with the specialized matcher function.)
2233 // RCX: Unknown. (Must be GC safe on tail call.) 2142 // RCX: Unknown. (Must be GC safe on tail call.)
2234 // R10: Arguments descriptor. (Will be preserved.) 2143 // R10: Arguments descriptor. (Will be preserved.)
2235 2144
2236 // Load the specialized function pointer into RAX. Leverage the fact that the 2145 // Load the specialized function pointer into RAX. Leverage the fact that the
2237 // string CIDs as well as stored function pointers are in sequence. 2146 // string CIDs as well as stored function pointers are in sequence.
2238 __ movq(RBX, Address(RSP, kRegExpParamOffset)); 2147 __ movq(RBX, Address(RSP, kRegExpParamOffset));
2239 __ movq(RDI, Address(RSP, kStringParamOffset)); 2148 __ movq(RDI, Address(RSP, kStringParamOffset));
2240 __ LoadClassId(RDI, RDI); 2149 __ LoadClassId(RDI, RDI);
2241 __ SubImmediate(RDI, Immediate(kOneByteStringCid)); 2150 __ SubImmediate(RDI, Immediate(kOneByteStringCid));
2242 __ movq(RAX, FieldAddress(RBX, RDI, TIMES_8, RegExp::function_offset( 2151 __ movq(RAX,
2243 kOneByteStringCid, sticky))); 2152 FieldAddress(RBX, RDI, TIMES_8,
2153 RegExp::function_offset(kOneByteStringCid, sticky)));
2244 2154
2245 // Registers are now set up for the lazy compile stub. It expects the function 2155 // Registers are now set up for the lazy compile stub. It expects the function
2246 // in RAX, the argument descriptor in R10, and IC-Data in RCX. 2156 // in RAX, the argument descriptor in R10, and IC-Data in RCX.
2247 __ xorq(RCX, RCX); 2157 __ xorq(RCX, RCX);
2248 2158
2249 // Tail-call the function. 2159 // Tail-call the function.
2250 __ movq(CODE_REG, FieldAddress(RAX, Function::code_offset())); 2160 __ movq(CODE_REG, FieldAddress(RAX, Function::code_offset()));
2251 __ movq(RDI, FieldAddress(RAX, Function::entry_point_offset())); 2161 __ movq(RDI, FieldAddress(RAX, Function::entry_point_offset()));
2252 __ jmp(RDI); 2162 __ jmp(RDI);
2253 } 2163 }
2254 2164
2255
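Editor's note: because the four string cids are consecutive and the RegExp object stores one specialized matcher per cid (and per stickiness), the matcher is fetched with one scaled index: class id minus kOneByteStringCid, times the word size, into the function table. A sketch with an illustrative layout, not the VM's actual RegExp class:

    #include <cstdint>

    struct RegExpSketch {
      const void* matchers[4];  // one entry per consecutive string cid
    };

    static const void* LookupMatcher(const RegExpSketch* re,
                                     intptr_t string_cid,
                                     intptr_t one_byte_string_cid) {
      return re->matchers[string_cid - one_byte_string_cid];
    }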
2256 // On stack: user tag (+1), return-address (+0). 2165 // On stack: user tag (+1), return-address (+0).
2257 void Intrinsifier::UserTag_makeCurrent(Assembler* assembler) { 2166 void Intrinsifier::UserTag_makeCurrent(Assembler* assembler) {
2258 // RBX: Isolate. 2167 // RBX: Isolate.
2259 __ LoadIsolate(RBX); 2168 __ LoadIsolate(RBX);
2260 // RAX: Current user tag. 2169 // RAX: Current user tag.
2261 __ movq(RAX, Address(RBX, Isolate::current_tag_offset())); 2170 __ movq(RAX, Address(RBX, Isolate::current_tag_offset()));
2262 // R10: UserTag. 2171 // R10: UserTag.
2263 __ movq(R10, Address(RSP, +1 * kWordSize)); 2172 __ movq(R10, Address(RSP, +1 * kWordSize));
2264 // Set Isolate::current_tag_. 2173 // Set Isolate::current_tag_.
2265 __ movq(Address(RBX, Isolate::current_tag_offset()), R10); 2174 __ movq(Address(RBX, Isolate::current_tag_offset()), R10);
2266 // R10: UserTag's tag. 2175 // R10: UserTag's tag.
2267 __ movq(R10, FieldAddress(R10, UserTag::tag_offset())); 2176 __ movq(R10, FieldAddress(R10, UserTag::tag_offset()));
2268 // Set Isolate::user_tag_. 2177 // Set Isolate::user_tag_.
2269 __ movq(Address(RBX, Isolate::user_tag_offset()), R10); 2178 __ movq(Address(RBX, Isolate::user_tag_offset()), R10);
2270 __ ret(); 2179 __ ret();
2271 } 2180 }
2272 2181
2273
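Editor's note: at the C++ level the intrinsic publishes the new UserTag on the isolate in two stores, the object itself and its raw tag word for cheap profiler reads, and returns the previous current tag (loaded into RAX before the stores). A sketch with illustrative types:

    #include <cstdint>

    struct IsolateSketch {
      void* current_tag;   // the UserTag object
      uintptr_t user_tag;  // its raw tag bits
    };

    static void* MakeCurrent(IsolateSketch* isolate, void* new_tag,
                             uintptr_t new_tag_bits) {
      void* old_tag = isolate->current_tag;
      isolate->current_tag = new_tag;    // Set Isolate::current_tag_.
      isolate->user_tag = new_tag_bits;  // Set Isolate::user_tag_.
      return old_tag;
    }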
2274 void Intrinsifier::UserTag_defaultTag(Assembler* assembler) { 2182 void Intrinsifier::UserTag_defaultTag(Assembler* assembler) {
2275 __ LoadIsolate(RAX); 2183 __ LoadIsolate(RAX);
2276 __ movq(RAX, Address(RAX, Isolate::default_tag_offset())); 2184 __ movq(RAX, Address(RAX, Isolate::default_tag_offset()));
2277 __ ret(); 2185 __ ret();
2278 } 2186 }
2279 2187
2280
2281 void Intrinsifier::Profiler_getCurrentTag(Assembler* assembler) { 2188 void Intrinsifier::Profiler_getCurrentTag(Assembler* assembler) {
2282 __ LoadIsolate(RAX); 2189 __ LoadIsolate(RAX);
2283 __ movq(RAX, Address(RAX, Isolate::current_tag_offset())); 2190 __ movq(RAX, Address(RAX, Isolate::current_tag_offset()));
2284 __ ret(); 2191 __ ret();
2285 } 2192 }
2286 2193
2287
2288 void Intrinsifier::Timeline_isDartStreamEnabled(Assembler* assembler) { 2194 void Intrinsifier::Timeline_isDartStreamEnabled(Assembler* assembler) {
2289 if (!FLAG_support_timeline) { 2195 if (!FLAG_support_timeline) {
2290 __ LoadObject(RAX, Bool::False()); 2196 __ LoadObject(RAX, Bool::False());
2291 __ ret(); 2197 __ ret();
2292 return; 2198 return;
2293 } 2199 }
2294 Label true_label; 2200 Label true_label;
2295 // Load TimelineStream*. 2201 // Load TimelineStream*.
2296 __ movq(RAX, Address(THR, Thread::dart_stream_offset())); 2202 __ movq(RAX, Address(THR, Thread::dart_stream_offset()));
2297 // Load uintptr_t from TimelineStream*. 2203 // Load uintptr_t from TimelineStream*.
2298 __ movq(RAX, Address(RAX, TimelineStream::enabled_offset())); 2204 __ movq(RAX, Address(RAX, TimelineStream::enabled_offset()));
2299 __ cmpq(RAX, Immediate(0)); 2205 __ cmpq(RAX, Immediate(0));
2300 __ j(NOT_ZERO, &true_label, Assembler::kNearJump); 2206 __ j(NOT_ZERO, &true_label, Assembler::kNearJump);
2301 // Not enabled. 2207 // Not enabled.
2302 __ LoadObject(RAX, Bool::False()); 2208 __ LoadObject(RAX, Bool::False());
2303 __ ret(); 2209 __ ret();
2304 // Enabled. 2210 // Enabled.
2305 __ Bind(&true_label); 2211 __ Bind(&true_label);
2306 __ LoadObject(RAX, Bool::True()); 2212 __ LoadObject(RAX, Bool::True());
2307 __ ret(); 2213 __ ret();
2308 } 2214 }
2309 2215
2310
2311 void Intrinsifier::ClearAsyncThreadStackTrace(Assembler* assembler) { 2216 void Intrinsifier::ClearAsyncThreadStackTrace(Assembler* assembler) {
2312 __ LoadObject(RAX, Object::null_object()); 2217 __ LoadObject(RAX, Object::null_object());
2313 __ movq(Address(THR, Thread::async_stack_trace_offset()), RAX); 2218 __ movq(Address(THR, Thread::async_stack_trace_offset()), RAX);
2314 __ ret(); 2219 __ ret();
2315 } 2220 }
2316 2221
2317
2318 void Intrinsifier::SetAsyncThreadStackTrace(Assembler* assembler) { 2222 void Intrinsifier::SetAsyncThreadStackTrace(Assembler* assembler) {
2319 __ movq(Address(THR, Thread::async_stack_trace_offset()), RAX); 2223 __ movq(Address(THR, Thread::async_stack_trace_offset()), RAX);
2320 __ LoadObject(RAX, Object::null_object()); 2224 __ LoadObject(RAX, Object::null_object());
2321 __ ret(); 2225 __ ret();
2322 } 2226 }
2323 2227
2324 #undef __ 2228 #undef __
2325 2229
2326 } // namespace dart 2230 } // namespace dart
2327 2231
2328 #endif // defined TARGET_ARCH_X64 2232 #endif // defined TARGET_ARCH_X64