OLD | NEW |
1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 40 matching lines...)
51 // | 51 // |
52 // TEST(mov_x0_one) { | 52 // TEST(mov_x0_one) { |
53 // SETUP(); | 53 // SETUP(); |
54 // | 54 // |
55 // START(); | 55 // START(); |
56 // __ mov(x0, Operand(1)); | 56 // __ mov(x0, Operand(1)); |
57 // END(); | 57 // END(); |
58 // | 58 // |
59 // RUN(); | 59 // RUN(); |
60 // | 60 // |
61 // ASSERT_EQUAL_64(1, x0); | 61 // CHECK_EQUAL_64(1, x0); |
62 // | 62 // |
63 // TEARDOWN(); | 63 // TEARDOWN(); |
64 // } | 64 // } |
65 // | 65 // |
66 // Within a START ... END block all registers but sp can be modified. sp has to | 66 // Within a START ... END block all registers but sp can be modified. sp has to |
67 // be explicitly saved/restored. The END() macro replaces the function return | 67 // be explicitly saved/restored. The END() macro replaces the function return |
68 // so it may appear multiple times in a test if the test has multiple exit | 68 // so it may appear multiple times in a test if the test has multiple exit |
69 // points. | 69 // points. |
70 // | 70 // |
71 // Once the test has been run all integer and floating point registers as well | 71 // Once the test has been run all integer and floating point registers as well |
72 // as flags are accessible through a RegisterDump instance, see | 72 // as flags are accessible through a RegisterDump instance, see |
73 // utils-arm64.cc for more info on RegisterDump. | 73 // utils-arm64.cc for more info on RegisterDump. |
74 // | 74 // |
75 // We provide some helper asserts to handle common cases: | 75 // We provide some helper asserts to handle common cases: |
76 // | 76 // |
77 // ASSERT_EQUAL_32(int32_t, int32_t) | 77 // CHECK_EQUAL_32(int32_t, int32_t) |
78 // ASSERT_EQUAL_FP32(float, float) | 78 // CHECK_EQUAL_FP32(float, float) |
79 // ASSERT_EQUAL_32(int32_t, W register) | 79 // CHECK_EQUAL_32(int32_t, W register) |
80 // ASSERT_EQUAL_FP32(float, S register) | 80 // CHECK_EQUAL_FP32(float, S register) |
81 // ASSERT_EQUAL_64(int64_t, int64_t) | 81 // CHECK_EQUAL_64(int64_t, int64_t) |
82 // ASSERT_EQUAL_FP64(double, double) | 82 // CHECK_EQUAL_FP64(double, double) |
83 // ASSERT_EQUAL_64(int64_t, X register) | 83 // CHECK_EQUAL_64(int64_t, X register) |
84 // ASSERT_EQUAL_64(X register, X register) | 84 // CHECK_EQUAL_64(X register, X register) |
85 // ASSERT_EQUAL_FP64(double, D register) | 85 // CHECK_EQUAL_FP64(double, D register) |
86 // | 86 // |
87 // e.g. ASSERT_EQUAL_64(0.5, d30); | 87 // e.g. CHECK_EQUAL_64(0.5, d30); |
88 // | 88 // |
89 // If more advanced computation is required before the assert then access the | 89 // If more advanced computation is required before the assert then access the |
90 // RegisterDump named core directly: | 90 // RegisterDump named core directly: |
91 // | 91 // |
92 // ASSERT_EQUAL_64(0x1234, core.xreg(0) & 0xffff); | 92 // CHECK_EQUAL_64(0x1234, core.xreg(0) & 0xffff); |
93 | 93 |
94 | 94 |
95 #if 0 // TODO(all): enable. | 95 #if 0 // TODO(all): enable. |
96 static v8::Persistent<v8::Context> env; | 96 static v8::Persistent<v8::Context> env; |
97 | 97 |
98 static void InitializeVM() { | 98 static void InitializeVM() { |
99 if (env.IsEmpty()) { | 99 if (env.IsEmpty()) { |
100 env = v8::Context::New(); | 100 env = v8::Context::New(); |
101 } | 101 } |
102 } | 102 } |
103 #endif | 103 #endif |
104 | 104 |
105 #define __ masm. | 105 #define __ masm. |
106 | 106 |
107 #define BUF_SIZE 8192 | 107 #define BUF_SIZE 8192 |
108 #define SETUP() SETUP_SIZE(BUF_SIZE) | 108 #define SETUP() SETUP_SIZE(BUF_SIZE) |
109 | 109 |
110 #define INIT_V8() \ | 110 #define INIT_V8() \ |
111 CcTest::InitializeVM(); \ | 111 CcTest::InitializeVM(); \ |
112 | 112 |
113 #ifdef USE_SIMULATOR | 113 #ifdef USE_SIMULATOR |
114 | 114 |
115 // Run tests with the simulator. | 115 // Run tests with the simulator. |
116 #define SETUP_SIZE(buf_size) \ | 116 #define SETUP_SIZE(buf_size) \ |
117 Isolate* isolate = Isolate::Current(); \ | 117 Isolate* isolate = Isolate::Current(); \ |
118 HandleScope scope(isolate); \ | 118 HandleScope scope(isolate); \ |
119 ASSERT(isolate != NULL); \ | 119 DCHECK(isolate != NULL); \ |
120 byte* buf = new byte[buf_size]; \ | 120 byte* buf = new byte[buf_size]; \ |
121 MacroAssembler masm(isolate, buf, buf_size); \ | 121 MacroAssembler masm(isolate, buf, buf_size); \ |
122 Decoder<DispatchingDecoderVisitor>* decoder = \ | 122 Decoder<DispatchingDecoderVisitor>* decoder = \ |
123 new Decoder<DispatchingDecoderVisitor>(); \ | 123 new Decoder<DispatchingDecoderVisitor>(); \ |
124 Simulator simulator(decoder); \ | 124 Simulator simulator(decoder); \ |
125 PrintDisassembler* pdis = NULL; \ | 125 PrintDisassembler* pdis = NULL; \ |
126 RegisterDump core; | 126 RegisterDump core; |
127 | 127 |
128 /* if (Cctest::trace_sim()) { \ | 128 /* if (Cctest::trace_sim()) { \ |
129 pdis = new PrintDisassembler(stdout); \ | 129 pdis = new PrintDisassembler(stdout); \ |
(...skipping 33 matching lines...)
163 | 163 |
164 #define TEARDOWN() \ | 164 #define TEARDOWN() \ |
165 delete pdis; \ | 165 delete pdis; \ |
166 delete[] buf; | 166 delete[] buf; |
167 | 167 |
168 #else // ifdef USE_SIMULATOR. | 168 #else // ifdef USE_SIMULATOR. |
169 // Run the test on real hardware or models. | 169 // Run the test on real hardware or models. |
170 #define SETUP_SIZE(buf_size) \ | 170 #define SETUP_SIZE(buf_size) \ |
171 Isolate* isolate = Isolate::Current(); \ | 171 Isolate* isolate = Isolate::Current(); \ |
172 HandleScope scope(isolate); \ | 172 HandleScope scope(isolate); \ |
173 ASSERT(isolate != NULL); \ | 173 DCHECK(isolate != NULL); \ |
174 byte* buf = new byte[buf_size]; \ | 174 byte* buf = new byte[buf_size]; \ |
175 MacroAssembler masm(isolate, buf, buf_size); \ | 175 MacroAssembler masm(isolate, buf, buf_size); \ |
176 RegisterDump core; | 176 RegisterDump core; |
177 | 177 |
178 #define RESET() \ | 178 #define RESET() \ |
179 __ Reset(); \ | 179 __ Reset(); \ |
180 /* Reset the machine state (like simulator.ResetState()). */ \ | 180 /* Reset the machine state (like simulator.ResetState()). */ \ |
181 __ Msr(NZCV, xzr); \ | 181 __ Msr(NZCV, xzr); \ |
182 __ Msr(FPCR, xzr); | 182 __ Msr(FPCR, xzr); |
183 | 183 |
(...skipping 18 matching lines...)
202 core.Dump(&masm); \ | 202 core.Dump(&masm); \ |
203 __ PopCalleeSavedRegisters(); \ | 203 __ PopCalleeSavedRegisters(); \ |
204 __ Ret(); \ | 204 __ Ret(); \ |
205 __ GetCode(NULL); | 205 __ GetCode(NULL); |
206 | 206 |
207 #define TEARDOWN() \ | 207 #define TEARDOWN() \ |
208 delete[] buf; | 208 delete[] buf; |
209 | 209 |
210 #endif // ifdef USE_SIMULATOR. | 210 #endif // ifdef USE_SIMULATOR. |
211 | 211 |
212 #define ASSERT_EQUAL_NZCV(expected) \ | 212 #define CHECK_EQUAL_NZCV(expected) \ |
213 CHECK(EqualNzcv(expected, core.flags_nzcv())) | 213 CHECK(EqualNzcv(expected, core.flags_nzcv())) |
214 | 214 |
215 #define ASSERT_EQUAL_REGISTERS(expected) \ | 215 #define CHECK_EQUAL_REGISTERS(expected) \ |
216 CHECK(EqualRegisters(&expected, &core)) | 216 CHECK(EqualRegisters(&expected, &core)) |
217 | 217 |
218 #define ASSERT_EQUAL_32(expected, result) \ | 218 #define CHECK_EQUAL_32(expected, result) \ |
219 CHECK(Equal32(static_cast<uint32_t>(expected), &core, result)) | 219 CHECK(Equal32(static_cast<uint32_t>(expected), &core, result)) |
220 | 220 |
221 #define ASSERT_EQUAL_FP32(expected, result) \ | 221 #define CHECK_EQUAL_FP32(expected, result) \ |
222 CHECK(EqualFP32(expected, &core, result)) | 222 CHECK(EqualFP32(expected, &core, result)) |
223 | 223 |
224 #define ASSERT_EQUAL_64(expected, result) \ | 224 #define CHECK_EQUAL_64(expected, result) \ |
225 CHECK(Equal64(expected, &core, result)) | 225 CHECK(Equal64(expected, &core, result)) |
226 | 226 |
227 #define ASSERT_EQUAL_FP64(expected, result) \ | 227 #define CHECK_EQUAL_FP64(expected, result) \ |
228 CHECK(EqualFP64(expected, &core, result)) | 228 CHECK(EqualFP64(expected, &core, result)) |
229 | 229 |
230 #ifdef DEBUG | 230 #ifdef DEBUG |
231 #define ASSERT_LITERAL_POOL_SIZE(expected) \ | 231 #define DCHECK_LITERAL_POOL_SIZE(expected) \ |
232 CHECK((expected) == (__ LiteralPoolSize())) | 232 CHECK((expected) == (__ LiteralPoolSize())) |
233 #else | 233 #else |
234 #define ASSERT_LITERAL_POOL_SIZE(expected) \ | 234 #define DCHECK_LITERAL_POOL_SIZE(expected) \ |
235 ((void) 0) | 235 ((void) 0) |
236 #endif | 236 #endif |
237 | 237 |
238 | 238 |
239 TEST(stack_ops) { | 239 TEST(stack_ops) { |
240 INIT_V8(); | 240 INIT_V8(); |
241 SETUP(); | 241 SETUP(); |
242 | 242 |
243 START(); | 243 START(); |
244 // save csp. | 244 // save csp. |
(...skipping 24 matching lines...)
269 // Write csp, and read back wcsp. | 269 // Write csp, and read back wcsp. |
270 __ Orr(csp, xzr, Operand(0xfffffff8L)); | 270 __ Orr(csp, xzr, Operand(0xfffffff8L)); |
271 __ Mov(w5, wcsp); | 271 __ Mov(w5, wcsp); |
272 | 272 |
273 // restore csp. | 273 // restore csp. |
274 __ Mov(csp, x29); | 274 __ Mov(csp, x29); |
275 END(); | 275 END(); |
276 | 276 |
277 RUN(); | 277 RUN(); |
278 | 278 |
279 ASSERT_EQUAL_64(0x1000, x0); | 279 CHECK_EQUAL_64(0x1000, x0); |
280 ASSERT_EQUAL_64(0x1050, x1); | 280 CHECK_EQUAL_64(0x1050, x1); |
281 ASSERT_EQUAL_64(0x104f, x2); | 281 CHECK_EQUAL_64(0x104f, x2); |
282 ASSERT_EQUAL_64(0x1fff, x3); | 282 CHECK_EQUAL_64(0x1fff, x3); |
283 ASSERT_EQUAL_64(0xfffffff8, x4); | 283 CHECK_EQUAL_64(0xfffffff8, x4); |
284 ASSERT_EQUAL_64(0xfffffff8, x5); | 284 CHECK_EQUAL_64(0xfffffff8, x5); |
285 | 285 |
286 TEARDOWN(); | 286 TEARDOWN(); |
287 } | 287 } |
288 | 288 |
289 | 289 |
290 TEST(mvn) { | 290 TEST(mvn) { |
291 INIT_V8(); | 291 INIT_V8(); |
292 SETUP(); | 292 SETUP(); |
293 | 293 |
294 START(); | 294 START(); |
(...skipping 10 matching lines...)
305 __ Mvn(w10, Operand(w2, UXTB)); | 305 __ Mvn(w10, Operand(w2, UXTB)); |
306 __ Mvn(x11, Operand(x2, SXTB, 1)); | 306 __ Mvn(x11, Operand(x2, SXTB, 1)); |
307 __ Mvn(w12, Operand(w2, UXTH, 2)); | 307 __ Mvn(w12, Operand(w2, UXTH, 2)); |
308 __ Mvn(x13, Operand(x2, SXTH, 3)); | 308 __ Mvn(x13, Operand(x2, SXTH, 3)); |
309 __ Mvn(x14, Operand(w2, UXTW, 4)); | 309 __ Mvn(x14, Operand(w2, UXTW, 4)); |
310 __ Mvn(x15, Operand(w2, SXTW, 4)); | 310 __ Mvn(x15, Operand(w2, SXTW, 4)); |
311 END(); | 311 END(); |
312 | 312 |
313 RUN(); | 313 RUN(); |
314 | 314 |
315 ASSERT_EQUAL_64(0xfffff000, x0); | 315 CHECK_EQUAL_64(0xfffff000, x0); |
316 ASSERT_EQUAL_64(0xfffffffffffff000UL, x1); | 316 CHECK_EQUAL_64(0xfffffffffffff000UL, x1); |
317 ASSERT_EQUAL_64(0x00001fff, x2); | 317 CHECK_EQUAL_64(0x00001fff, x2); |
318 ASSERT_EQUAL_64(0x0000000000003fffUL, x3); | 318 CHECK_EQUAL_64(0x0000000000003fffUL, x3); |
319 ASSERT_EQUAL_64(0xe00001ff, x4); | 319 CHECK_EQUAL_64(0xe00001ff, x4); |
320 ASSERT_EQUAL_64(0xf0000000000000ffUL, x5); | 320 CHECK_EQUAL_64(0xf0000000000000ffUL, x5); |
321 ASSERT_EQUAL_64(0x00000001, x6); | 321 CHECK_EQUAL_64(0x00000001, x6); |
322 ASSERT_EQUAL_64(0x0, x7); | 322 CHECK_EQUAL_64(0x0, x7); |
323 ASSERT_EQUAL_64(0x7ff80000, x8); | 323 CHECK_EQUAL_64(0x7ff80000, x8); |
324 ASSERT_EQUAL_64(0x3ffc000000000000UL, x9); | 324 CHECK_EQUAL_64(0x3ffc000000000000UL, x9); |
325 ASSERT_EQUAL_64(0xffffff00, x10); | 325 CHECK_EQUAL_64(0xffffff00, x10); |
326 ASSERT_EQUAL_64(0x0000000000000001UL, x11); | 326 CHECK_EQUAL_64(0x0000000000000001UL, x11); |
327 ASSERT_EQUAL_64(0xffff8003, x12); | 327 CHECK_EQUAL_64(0xffff8003, x12); |
328 ASSERT_EQUAL_64(0xffffffffffff0007UL, x13); | 328 CHECK_EQUAL_64(0xffffffffffff0007UL, x13); |
329 ASSERT_EQUAL_64(0xfffffffffffe000fUL, x14); | 329 CHECK_EQUAL_64(0xfffffffffffe000fUL, x14); |
330 ASSERT_EQUAL_64(0xfffffffffffe000fUL, x15); | 330 CHECK_EQUAL_64(0xfffffffffffe000fUL, x15); |
331 | 331 |
332 TEARDOWN(); | 332 TEARDOWN(); |
333 } | 333 } |
334 | 334 |
335 | 335 |
336 TEST(mov) { | 336 TEST(mov) { |
337 INIT_V8(); | 337 INIT_V8(); |
338 SETUP(); | 338 SETUP(); |
339 | 339 |
340 START(); | 340 START(); |
(...skipping 36 matching lines...)
377 __ Mov(x22, Operand(x12, ROR, 14)); | 377 __ Mov(x22, Operand(x12, ROR, 14)); |
378 __ Mov(w23, Operand(w13, UXTB)); | 378 __ Mov(w23, Operand(w13, UXTB)); |
379 __ Mov(x24, Operand(x13, SXTB, 1)); | 379 __ Mov(x24, Operand(x13, SXTB, 1)); |
380 __ Mov(w25, Operand(w13, UXTH, 2)); | 380 __ Mov(w25, Operand(w13, UXTH, 2)); |
381 __ Mov(x26, Operand(x13, SXTH, 3)); | 381 __ Mov(x26, Operand(x13, SXTH, 3)); |
382 __ Mov(x27, Operand(w13, UXTW, 4)); | 382 __ Mov(x27, Operand(w13, UXTW, 4)); |
383 END(); | 383 END(); |
384 | 384 |
385 RUN(); | 385 RUN(); |
386 | 386 |
387 ASSERT_EQUAL_64(0x0123456789abcdefL, x0); | 387 CHECK_EQUAL_64(0x0123456789abcdefL, x0); |
388 ASSERT_EQUAL_64(0x00000000abcd0000L, x1); | 388 CHECK_EQUAL_64(0x00000000abcd0000L, x1); |
389 ASSERT_EQUAL_64(0xffffabcdffffffffL, x2); | 389 CHECK_EQUAL_64(0xffffabcdffffffffL, x2); |
390 ASSERT_EQUAL_64(0x5432ffffffffffffL, x3); | 390 CHECK_EQUAL_64(0x5432ffffffffffffL, x3); |
391 ASSERT_EQUAL_64(x4, x5); | 391 CHECK_EQUAL_64(x4, x5); |
392 ASSERT_EQUAL_32(-1, w6); | 392 CHECK_EQUAL_32(-1, w6); |
393 ASSERT_EQUAL_64(0x0123456789abcdefL, x7); | 393 CHECK_EQUAL_64(0x0123456789abcdefL, x7); |
394 ASSERT_EQUAL_32(0x89abcdefL, w8); | 394 CHECK_EQUAL_32(0x89abcdefL, w8); |
395 ASSERT_EQUAL_64(0x0123456789abcdefL, x9); | 395 CHECK_EQUAL_64(0x0123456789abcdefL, x9); |
396 ASSERT_EQUAL_32(0x89abcdefL, w10); | 396 CHECK_EQUAL_32(0x89abcdefL, w10); |
397 ASSERT_EQUAL_64(0x00000fff, x11); | 397 CHECK_EQUAL_64(0x00000fff, x11); |
398 ASSERT_EQUAL_64(0x0000000000000fffUL, x12); | 398 CHECK_EQUAL_64(0x0000000000000fffUL, x12); |
399 ASSERT_EQUAL_64(0x00001ffe, x13); | 399 CHECK_EQUAL_64(0x00001ffe, x13); |
400 ASSERT_EQUAL_64(0x0000000000003ffcUL, x14); | 400 CHECK_EQUAL_64(0x0000000000003ffcUL, x14); |
401 ASSERT_EQUAL_64(0x000001ff, x15); | 401 CHECK_EQUAL_64(0x000001ff, x15); |
402 ASSERT_EQUAL_64(0x00000000000000ffUL, x18); | 402 CHECK_EQUAL_64(0x00000000000000ffUL, x18); |
403 ASSERT_EQUAL_64(0x00000001, x19); | 403 CHECK_EQUAL_64(0x00000001, x19); |
404 ASSERT_EQUAL_64(0x0, x20); | 404 CHECK_EQUAL_64(0x0, x20); |
405 ASSERT_EQUAL_64(0x7ff80000, x21); | 405 CHECK_EQUAL_64(0x7ff80000, x21); |
406 ASSERT_EQUAL_64(0x3ffc000000000000UL, x22); | 406 CHECK_EQUAL_64(0x3ffc000000000000UL, x22); |
407 ASSERT_EQUAL_64(0x000000fe, x23); | 407 CHECK_EQUAL_64(0x000000fe, x23); |
408 ASSERT_EQUAL_64(0xfffffffffffffffcUL, x24); | 408 CHECK_EQUAL_64(0xfffffffffffffffcUL, x24); |
409 ASSERT_EQUAL_64(0x00007ff8, x25); | 409 CHECK_EQUAL_64(0x00007ff8, x25); |
410 ASSERT_EQUAL_64(0x000000000000fff0UL, x26); | 410 CHECK_EQUAL_64(0x000000000000fff0UL, x26); |
411 ASSERT_EQUAL_64(0x000000000001ffe0UL, x27); | 411 CHECK_EQUAL_64(0x000000000001ffe0UL, x27); |
412 | 412 |
413 TEARDOWN(); | 413 TEARDOWN(); |
414 } | 414 } |
415 | 415 |
416 | 416 |
417 TEST(mov_imm_w) { | 417 TEST(mov_imm_w) { |
418 INIT_V8(); | 418 INIT_V8(); |
419 SETUP(); | 419 SETUP(); |
420 | 420 |
421 START(); | 421 START(); |
422 __ Mov(w0, 0xffffffffL); | 422 __ Mov(w0, 0xffffffffL); |
423 __ Mov(w1, 0xffff1234L); | 423 __ Mov(w1, 0xffff1234L); |
424 __ Mov(w2, 0x1234ffffL); | 424 __ Mov(w2, 0x1234ffffL); |
425 __ Mov(w3, 0x00000000L); | 425 __ Mov(w3, 0x00000000L); |
426 __ Mov(w4, 0x00001234L); | 426 __ Mov(w4, 0x00001234L); |
427 __ Mov(w5, 0x12340000L); | 427 __ Mov(w5, 0x12340000L); |
428 __ Mov(w6, 0x12345678L); | 428 __ Mov(w6, 0x12345678L); |
429 __ Mov(w7, (int32_t)0x80000000); | 429 __ Mov(w7, (int32_t)0x80000000); |
430 __ Mov(w8, (int32_t)0xffff0000); | 430 __ Mov(w8, (int32_t)0xffff0000); |
431 __ Mov(w9, kWMinInt); | 431 __ Mov(w9, kWMinInt); |
432 END(); | 432 END(); |
433 | 433 |
434 RUN(); | 434 RUN(); |
435 | 435 |
436 ASSERT_EQUAL_64(0xffffffffL, x0); | 436 CHECK_EQUAL_64(0xffffffffL, x0); |
437 ASSERT_EQUAL_64(0xffff1234L, x1); | 437 CHECK_EQUAL_64(0xffff1234L, x1); |
438 ASSERT_EQUAL_64(0x1234ffffL, x2); | 438 CHECK_EQUAL_64(0x1234ffffL, x2); |
439 ASSERT_EQUAL_64(0x00000000L, x3); | 439 CHECK_EQUAL_64(0x00000000L, x3); |
440 ASSERT_EQUAL_64(0x00001234L, x4); | 440 CHECK_EQUAL_64(0x00001234L, x4); |
441 ASSERT_EQUAL_64(0x12340000L, x5); | 441 CHECK_EQUAL_64(0x12340000L, x5); |
442 ASSERT_EQUAL_64(0x12345678L, x6); | 442 CHECK_EQUAL_64(0x12345678L, x6); |
443 ASSERT_EQUAL_64(0x80000000L, x7); | 443 CHECK_EQUAL_64(0x80000000L, x7); |
444 ASSERT_EQUAL_64(0xffff0000L, x8); | 444 CHECK_EQUAL_64(0xffff0000L, x8); |
445 ASSERT_EQUAL_32(kWMinInt, w9); | 445 CHECK_EQUAL_32(kWMinInt, w9); |
446 | 446 |
447 TEARDOWN(); | 447 TEARDOWN(); |
448 } | 448 } |
449 | 449 |
450 | 450 |
451 TEST(mov_imm_x) { | 451 TEST(mov_imm_x) { |
452 INIT_V8(); | 452 INIT_V8(); |
453 SETUP(); | 453 SETUP(); |
454 | 454 |
455 START(); | 455 START(); |
(...skipping 21 matching lines...)
477 __ Mov(x23, 0x1234567800009abcL); | 477 __ Mov(x23, 0x1234567800009abcL); |
478 __ Mov(x24, 0x1234000056789abcL); | 478 __ Mov(x24, 0x1234000056789abcL); |
479 __ Mov(x25, 0x0000123456789abcL); | 479 __ Mov(x25, 0x0000123456789abcL); |
480 __ Mov(x26, 0x123456789abcdef0L); | 480 __ Mov(x26, 0x123456789abcdef0L); |
481 __ Mov(x27, 0xffff000000000001L); | 481 __ Mov(x27, 0xffff000000000001L); |
482 __ Mov(x28, 0x8000ffff00000000L); | 482 __ Mov(x28, 0x8000ffff00000000L); |
483 END(); | 483 END(); |
484 | 484 |
485 RUN(); | 485 RUN(); |
486 | 486 |
487 ASSERT_EQUAL_64(0xffffffffffff1234L, x1); | 487 CHECK_EQUAL_64(0xffffffffffff1234L, x1); |
488 ASSERT_EQUAL_64(0xffffffff12345678L, x2); | 488 CHECK_EQUAL_64(0xffffffff12345678L, x2); |
489 ASSERT_EQUAL_64(0xffff1234ffff5678L, x3); | 489 CHECK_EQUAL_64(0xffff1234ffff5678L, x3); |
490 ASSERT_EQUAL_64(0x1234ffffffff5678L, x4); | 490 CHECK_EQUAL_64(0x1234ffffffff5678L, x4); |
491 ASSERT_EQUAL_64(0x1234ffff5678ffffL, x5); | 491 CHECK_EQUAL_64(0x1234ffff5678ffffL, x5); |
492 ASSERT_EQUAL_64(0x12345678ffffffffL, x6); | 492 CHECK_EQUAL_64(0x12345678ffffffffL, x6); |
493 ASSERT_EQUAL_64(0x1234ffffffffffffL, x7); | 493 CHECK_EQUAL_64(0x1234ffffffffffffL, x7); |
494 ASSERT_EQUAL_64(0x123456789abcffffL, x8); | 494 CHECK_EQUAL_64(0x123456789abcffffL, x8); |
495 ASSERT_EQUAL_64(0x12345678ffff9abcL, x9); | 495 CHECK_EQUAL_64(0x12345678ffff9abcL, x9); |
496 ASSERT_EQUAL_64(0x1234ffff56789abcL, x10); | 496 CHECK_EQUAL_64(0x1234ffff56789abcL, x10); |
497 ASSERT_EQUAL_64(0xffff123456789abcL, x11); | 497 CHECK_EQUAL_64(0xffff123456789abcL, x11); |
498 ASSERT_EQUAL_64(0x0000000000000000L, x12); | 498 CHECK_EQUAL_64(0x0000000000000000L, x12); |
499 ASSERT_EQUAL_64(0x0000000000001234L, x13); | 499 CHECK_EQUAL_64(0x0000000000001234L, x13); |
500 ASSERT_EQUAL_64(0x0000000012345678L, x14); | 500 CHECK_EQUAL_64(0x0000000012345678L, x14); |
501 ASSERT_EQUAL_64(0x0000123400005678L, x15); | 501 CHECK_EQUAL_64(0x0000123400005678L, x15); |
502 ASSERT_EQUAL_64(0x1234000000005678L, x18); | 502 CHECK_EQUAL_64(0x1234000000005678L, x18); |
503 ASSERT_EQUAL_64(0x1234000056780000L, x19); | 503 CHECK_EQUAL_64(0x1234000056780000L, x19); |
504 ASSERT_EQUAL_64(0x1234567800000000L, x20); | 504 CHECK_EQUAL_64(0x1234567800000000L, x20); |
505 ASSERT_EQUAL_64(0x1234000000000000L, x21); | 505 CHECK_EQUAL_64(0x1234000000000000L, x21); |
506 ASSERT_EQUAL_64(0x123456789abc0000L, x22); | 506 CHECK_EQUAL_64(0x123456789abc0000L, x22); |
507 ASSERT_EQUAL_64(0x1234567800009abcL, x23); | 507 CHECK_EQUAL_64(0x1234567800009abcL, x23); |
508 ASSERT_EQUAL_64(0x1234000056789abcL, x24); | 508 CHECK_EQUAL_64(0x1234000056789abcL, x24); |
509 ASSERT_EQUAL_64(0x0000123456789abcL, x25); | 509 CHECK_EQUAL_64(0x0000123456789abcL, x25); |
510 ASSERT_EQUAL_64(0x123456789abcdef0L, x26); | 510 CHECK_EQUAL_64(0x123456789abcdef0L, x26); |
511 ASSERT_EQUAL_64(0xffff000000000001L, x27); | 511 CHECK_EQUAL_64(0xffff000000000001L, x27); |
512 ASSERT_EQUAL_64(0x8000ffff00000000L, x28); | 512 CHECK_EQUAL_64(0x8000ffff00000000L, x28); |
513 | 513 |
514 TEARDOWN(); | 514 TEARDOWN(); |
515 } | 515 } |
516 | 516 |
517 | 517 |
518 TEST(orr) { | 518 TEST(orr) { |
519 INIT_V8(); | 519 INIT_V8(); |
520 SETUP(); | 520 SETUP(); |
521 | 521 |
522 START(); | 522 START(); |
523 __ Mov(x0, 0xf0f0); | 523 __ Mov(x0, 0xf0f0); |
524 __ Mov(x1, 0xf00000ff); | 524 __ Mov(x1, 0xf00000ff); |
525 | 525 |
526 __ Orr(x2, x0, Operand(x1)); | 526 __ Orr(x2, x0, Operand(x1)); |
527 __ Orr(w3, w0, Operand(w1, LSL, 28)); | 527 __ Orr(w3, w0, Operand(w1, LSL, 28)); |
528 __ Orr(x4, x0, Operand(x1, LSL, 32)); | 528 __ Orr(x4, x0, Operand(x1, LSL, 32)); |
529 __ Orr(x5, x0, Operand(x1, LSR, 4)); | 529 __ Orr(x5, x0, Operand(x1, LSR, 4)); |
530 __ Orr(w6, w0, Operand(w1, ASR, 4)); | 530 __ Orr(w6, w0, Operand(w1, ASR, 4)); |
531 __ Orr(x7, x0, Operand(x1, ASR, 4)); | 531 __ Orr(x7, x0, Operand(x1, ASR, 4)); |
532 __ Orr(w8, w0, Operand(w1, ROR, 12)); | 532 __ Orr(w8, w0, Operand(w1, ROR, 12)); |
533 __ Orr(x9, x0, Operand(x1, ROR, 12)); | 533 __ Orr(x9, x0, Operand(x1, ROR, 12)); |
534 __ Orr(w10, w0, Operand(0xf)); | 534 __ Orr(w10, w0, Operand(0xf)); |
535 __ Orr(x11, x0, Operand(0xf0000000f0000000L)); | 535 __ Orr(x11, x0, Operand(0xf0000000f0000000L)); |
536 END(); | 536 END(); |
537 | 537 |
538 RUN(); | 538 RUN(); |
539 | 539 |
540 ASSERT_EQUAL_64(0xf000f0ff, x2); | 540 CHECK_EQUAL_64(0xf000f0ff, x2); |
541 ASSERT_EQUAL_64(0xf000f0f0, x3); | 541 CHECK_EQUAL_64(0xf000f0f0, x3); |
542 ASSERT_EQUAL_64(0xf00000ff0000f0f0L, x4); | 542 CHECK_EQUAL_64(0xf00000ff0000f0f0L, x4); |
543 ASSERT_EQUAL_64(0x0f00f0ff, x5); | 543 CHECK_EQUAL_64(0x0f00f0ff, x5); |
544 ASSERT_EQUAL_64(0xff00f0ff, x6); | 544 CHECK_EQUAL_64(0xff00f0ff, x6); |
545 ASSERT_EQUAL_64(0x0f00f0ff, x7); | 545 CHECK_EQUAL_64(0x0f00f0ff, x7); |
546 ASSERT_EQUAL_64(0x0ffff0f0, x8); | 546 CHECK_EQUAL_64(0x0ffff0f0, x8); |
547 ASSERT_EQUAL_64(0x0ff00000000ff0f0L, x9); | 547 CHECK_EQUAL_64(0x0ff00000000ff0f0L, x9); |
548 ASSERT_EQUAL_64(0xf0ff, x10); | 548 CHECK_EQUAL_64(0xf0ff, x10); |
549 ASSERT_EQUAL_64(0xf0000000f000f0f0L, x11); | 549 CHECK_EQUAL_64(0xf0000000f000f0f0L, x11); |
550 | 550 |
551 TEARDOWN(); | 551 TEARDOWN(); |
552 } | 552 } |
553 | 553 |
554 | 554 |
555 TEST(orr_extend) { | 555 TEST(orr_extend) { |
556 INIT_V8(); | 556 INIT_V8(); |
557 SETUP(); | 557 SETUP(); |
558 | 558 |
559 START(); | 559 START(); |
560 __ Mov(x0, 1); | 560 __ Mov(x0, 1); |
561 __ Mov(x1, 0x8000000080008080UL); | 561 __ Mov(x1, 0x8000000080008080UL); |
562 __ Orr(w6, w0, Operand(w1, UXTB)); | 562 __ Orr(w6, w0, Operand(w1, UXTB)); |
563 __ Orr(x7, x0, Operand(x1, UXTH, 1)); | 563 __ Orr(x7, x0, Operand(x1, UXTH, 1)); |
564 __ Orr(w8, w0, Operand(w1, UXTW, 2)); | 564 __ Orr(w8, w0, Operand(w1, UXTW, 2)); |
565 __ Orr(x9, x0, Operand(x1, UXTX, 3)); | 565 __ Orr(x9, x0, Operand(x1, UXTX, 3)); |
566 __ Orr(w10, w0, Operand(w1, SXTB)); | 566 __ Orr(w10, w0, Operand(w1, SXTB)); |
567 __ Orr(x11, x0, Operand(x1, SXTH, 1)); | 567 __ Orr(x11, x0, Operand(x1, SXTH, 1)); |
568 __ Orr(x12, x0, Operand(x1, SXTW, 2)); | 568 __ Orr(x12, x0, Operand(x1, SXTW, 2)); |
569 __ Orr(x13, x0, Operand(x1, SXTX, 3)); | 569 __ Orr(x13, x0, Operand(x1, SXTX, 3)); |
570 END(); | 570 END(); |
571 | 571 |
572 RUN(); | 572 RUN(); |
573 | 573 |
574 ASSERT_EQUAL_64(0x00000081, x6); | 574 CHECK_EQUAL_64(0x00000081, x6); |
575 ASSERT_EQUAL_64(0x00010101, x7); | 575 CHECK_EQUAL_64(0x00010101, x7); |
576 ASSERT_EQUAL_64(0x00020201, x8); | 576 CHECK_EQUAL_64(0x00020201, x8); |
577 ASSERT_EQUAL_64(0x0000000400040401UL, x9); | 577 CHECK_EQUAL_64(0x0000000400040401UL, x9); |
578 ASSERT_EQUAL_64(0x00000000ffffff81UL, x10); | 578 CHECK_EQUAL_64(0x00000000ffffff81UL, x10); |
579 ASSERT_EQUAL_64(0xffffffffffff0101UL, x11); | 579 CHECK_EQUAL_64(0xffffffffffff0101UL, x11); |
580 ASSERT_EQUAL_64(0xfffffffe00020201UL, x12); | 580 CHECK_EQUAL_64(0xfffffffe00020201UL, x12); |
581 ASSERT_EQUAL_64(0x0000000400040401UL, x13); | 581 CHECK_EQUAL_64(0x0000000400040401UL, x13); |
582 | 582 |
583 TEARDOWN(); | 583 TEARDOWN(); |
584 } | 584 } |
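
The expected values in orr_extend follow directly from the operand-extend semantics (extend the narrow source, optionally shift, then OR). As an illustrative sanity check only — plain standalone C++, not part of the test file or this CL — the first few results can be reproduced by hand:

#include <cassert>
#include <cstdint>

int main() {
  const uint64_t x0 = 1;
  const uint64_t x1 = 0x8000000080008080UL;
  // Operand(w1, UXTB): zero-extend the low byte of w1, then OR.
  uint32_t w6 = static_cast<uint32_t>(x0) | static_cast<uint8_t>(x1);
  assert(w6 == 0x00000081u);
  // Operand(x1, UXTH, 1): zero-extend the low halfword, shift left by 1.
  uint64_t x7 = x0 | (static_cast<uint64_t>(static_cast<uint16_t>(x1)) << 1);
  assert(x7 == 0x0000000000010101UL);
  // Operand(x1, SXTW, 2): sign-extend the low word, shift left by 2.
  uint64_t x12 = x0 | (static_cast<uint64_t>(
      static_cast<int64_t>(static_cast<int32_t>(x1))) << 2);
  assert(x12 == 0xfffffffe00020201UL);
  return 0;
}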
585 | 585 |
586 | 586 |
587 TEST(bitwise_wide_imm) { | 587 TEST(bitwise_wide_imm) { |
588 INIT_V8(); | 588 INIT_V8(); |
589 SETUP(); | 589 SETUP(); |
590 | 590 |
591 START(); | 591 START(); |
592 __ Mov(x0, 0); | 592 __ Mov(x0, 0); |
593 __ Mov(x1, 0xf0f0f0f0f0f0f0f0UL); | 593 __ Mov(x1, 0xf0f0f0f0f0f0f0f0UL); |
594 | 594 |
595 __ Orr(x10, x0, Operand(0x1234567890abcdefUL)); | 595 __ Orr(x10, x0, Operand(0x1234567890abcdefUL)); |
596 __ Orr(w11, w1, Operand(0x90abcdef)); | 596 __ Orr(w11, w1, Operand(0x90abcdef)); |
597 | 597 |
598 __ Orr(w12, w0, kWMinInt); | 598 __ Orr(w12, w0, kWMinInt); |
599 __ Eor(w13, w0, kWMinInt); | 599 __ Eor(w13, w0, kWMinInt); |
600 END(); | 600 END(); |
601 | 601 |
602 RUN(); | 602 RUN(); |
603 | 603 |
604 ASSERT_EQUAL_64(0, x0); | 604 CHECK_EQUAL_64(0, x0); |
605 ASSERT_EQUAL_64(0xf0f0f0f0f0f0f0f0UL, x1); | 605 CHECK_EQUAL_64(0xf0f0f0f0f0f0f0f0UL, x1); |
606 ASSERT_EQUAL_64(0x1234567890abcdefUL, x10); | 606 CHECK_EQUAL_64(0x1234567890abcdefUL, x10); |
607 ASSERT_EQUAL_64(0xf0fbfdffUL, x11); | 607 CHECK_EQUAL_64(0xf0fbfdffUL, x11); |
608 ASSERT_EQUAL_32(kWMinInt, w12); | 608 CHECK_EQUAL_32(kWMinInt, w12); |
609 ASSERT_EQUAL_32(kWMinInt, w13); | 609 CHECK_EQUAL_32(kWMinInt, w13); |
610 | 610 |
611 TEARDOWN(); | 611 TEARDOWN(); |
612 } | 612 } |
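
For the wide-immediate case, the w11 result is a 32-bit OR of the low word of x1 with the immediate, with the X view of the destination zero-extended. A quick hand check (illustrative sketch, not from the test file):

#include <cassert>
#include <cstdint>

int main() {
  const uint64_t x1 = 0xf0f0f0f0f0f0f0f0UL;
  // Orr(w11, w1, Operand(0x90abcdef)) operates on the low 32 bits only.
  uint64_t x11 = static_cast<uint32_t>(x1) | 0x90abcdefu;
  assert(x11 == 0xf0fbfdffUL);
  return 0;
}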
613 | 613 |
614 | 614 |
615 TEST(orn) { | 615 TEST(orn) { |
616 INIT_V8(); | 616 INIT_V8(); |
617 SETUP(); | 617 SETUP(); |
618 | 618 |
619 START(); | 619 START(); |
620 __ Mov(x0, 0xf0f0); | 620 __ Mov(x0, 0xf0f0); |
621 __ Mov(x1, 0xf00000ff); | 621 __ Mov(x1, 0xf00000ff); |
622 | 622 |
623 __ Orn(x2, x0, Operand(x1)); | 623 __ Orn(x2, x0, Operand(x1)); |
624 __ Orn(w3, w0, Operand(w1, LSL, 4)); | 624 __ Orn(w3, w0, Operand(w1, LSL, 4)); |
625 __ Orn(x4, x0, Operand(x1, LSL, 4)); | 625 __ Orn(x4, x0, Operand(x1, LSL, 4)); |
626 __ Orn(x5, x0, Operand(x1, LSR, 1)); | 626 __ Orn(x5, x0, Operand(x1, LSR, 1)); |
627 __ Orn(w6, w0, Operand(w1, ASR, 1)); | 627 __ Orn(w6, w0, Operand(w1, ASR, 1)); |
628 __ Orn(x7, x0, Operand(x1, ASR, 1)); | 628 __ Orn(x7, x0, Operand(x1, ASR, 1)); |
629 __ Orn(w8, w0, Operand(w1, ROR, 16)); | 629 __ Orn(w8, w0, Operand(w1, ROR, 16)); |
630 __ Orn(x9, x0, Operand(x1, ROR, 16)); | 630 __ Orn(x9, x0, Operand(x1, ROR, 16)); |
631 __ Orn(w10, w0, Operand(0xffff)); | 631 __ Orn(w10, w0, Operand(0xffff)); |
632 __ Orn(x11, x0, Operand(0xffff0000ffffL)); | 632 __ Orn(x11, x0, Operand(0xffff0000ffffL)); |
633 END(); | 633 END(); |
634 | 634 |
635 RUN(); | 635 RUN(); |
636 | 636 |
637 ASSERT_EQUAL_64(0xffffffff0ffffff0L, x2); | 637 CHECK_EQUAL_64(0xffffffff0ffffff0L, x2); |
638 ASSERT_EQUAL_64(0xfffff0ff, x3); | 638 CHECK_EQUAL_64(0xfffff0ff, x3); |
639 ASSERT_EQUAL_64(0xfffffff0fffff0ffL, x4); | 639 CHECK_EQUAL_64(0xfffffff0fffff0ffL, x4); |
640 ASSERT_EQUAL_64(0xffffffff87fffff0L, x5); | 640 CHECK_EQUAL_64(0xffffffff87fffff0L, x5); |
641 ASSERT_EQUAL_64(0x07fffff0, x6); | 641 CHECK_EQUAL_64(0x07fffff0, x6); |
642 ASSERT_EQUAL_64(0xffffffff87fffff0L, x7); | 642 CHECK_EQUAL_64(0xffffffff87fffff0L, x7); |
643 ASSERT_EQUAL_64(0xff00ffff, x8); | 643 CHECK_EQUAL_64(0xff00ffff, x8); |
644 ASSERT_EQUAL_64(0xff00ffffffffffffL, x9); | 644 CHECK_EQUAL_64(0xff00ffffffffffffL, x9); |
645 ASSERT_EQUAL_64(0xfffff0f0, x10); | 645 CHECK_EQUAL_64(0xfffff0f0, x10); |
646 ASSERT_EQUAL_64(0xffff0000fffff0f0L, x11); | 646 CHECK_EQUAL_64(0xffff0000fffff0f0L, x11); |
647 | 647 |
648 TEARDOWN(); | 648 TEARDOWN(); |
649 } | 649 } |
650 | 650 |
651 | 651 |
652 TEST(orn_extend) { | 652 TEST(orn_extend) { |
653 INIT_V8(); | 653 INIT_V8(); |
654 SETUP(); | 654 SETUP(); |
655 | 655 |
656 START(); | 656 START(); |
657 __ Mov(x0, 1); | 657 __ Mov(x0, 1); |
658 __ Mov(x1, 0x8000000080008081UL); | 658 __ Mov(x1, 0x8000000080008081UL); |
659 __ Orn(w6, w0, Operand(w1, UXTB)); | 659 __ Orn(w6, w0, Operand(w1, UXTB)); |
660 __ Orn(x7, x0, Operand(x1, UXTH, 1)); | 660 __ Orn(x7, x0, Operand(x1, UXTH, 1)); |
661 __ Orn(w8, w0, Operand(w1, UXTW, 2)); | 661 __ Orn(w8, w0, Operand(w1, UXTW, 2)); |
662 __ Orn(x9, x0, Operand(x1, UXTX, 3)); | 662 __ Orn(x9, x0, Operand(x1, UXTX, 3)); |
663 __ Orn(w10, w0, Operand(w1, SXTB)); | 663 __ Orn(w10, w0, Operand(w1, SXTB)); |
664 __ Orn(x11, x0, Operand(x1, SXTH, 1)); | 664 __ Orn(x11, x0, Operand(x1, SXTH, 1)); |
665 __ Orn(x12, x0, Operand(x1, SXTW, 2)); | 665 __ Orn(x12, x0, Operand(x1, SXTW, 2)); |
666 __ Orn(x13, x0, Operand(x1, SXTX, 3)); | 666 __ Orn(x13, x0, Operand(x1, SXTX, 3)); |
667 END(); | 667 END(); |
668 | 668 |
669 RUN(); | 669 RUN(); |
670 | 670 |
671 ASSERT_EQUAL_64(0xffffff7f, x6); | 671 CHECK_EQUAL_64(0xffffff7f, x6); |
672 ASSERT_EQUAL_64(0xfffffffffffefefdUL, x7); | 672 CHECK_EQUAL_64(0xfffffffffffefefdUL, x7); |
673 ASSERT_EQUAL_64(0xfffdfdfb, x8); | 673 CHECK_EQUAL_64(0xfffdfdfb, x8); |
674 ASSERT_EQUAL_64(0xfffffffbfffbfbf7UL, x9); | 674 CHECK_EQUAL_64(0xfffffffbfffbfbf7UL, x9); |
675 ASSERT_EQUAL_64(0x0000007f, x10); | 675 CHECK_EQUAL_64(0x0000007f, x10); |
676 ASSERT_EQUAL_64(0x0000fefd, x11); | 676 CHECK_EQUAL_64(0x0000fefd, x11); |
677 ASSERT_EQUAL_64(0x00000001fffdfdfbUL, x12); | 677 CHECK_EQUAL_64(0x00000001fffdfdfbUL, x12); |
678 ASSERT_EQUAL_64(0xfffffffbfffbfbf7UL, x13); | 678 CHECK_EQUAL_64(0xfffffffbfffbfbf7UL, x13); |
679 | 679 |
680 TEARDOWN(); | 680 TEARDOWN(); |
681 } | 681 } |
682 | 682 |
683 | 683 |
684 TEST(and_) { | 684 TEST(and_) { |
685 INIT_V8(); | 685 INIT_V8(); |
686 SETUP(); | 686 SETUP(); |
687 | 687 |
688 START(); | 688 START(); |
689 __ Mov(x0, 0xfff0); | 689 __ Mov(x0, 0xfff0); |
690 __ Mov(x1, 0xf00000ff); | 690 __ Mov(x1, 0xf00000ff); |
691 | 691 |
692 __ And(x2, x0, Operand(x1)); | 692 __ And(x2, x0, Operand(x1)); |
693 __ And(w3, w0, Operand(w1, LSL, 4)); | 693 __ And(w3, w0, Operand(w1, LSL, 4)); |
694 __ And(x4, x0, Operand(x1, LSL, 4)); | 694 __ And(x4, x0, Operand(x1, LSL, 4)); |
695 __ And(x5, x0, Operand(x1, LSR, 1)); | 695 __ And(x5, x0, Operand(x1, LSR, 1)); |
696 __ And(w6, w0, Operand(w1, ASR, 20)); | 696 __ And(w6, w0, Operand(w1, ASR, 20)); |
697 __ And(x7, x0, Operand(x1, ASR, 20)); | 697 __ And(x7, x0, Operand(x1, ASR, 20)); |
698 __ And(w8, w0, Operand(w1, ROR, 28)); | 698 __ And(w8, w0, Operand(w1, ROR, 28)); |
699 __ And(x9, x0, Operand(x1, ROR, 28)); | 699 __ And(x9, x0, Operand(x1, ROR, 28)); |
700 __ And(w10, w0, Operand(0xff00)); | 700 __ And(w10, w0, Operand(0xff00)); |
701 __ And(x11, x0, Operand(0xff)); | 701 __ And(x11, x0, Operand(0xff)); |
702 END(); | 702 END(); |
703 | 703 |
704 RUN(); | 704 RUN(); |
705 | 705 |
706 ASSERT_EQUAL_64(0x000000f0, x2); | 706 CHECK_EQUAL_64(0x000000f0, x2); |
707 ASSERT_EQUAL_64(0x00000ff0, x3); | 707 CHECK_EQUAL_64(0x00000ff0, x3); |
708 ASSERT_EQUAL_64(0x00000ff0, x4); | 708 CHECK_EQUAL_64(0x00000ff0, x4); |
709 ASSERT_EQUAL_64(0x00000070, x5); | 709 CHECK_EQUAL_64(0x00000070, x5); |
710 ASSERT_EQUAL_64(0x0000ff00, x6); | 710 CHECK_EQUAL_64(0x0000ff00, x6); |
711 ASSERT_EQUAL_64(0x00000f00, x7); | 711 CHECK_EQUAL_64(0x00000f00, x7); |
712 ASSERT_EQUAL_64(0x00000ff0, x8); | 712 CHECK_EQUAL_64(0x00000ff0, x8); |
713 ASSERT_EQUAL_64(0x00000000, x9); | 713 CHECK_EQUAL_64(0x00000000, x9); |
714 ASSERT_EQUAL_64(0x0000ff00, x10); | 714 CHECK_EQUAL_64(0x0000ff00, x10); |
715 ASSERT_EQUAL_64(0x000000f0, x11); | 715 CHECK_EQUAL_64(0x000000f0, x11); |
716 | 716 |
717 TEARDOWN(); | 717 TEARDOWN(); |
718 } | 718 } |
719 | 719 |
720 | 720 |
721 TEST(and_extend) { | 721 TEST(and_extend) { |
722 INIT_V8(); | 722 INIT_V8(); |
723 SETUP(); | 723 SETUP(); |
724 | 724 |
725 START(); | 725 START(); |
726 __ Mov(x0, 0xffffffffffffffffUL); | 726 __ Mov(x0, 0xffffffffffffffffUL); |
727 __ Mov(x1, 0x8000000080008081UL); | 727 __ Mov(x1, 0x8000000080008081UL); |
728 __ And(w6, w0, Operand(w1, UXTB)); | 728 __ And(w6, w0, Operand(w1, UXTB)); |
729 __ And(x7, x0, Operand(x1, UXTH, 1)); | 729 __ And(x7, x0, Operand(x1, UXTH, 1)); |
730 __ And(w8, w0, Operand(w1, UXTW, 2)); | 730 __ And(w8, w0, Operand(w1, UXTW, 2)); |
731 __ And(x9, x0, Operand(x1, UXTX, 3)); | 731 __ And(x9, x0, Operand(x1, UXTX, 3)); |
732 __ And(w10, w0, Operand(w1, SXTB)); | 732 __ And(w10, w0, Operand(w1, SXTB)); |
733 __ And(x11, x0, Operand(x1, SXTH, 1)); | 733 __ And(x11, x0, Operand(x1, SXTH, 1)); |
734 __ And(x12, x0, Operand(x1, SXTW, 2)); | 734 __ And(x12, x0, Operand(x1, SXTW, 2)); |
735 __ And(x13, x0, Operand(x1, SXTX, 3)); | 735 __ And(x13, x0, Operand(x1, SXTX, 3)); |
736 END(); | 736 END(); |
737 | 737 |
738 RUN(); | 738 RUN(); |
739 | 739 |
740 ASSERT_EQUAL_64(0x00000081, x6); | 740 CHECK_EQUAL_64(0x00000081, x6); |
741 ASSERT_EQUAL_64(0x00010102, x7); | 741 CHECK_EQUAL_64(0x00010102, x7); |
742 ASSERT_EQUAL_64(0x00020204, x8); | 742 CHECK_EQUAL_64(0x00020204, x8); |
743 ASSERT_EQUAL_64(0x0000000400040408UL, x9); | 743 CHECK_EQUAL_64(0x0000000400040408UL, x9); |
744 ASSERT_EQUAL_64(0xffffff81, x10); | 744 CHECK_EQUAL_64(0xffffff81, x10); |
745 ASSERT_EQUAL_64(0xffffffffffff0102UL, x11); | 745 CHECK_EQUAL_64(0xffffffffffff0102UL, x11); |
746 ASSERT_EQUAL_64(0xfffffffe00020204UL, x12); | 746 CHECK_EQUAL_64(0xfffffffe00020204UL, x12); |
747 ASSERT_EQUAL_64(0x0000000400040408UL, x13); | 747 CHECK_EQUAL_64(0x0000000400040408UL, x13); |
748 | 748 |
749 TEARDOWN(); | 749 TEARDOWN(); |
750 } | 750 } |
751 | 751 |
752 | 752 |
753 TEST(ands) { | 753 TEST(ands) { |
754 INIT_V8(); | 754 INIT_V8(); |
755 SETUP(); | 755 SETUP(); |
756 | 756 |
757 START(); | 757 START(); |
758 __ Mov(x1, 0xf00000ff); | 758 __ Mov(x1, 0xf00000ff); |
759 __ Ands(w0, w1, Operand(w1)); | 759 __ Ands(w0, w1, Operand(w1)); |
760 END(); | 760 END(); |
761 | 761 |
762 RUN(); | 762 RUN(); |
763 | 763 |
764 ASSERT_EQUAL_NZCV(NFlag); | 764 CHECK_EQUAL_NZCV(NFlag); |
765 ASSERT_EQUAL_64(0xf00000ff, x0); | 765 CHECK_EQUAL_64(0xf00000ff, x0); |
766 | 766 |
767 START(); | 767 START(); |
768 __ Mov(x0, 0xfff0); | 768 __ Mov(x0, 0xfff0); |
769 __ Mov(x1, 0xf00000ff); | 769 __ Mov(x1, 0xf00000ff); |
770 __ Ands(w0, w0, Operand(w1, LSR, 4)); | 770 __ Ands(w0, w0, Operand(w1, LSR, 4)); |
771 END(); | 771 END(); |
772 | 772 |
773 RUN(); | 773 RUN(); |
774 | 774 |
775 ASSERT_EQUAL_NZCV(ZFlag); | 775 CHECK_EQUAL_NZCV(ZFlag); |
776 ASSERT_EQUAL_64(0x00000000, x0); | 776 CHECK_EQUAL_64(0x00000000, x0); |
777 | 777 |
778 START(); | 778 START(); |
779 __ Mov(x0, 0x8000000000000000L); | 779 __ Mov(x0, 0x8000000000000000L); |
780 __ Mov(x1, 0x00000001); | 780 __ Mov(x1, 0x00000001); |
781 __ Ands(x0, x0, Operand(x1, ROR, 1)); | 781 __ Ands(x0, x0, Operand(x1, ROR, 1)); |
782 END(); | 782 END(); |
783 | 783 |
784 RUN(); | 784 RUN(); |
785 | 785 |
786 ASSERT_EQUAL_NZCV(NFlag); | 786 CHECK_EQUAL_NZCV(NFlag); |
787 ASSERT_EQUAL_64(0x8000000000000000L, x0); | 787 CHECK_EQUAL_64(0x8000000000000000L, x0); |
788 | 788 |
789 START(); | 789 START(); |
790 __ Mov(x0, 0xfff0); | 790 __ Mov(x0, 0xfff0); |
791 __ Ands(w0, w0, Operand(0xf)); | 791 __ Ands(w0, w0, Operand(0xf)); |
792 END(); | 792 END(); |
793 | 793 |
794 RUN(); | 794 RUN(); |
795 | 795 |
796 ASSERT_EQUAL_NZCV(ZFlag); | 796 CHECK_EQUAL_NZCV(ZFlag); |
797 ASSERT_EQUAL_64(0x00000000, x0); | 797 CHECK_EQUAL_64(0x00000000, x0); |
798 | 798 |
799 START(); | 799 START(); |
800 __ Mov(x0, 0xff000000); | 800 __ Mov(x0, 0xff000000); |
801 __ Ands(w0, w0, Operand(0x80000000)); | 801 __ Ands(w0, w0, Operand(0x80000000)); |
802 END(); | 802 END(); |
803 | 803 |
804 RUN(); | 804 RUN(); |
805 | 805 |
806 ASSERT_EQUAL_NZCV(NFlag); | 806 CHECK_EQUAL_NZCV(NFlag); |
807 ASSERT_EQUAL_64(0x80000000, x0); | 807 CHECK_EQUAL_64(0x80000000, x0); |
808 | 808 |
809 TEARDOWN(); | 809 TEARDOWN(); |
810 } | 810 } |
811 | 811 |
812 | 812 |
813 TEST(bic) { | 813 TEST(bic) { |
814 INIT_V8(); | 814 INIT_V8(); |
815 SETUP(); | 815 SETUP(); |
816 | 816 |
817 START(); | 817 START(); |
(...skipping 17 matching lines...)
835 // test infrastructure requires that csp be restored to its original value. | 835 // test infrastructure requires that csp be restored to its original value. |
836 __ Mov(x20, csp); | 836 __ Mov(x20, csp); |
837 __ Mov(x0, 0xffffff); | 837 __ Mov(x0, 0xffffff); |
838 __ Bic(csp, x0, Operand(0xabcdef)); | 838 __ Bic(csp, x0, Operand(0xabcdef)); |
839 __ Mov(x21, csp); | 839 __ Mov(x21, csp); |
840 __ Mov(csp, x20); | 840 __ Mov(csp, x20); |
841 END(); | 841 END(); |
842 | 842 |
843 RUN(); | 843 RUN(); |
844 | 844 |
845 ASSERT_EQUAL_64(0x0000ff00, x2); | 845 CHECK_EQUAL_64(0x0000ff00, x2); |
846 ASSERT_EQUAL_64(0x0000f000, x3); | 846 CHECK_EQUAL_64(0x0000f000, x3); |
847 ASSERT_EQUAL_64(0x0000f000, x4); | 847 CHECK_EQUAL_64(0x0000f000, x4); |
848 ASSERT_EQUAL_64(0x0000ff80, x5); | 848 CHECK_EQUAL_64(0x0000ff80, x5); |
849 ASSERT_EQUAL_64(0x000000f0, x6); | 849 CHECK_EQUAL_64(0x000000f0, x6); |
850 ASSERT_EQUAL_64(0x0000f0f0, x7); | 850 CHECK_EQUAL_64(0x0000f0f0, x7); |
851 ASSERT_EQUAL_64(0x0000f000, x8); | 851 CHECK_EQUAL_64(0x0000f000, x8); |
852 ASSERT_EQUAL_64(0x0000ff00, x9); | 852 CHECK_EQUAL_64(0x0000ff00, x9); |
853 ASSERT_EQUAL_64(0x0000ffe0, x10); | 853 CHECK_EQUAL_64(0x0000ffe0, x10); |
854 ASSERT_EQUAL_64(0x0000fef0, x11); | 854 CHECK_EQUAL_64(0x0000fef0, x11); |
855 | 855 |
856 ASSERT_EQUAL_64(0x543210, x21); | 856 CHECK_EQUAL_64(0x543210, x21); |
857 | 857 |
858 TEARDOWN(); | 858 TEARDOWN(); |
859 } | 859 } |
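
The x21 value read back from csp is simply a bit-clear of the immediate. Illustrative check in plain C++ (not part of the test file), using the values from the test above:

#include <cassert>
#include <cstdint>

int main() {
  // Bic(csp, x0, Operand(0xabcdef)) with x0 == 0xffffff clears the
  // immediate's bits: 0xffffff & ~0xabcdef.
  const uint64_t x0 = 0xffffff;
  uint64_t result = x0 & ~static_cast<uint64_t>(0xabcdef);
  assert(result == 0x543210UL);
  return 0;
}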
860 | 860 |
861 | 861 |
862 TEST(bic_extend) { | 862 TEST(bic_extend) { |
863 INIT_V8(); | 863 INIT_V8(); |
864 SETUP(); | 864 SETUP(); |
865 | 865 |
866 START(); | 866 START(); |
867 __ Mov(x0, 0xffffffffffffffffUL); | 867 __ Mov(x0, 0xffffffffffffffffUL); |
868 __ Mov(x1, 0x8000000080008081UL); | 868 __ Mov(x1, 0x8000000080008081UL); |
869 __ Bic(w6, w0, Operand(w1, UXTB)); | 869 __ Bic(w6, w0, Operand(w1, UXTB)); |
870 __ Bic(x7, x0, Operand(x1, UXTH, 1)); | 870 __ Bic(x7, x0, Operand(x1, UXTH, 1)); |
871 __ Bic(w8, w0, Operand(w1, UXTW, 2)); | 871 __ Bic(w8, w0, Operand(w1, UXTW, 2)); |
872 __ Bic(x9, x0, Operand(x1, UXTX, 3)); | 872 __ Bic(x9, x0, Operand(x1, UXTX, 3)); |
873 __ Bic(w10, w0, Operand(w1, SXTB)); | 873 __ Bic(w10, w0, Operand(w1, SXTB)); |
874 __ Bic(x11, x0, Operand(x1, SXTH, 1)); | 874 __ Bic(x11, x0, Operand(x1, SXTH, 1)); |
875 __ Bic(x12, x0, Operand(x1, SXTW, 2)); | 875 __ Bic(x12, x0, Operand(x1, SXTW, 2)); |
876 __ Bic(x13, x0, Operand(x1, SXTX, 3)); | 876 __ Bic(x13, x0, Operand(x1, SXTX, 3)); |
877 END(); | 877 END(); |
878 | 878 |
879 RUN(); | 879 RUN(); |
880 | 880 |
881 ASSERT_EQUAL_64(0xffffff7e, x6); | 881 CHECK_EQUAL_64(0xffffff7e, x6); |
882 ASSERT_EQUAL_64(0xfffffffffffefefdUL, x7); | 882 CHECK_EQUAL_64(0xfffffffffffefefdUL, x7); |
883 ASSERT_EQUAL_64(0xfffdfdfb, x8); | 883 CHECK_EQUAL_64(0xfffdfdfb, x8); |
884 ASSERT_EQUAL_64(0xfffffffbfffbfbf7UL, x9); | 884 CHECK_EQUAL_64(0xfffffffbfffbfbf7UL, x9); |
885 ASSERT_EQUAL_64(0x0000007e, x10); | 885 CHECK_EQUAL_64(0x0000007e, x10); |
886 ASSERT_EQUAL_64(0x0000fefd, x11); | 886 CHECK_EQUAL_64(0x0000fefd, x11); |
887 ASSERT_EQUAL_64(0x00000001fffdfdfbUL, x12); | 887 CHECK_EQUAL_64(0x00000001fffdfdfbUL, x12); |
888 ASSERT_EQUAL_64(0xfffffffbfffbfbf7UL, x13); | 888 CHECK_EQUAL_64(0xfffffffbfffbfbf7UL, x13); |
889 | 889 |
890 TEARDOWN(); | 890 TEARDOWN(); |
891 } | 891 } |
892 | 892 |
893 | 893 |
894 TEST(bics) { | 894 TEST(bics) { |
895 INIT_V8(); | 895 INIT_V8(); |
896 SETUP(); | 896 SETUP(); |
897 | 897 |
898 START(); | 898 START(); |
899 __ Mov(x1, 0xffff); | 899 __ Mov(x1, 0xffff); |
900 __ Bics(w0, w1, Operand(w1)); | 900 __ Bics(w0, w1, Operand(w1)); |
901 END(); | 901 END(); |
902 | 902 |
903 RUN(); | 903 RUN(); |
904 | 904 |
905 ASSERT_EQUAL_NZCV(ZFlag); | 905 CHECK_EQUAL_NZCV(ZFlag); |
906 ASSERT_EQUAL_64(0x00000000, x0); | 906 CHECK_EQUAL_64(0x00000000, x0); |
907 | 907 |
908 START(); | 908 START(); |
909 __ Mov(x0, 0xffffffff); | 909 __ Mov(x0, 0xffffffff); |
910 __ Bics(w0, w0, Operand(w0, LSR, 1)); | 910 __ Bics(w0, w0, Operand(w0, LSR, 1)); |
911 END(); | 911 END(); |
912 | 912 |
913 RUN(); | 913 RUN(); |
914 | 914 |
915 ASSERT_EQUAL_NZCV(NFlag); | 915 CHECK_EQUAL_NZCV(NFlag); |
916 ASSERT_EQUAL_64(0x80000000, x0); | 916 CHECK_EQUAL_64(0x80000000, x0); |
917 | 917 |
918 START(); | 918 START(); |
919 __ Mov(x0, 0x8000000000000000L); | 919 __ Mov(x0, 0x8000000000000000L); |
920 __ Mov(x1, 0x00000001); | 920 __ Mov(x1, 0x00000001); |
921 __ Bics(x0, x0, Operand(x1, ROR, 1)); | 921 __ Bics(x0, x0, Operand(x1, ROR, 1)); |
922 END(); | 922 END(); |
923 | 923 |
924 RUN(); | 924 RUN(); |
925 | 925 |
926 ASSERT_EQUAL_NZCV(ZFlag); | 926 CHECK_EQUAL_NZCV(ZFlag); |
927 ASSERT_EQUAL_64(0x00000000, x0); | 927 CHECK_EQUAL_64(0x00000000, x0); |
928 | 928 |
929 START(); | 929 START(); |
930 __ Mov(x0, 0xffffffffffffffffL); | 930 __ Mov(x0, 0xffffffffffffffffL); |
931 __ Bics(x0, x0, Operand(0x7fffffffffffffffL)); | 931 __ Bics(x0, x0, Operand(0x7fffffffffffffffL)); |
932 END(); | 932 END(); |
933 | 933 |
934 RUN(); | 934 RUN(); |
935 | 935 |
936 ASSERT_EQUAL_NZCV(NFlag); | 936 CHECK_EQUAL_NZCV(NFlag); |
937 ASSERT_EQUAL_64(0x8000000000000000L, x0); | 937 CHECK_EQUAL_64(0x8000000000000000L, x0); |
938 | 938 |
939 START(); | 939 START(); |
940 __ Mov(w0, 0xffff0000); | 940 __ Mov(w0, 0xffff0000); |
941 __ Bics(w0, w0, Operand(0xfffffff0)); | 941 __ Bics(w0, w0, Operand(0xfffffff0)); |
942 END(); | 942 END(); |
943 | 943 |
944 RUN(); | 944 RUN(); |
945 | 945 |
946 ASSERT_EQUAL_NZCV(ZFlag); | 946 CHECK_EQUAL_NZCV(ZFlag); |
947 ASSERT_EQUAL_64(0x00000000, x0); | 947 CHECK_EQUAL_64(0x00000000, x0); |
948 | 948 |
949 TEARDOWN(); | 949 TEARDOWN(); |
950 } | 950 } |
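
The NZCV outcomes in the Bics cases follow from the "AND with complement" result: N is the top bit of the result and Z means the result is zero. Illustrative sketch (plain C++, not part of the test file) for two of the cases above:

#include <cassert>
#include <cstdint>

int main() {
  // Bics(w0, w0, Operand(w0, LSR, 1)) with w0 == 0xffffffff:
  uint32_t w0 = 0xffffffffu;
  uint32_t result = w0 & ~(w0 >> 1);  // 0x80000000 -> N flag set
  assert(result == 0x80000000u);
  // Bics(w0, w0, Operand(0xfffffff0)) with w0 == 0xffff0000:
  w0 = 0xffff0000u;
  result = w0 & ~0xfffffff0u;         // 0x00000000 -> Z flag set
  assert(result == 0);
  return 0;
}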
951 | 951 |
952 | 952 |
953 TEST(eor) { | 953 TEST(eor) { |
954 INIT_V8(); | 954 INIT_V8(); |
955 SETUP(); | 955 SETUP(); |
956 | 956 |
957 START(); | 957 START(); |
958 __ Mov(x0, 0xfff0); | 958 __ Mov(x0, 0xfff0); |
959 __ Mov(x1, 0xf00000ff); | 959 __ Mov(x1, 0xf00000ff); |
960 | 960 |
961 __ Eor(x2, x0, Operand(x1)); | 961 __ Eor(x2, x0, Operand(x1)); |
962 __ Eor(w3, w0, Operand(w1, LSL, 4)); | 962 __ Eor(w3, w0, Operand(w1, LSL, 4)); |
963 __ Eor(x4, x0, Operand(x1, LSL, 4)); | 963 __ Eor(x4, x0, Operand(x1, LSL, 4)); |
964 __ Eor(x5, x0, Operand(x1, LSR, 1)); | 964 __ Eor(x5, x0, Operand(x1, LSR, 1)); |
965 __ Eor(w6, w0, Operand(w1, ASR, 20)); | 965 __ Eor(w6, w0, Operand(w1, ASR, 20)); |
966 __ Eor(x7, x0, Operand(x1, ASR, 20)); | 966 __ Eor(x7, x0, Operand(x1, ASR, 20)); |
967 __ Eor(w8, w0, Operand(w1, ROR, 28)); | 967 __ Eor(w8, w0, Operand(w1, ROR, 28)); |
968 __ Eor(x9, x0, Operand(x1, ROR, 28)); | 968 __ Eor(x9, x0, Operand(x1, ROR, 28)); |
969 __ Eor(w10, w0, Operand(0xff00ff00)); | 969 __ Eor(w10, w0, Operand(0xff00ff00)); |
970 __ Eor(x11, x0, Operand(0xff00ff00ff00ff00L)); | 970 __ Eor(x11, x0, Operand(0xff00ff00ff00ff00L)); |
971 END(); | 971 END(); |
972 | 972 |
973 RUN(); | 973 RUN(); |
974 | 974 |
975 ASSERT_EQUAL_64(0xf000ff0f, x2); | 975 CHECK_EQUAL_64(0xf000ff0f, x2); |
976 ASSERT_EQUAL_64(0x0000f000, x3); | 976 CHECK_EQUAL_64(0x0000f000, x3); |
977 ASSERT_EQUAL_64(0x0000000f0000f000L, x4); | 977 CHECK_EQUAL_64(0x0000000f0000f000L, x4); |
978 ASSERT_EQUAL_64(0x7800ff8f, x5); | 978 CHECK_EQUAL_64(0x7800ff8f, x5); |
979 ASSERT_EQUAL_64(0xffff00f0, x6); | 979 CHECK_EQUAL_64(0xffff00f0, x6); |
980 ASSERT_EQUAL_64(0x0000f0f0, x7); | 980 CHECK_EQUAL_64(0x0000f0f0, x7); |
981 ASSERT_EQUAL_64(0x0000f00f, x8); | 981 CHECK_EQUAL_64(0x0000f00f, x8); |
982 ASSERT_EQUAL_64(0x00000ff00000ffffL, x9); | 982 CHECK_EQUAL_64(0x00000ff00000ffffL, x9); |
983 ASSERT_EQUAL_64(0xff0000f0, x10); | 983 CHECK_EQUAL_64(0xff0000f0, x10); |
984 ASSERT_EQUAL_64(0xff00ff00ff0000f0L, x11); | 984 CHECK_EQUAL_64(0xff00ff00ff0000f0L, x11); |
985 | 985 |
986 TEARDOWN(); | 986 TEARDOWN(); |
987 } | 987 } |
988 | 988 |
989 | 989 |
990 TEST(eor_extend) { | 990 TEST(eor_extend) { |
991 INIT_V8(); | 991 INIT_V8(); |
992 SETUP(); | 992 SETUP(); |
993 | 993 |
994 START(); | 994 START(); |
995 __ Mov(x0, 0x1111111111111111UL); | 995 __ Mov(x0, 0x1111111111111111UL); |
996 __ Mov(x1, 0x8000000080008081UL); | 996 __ Mov(x1, 0x8000000080008081UL); |
997 __ Eor(w6, w0, Operand(w1, UXTB)); | 997 __ Eor(w6, w0, Operand(w1, UXTB)); |
998 __ Eor(x7, x0, Operand(x1, UXTH, 1)); | 998 __ Eor(x7, x0, Operand(x1, UXTH, 1)); |
999 __ Eor(w8, w0, Operand(w1, UXTW, 2)); | 999 __ Eor(w8, w0, Operand(w1, UXTW, 2)); |
1000 __ Eor(x9, x0, Operand(x1, UXTX, 3)); | 1000 __ Eor(x9, x0, Operand(x1, UXTX, 3)); |
1001 __ Eor(w10, w0, Operand(w1, SXTB)); | 1001 __ Eor(w10, w0, Operand(w1, SXTB)); |
1002 __ Eor(x11, x0, Operand(x1, SXTH, 1)); | 1002 __ Eor(x11, x0, Operand(x1, SXTH, 1)); |
1003 __ Eor(x12, x0, Operand(x1, SXTW, 2)); | 1003 __ Eor(x12, x0, Operand(x1, SXTW, 2)); |
1004 __ Eor(x13, x0, Operand(x1, SXTX, 3)); | 1004 __ Eor(x13, x0, Operand(x1, SXTX, 3)); |
1005 END(); | 1005 END(); |
1006 | 1006 |
1007 RUN(); | 1007 RUN(); |
1008 | 1008 |
1009 ASSERT_EQUAL_64(0x11111190, x6); | 1009 CHECK_EQUAL_64(0x11111190, x6); |
1010 ASSERT_EQUAL_64(0x1111111111101013UL, x7); | 1010 CHECK_EQUAL_64(0x1111111111101013UL, x7); |
1011 ASSERT_EQUAL_64(0x11131315, x8); | 1011 CHECK_EQUAL_64(0x11131315, x8); |
1012 ASSERT_EQUAL_64(0x1111111511151519UL, x9); | 1012 CHECK_EQUAL_64(0x1111111511151519UL, x9); |
1013 ASSERT_EQUAL_64(0xeeeeee90, x10); | 1013 CHECK_EQUAL_64(0xeeeeee90, x10); |
1014 ASSERT_EQUAL_64(0xeeeeeeeeeeee1013UL, x11); | 1014 CHECK_EQUAL_64(0xeeeeeeeeeeee1013UL, x11); |
1015 ASSERT_EQUAL_64(0xeeeeeeef11131315UL, x12); | 1015 CHECK_EQUAL_64(0xeeeeeeef11131315UL, x12); |
1016 ASSERT_EQUAL_64(0x1111111511151519UL, x13); | 1016 CHECK_EQUAL_64(0x1111111511151519UL, x13); |
1017 | 1017 |
1018 TEARDOWN(); | 1018 TEARDOWN(); |
1019 } | 1019 } |
1020 | 1020 |
1021 | 1021 |
1022 TEST(eon) { | 1022 TEST(eon) { |
1023 INIT_V8(); | 1023 INIT_V8(); |
1024 SETUP(); | 1024 SETUP(); |
1025 | 1025 |
1026 START(); | 1026 START(); |
1027 __ Mov(x0, 0xfff0); | 1027 __ Mov(x0, 0xfff0); |
1028 __ Mov(x1, 0xf00000ff); | 1028 __ Mov(x1, 0xf00000ff); |
1029 | 1029 |
1030 __ Eon(x2, x0, Operand(x1)); | 1030 __ Eon(x2, x0, Operand(x1)); |
1031 __ Eon(w3, w0, Operand(w1, LSL, 4)); | 1031 __ Eon(w3, w0, Operand(w1, LSL, 4)); |
1032 __ Eon(x4, x0, Operand(x1, LSL, 4)); | 1032 __ Eon(x4, x0, Operand(x1, LSL, 4)); |
1033 __ Eon(x5, x0, Operand(x1, LSR, 1)); | 1033 __ Eon(x5, x0, Operand(x1, LSR, 1)); |
1034 __ Eon(w6, w0, Operand(w1, ASR, 20)); | 1034 __ Eon(w6, w0, Operand(w1, ASR, 20)); |
1035 __ Eon(x7, x0, Operand(x1, ASR, 20)); | 1035 __ Eon(x7, x0, Operand(x1, ASR, 20)); |
1036 __ Eon(w8, w0, Operand(w1, ROR, 28)); | 1036 __ Eon(w8, w0, Operand(w1, ROR, 28)); |
1037 __ Eon(x9, x0, Operand(x1, ROR, 28)); | 1037 __ Eon(x9, x0, Operand(x1, ROR, 28)); |
1038 __ Eon(w10, w0, Operand(0x03c003c0)); | 1038 __ Eon(w10, w0, Operand(0x03c003c0)); |
1039 __ Eon(x11, x0, Operand(0x0000100000001000L)); | 1039 __ Eon(x11, x0, Operand(0x0000100000001000L)); |
1040 END(); | 1040 END(); |
1041 | 1041 |
1042 RUN(); | 1042 RUN(); |
1043 | 1043 |
1044 ASSERT_EQUAL_64(0xffffffff0fff00f0L, x2); | 1044 CHECK_EQUAL_64(0xffffffff0fff00f0L, x2); |
1045 ASSERT_EQUAL_64(0xffff0fff, x3); | 1045 CHECK_EQUAL_64(0xffff0fff, x3); |
1046 ASSERT_EQUAL_64(0xfffffff0ffff0fffL, x4); | 1046 CHECK_EQUAL_64(0xfffffff0ffff0fffL, x4); |
1047 ASSERT_EQUAL_64(0xffffffff87ff0070L, x5); | 1047 CHECK_EQUAL_64(0xffffffff87ff0070L, x5); |
1048 ASSERT_EQUAL_64(0x0000ff0f, x6); | 1048 CHECK_EQUAL_64(0x0000ff0f, x6); |
1049 ASSERT_EQUAL_64(0xffffffffffff0f0fL, x7); | 1049 CHECK_EQUAL_64(0xffffffffffff0f0fL, x7); |
1050 ASSERT_EQUAL_64(0xffff0ff0, x8); | 1050 CHECK_EQUAL_64(0xffff0ff0, x8); |
1051 ASSERT_EQUAL_64(0xfffff00fffff0000L, x9); | 1051 CHECK_EQUAL_64(0xfffff00fffff0000L, x9); |
1052 ASSERT_EQUAL_64(0xfc3f03cf, x10); | 1052 CHECK_EQUAL_64(0xfc3f03cf, x10); |
1053 ASSERT_EQUAL_64(0xffffefffffff100fL, x11); | 1053 CHECK_EQUAL_64(0xffffefffffff100fL, x11); |
1054 | 1054 |
1055 TEARDOWN(); | 1055 TEARDOWN(); |
1056 } | 1056 } |
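
Eon is an exclusive OR with the complemented operand. The w10 result above can be reproduced by hand (illustrative only, not from the test file):

#include <cassert>
#include <cstdint>

int main() {
  // Eon(w10, w0, Operand(0x03c003c0)) with w0 == 0xfff0:
  uint32_t w0 = 0xfff0;
  uint32_t w10 = w0 ^ ~0x03c003c0u;  // XOR with the complemented immediate
  assert(w10 == 0xfc3f03cfu);
  return 0;
}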
1057 | 1057 |
1058 | 1058 |
1059 TEST(eon_extend) { | 1059 TEST(eon_extend) { |
1060 INIT_V8(); | 1060 INIT_V8(); |
1061 SETUP(); | 1061 SETUP(); |
1062 | 1062 |
1063 START(); | 1063 START(); |
1064 __ Mov(x0, 0x1111111111111111UL); | 1064 __ Mov(x0, 0x1111111111111111UL); |
1065 __ Mov(x1, 0x8000000080008081UL); | 1065 __ Mov(x1, 0x8000000080008081UL); |
1066 __ Eon(w6, w0, Operand(w1, UXTB)); | 1066 __ Eon(w6, w0, Operand(w1, UXTB)); |
1067 __ Eon(x7, x0, Operand(x1, UXTH, 1)); | 1067 __ Eon(x7, x0, Operand(x1, UXTH, 1)); |
1068 __ Eon(w8, w0, Operand(w1, UXTW, 2)); | 1068 __ Eon(w8, w0, Operand(w1, UXTW, 2)); |
1069 __ Eon(x9, x0, Operand(x1, UXTX, 3)); | 1069 __ Eon(x9, x0, Operand(x1, UXTX, 3)); |
1070 __ Eon(w10, w0, Operand(w1, SXTB)); | 1070 __ Eon(w10, w0, Operand(w1, SXTB)); |
1071 __ Eon(x11, x0, Operand(x1, SXTH, 1)); | 1071 __ Eon(x11, x0, Operand(x1, SXTH, 1)); |
1072 __ Eon(x12, x0, Operand(x1, SXTW, 2)); | 1072 __ Eon(x12, x0, Operand(x1, SXTW, 2)); |
1073 __ Eon(x13, x0, Operand(x1, SXTX, 3)); | 1073 __ Eon(x13, x0, Operand(x1, SXTX, 3)); |
1074 END(); | 1074 END(); |
1075 | 1075 |
1076 RUN(); | 1076 RUN(); |
1077 | 1077 |
1078 ASSERT_EQUAL_64(0xeeeeee6f, x6); | 1078 CHECK_EQUAL_64(0xeeeeee6f, x6); |
1079 ASSERT_EQUAL_64(0xeeeeeeeeeeefefecUL, x7); | 1079 CHECK_EQUAL_64(0xeeeeeeeeeeefefecUL, x7); |
1080 ASSERT_EQUAL_64(0xeeececea, x8); | 1080 CHECK_EQUAL_64(0xeeececea, x8); |
1081 ASSERT_EQUAL_64(0xeeeeeeeaeeeaeae6UL, x9); | 1081 CHECK_EQUAL_64(0xeeeeeeeaeeeaeae6UL, x9); |
1082 ASSERT_EQUAL_64(0x1111116f, x10); | 1082 CHECK_EQUAL_64(0x1111116f, x10); |
1083 ASSERT_EQUAL_64(0x111111111111efecUL, x11); | 1083 CHECK_EQUAL_64(0x111111111111efecUL, x11); |
1084 ASSERT_EQUAL_64(0x11111110eeececeaUL, x12); | 1084 CHECK_EQUAL_64(0x11111110eeececeaUL, x12); |
1085 ASSERT_EQUAL_64(0xeeeeeeeaeeeaeae6UL, x13); | 1085 CHECK_EQUAL_64(0xeeeeeeeaeeeaeae6UL, x13); |
1086 | 1086 |
1087 TEARDOWN(); | 1087 TEARDOWN(); |
1088 } | 1088 } |
1089 | 1089 |
1090 | 1090 |
1091 TEST(mul) { | 1091 TEST(mul) { |
1092 INIT_V8(); | 1092 INIT_V8(); |
1093 SETUP(); | 1093 SETUP(); |
1094 | 1094 |
1095 START(); | 1095 START(); |
(...skipping 18 matching lines...)
1114 __ Mneg(w13, w17, w18); | 1114 __ Mneg(w13, w17, w18); |
1115 __ Mneg(w14, w18, w19); | 1115 __ Mneg(w14, w18, w19); |
1116 __ Mneg(x20, x16, x16); | 1116 __ Mneg(x20, x16, x16); |
1117 __ Mneg(x21, x17, x18); | 1117 __ Mneg(x21, x17, x18); |
1118 __ Mneg(x22, x18, x19); | 1118 __ Mneg(x22, x18, x19); |
1119 __ Mneg(x23, x19, x19); | 1119 __ Mneg(x23, x19, x19); |
1120 END(); | 1120 END(); |
1121 | 1121 |
1122 RUN(); | 1122 RUN(); |
1123 | 1123 |
1124 ASSERT_EQUAL_64(0, x0); | 1124 CHECK_EQUAL_64(0, x0); |
1125 ASSERT_EQUAL_64(0, x1); | 1125 CHECK_EQUAL_64(0, x1); |
1126 ASSERT_EQUAL_64(0xffffffff, x2); | 1126 CHECK_EQUAL_64(0xffffffff, x2); |
1127 ASSERT_EQUAL_64(1, x3); | 1127 CHECK_EQUAL_64(1, x3); |
1128 ASSERT_EQUAL_64(0, x4); | 1128 CHECK_EQUAL_64(0, x4); |
1129 ASSERT_EQUAL_64(0xffffffff, x5); | 1129 CHECK_EQUAL_64(0xffffffff, x5); |
1130 ASSERT_EQUAL_64(0xffffffff00000001UL, x6); | 1130 CHECK_EQUAL_64(0xffffffff00000001UL, x6); |
1131 ASSERT_EQUAL_64(1, x7); | 1131 CHECK_EQUAL_64(1, x7); |
1132 ASSERT_EQUAL_64(0xffffffffffffffffUL, x8); | 1132 CHECK_EQUAL_64(0xffffffffffffffffUL, x8); |
1133 ASSERT_EQUAL_64(1, x9); | 1133 CHECK_EQUAL_64(1, x9); |
1134 ASSERT_EQUAL_64(1, x10); | 1134 CHECK_EQUAL_64(1, x10); |
1135 ASSERT_EQUAL_64(0, x11); | 1135 CHECK_EQUAL_64(0, x11); |
1136 ASSERT_EQUAL_64(0, x12); | 1136 CHECK_EQUAL_64(0, x12); |
1137 ASSERT_EQUAL_64(1, x13); | 1137 CHECK_EQUAL_64(1, x13); |
1138 ASSERT_EQUAL_64(0xffffffff, x14); | 1138 CHECK_EQUAL_64(0xffffffff, x14); |
1139 ASSERT_EQUAL_64(0, x20); | 1139 CHECK_EQUAL_64(0, x20); |
1140 ASSERT_EQUAL_64(0xffffffff00000001UL, x21); | 1140 CHECK_EQUAL_64(0xffffffff00000001UL, x21); |
1141 ASSERT_EQUAL_64(0xffffffff, x22); | 1141 CHECK_EQUAL_64(0xffffffff, x22); |
1142 ASSERT_EQUAL_64(0xffffffffffffffffUL, x23); | 1142 CHECK_EQUAL_64(0xffffffffffffffffUL, x23); |
1143 | 1143 |
1144 TEARDOWN(); | 1144 TEARDOWN(); |
1145 } | 1145 } |
1146 | 1146 |
1147 | 1147 |
1148 static void SmullHelper(int64_t expected, int64_t a, int64_t b) { | 1148 static void SmullHelper(int64_t expected, int64_t a, int64_t b) { |
1149 SETUP(); | 1149 SETUP(); |
1150 START(); | 1150 START(); |
1151 __ Mov(w0, a); | 1151 __ Mov(w0, a); |
1152 __ Mov(w1, b); | 1152 __ Mov(w1, b); |
1153 __ Smull(x2, w0, w1); | 1153 __ Smull(x2, w0, w1); |
1154 END(); | 1154 END(); |
1155 RUN(); | 1155 RUN(); |
1156 ASSERT_EQUAL_64(expected, x2); | 1156 CHECK_EQUAL_64(expected, x2); |
1157 TEARDOWN(); | 1157 TEARDOWN(); |
1158 } | 1158 } |
1159 | 1159 |
1160 | 1160 |
1161 TEST(smull) { | 1161 TEST(smull) { |
1162 INIT_V8(); | 1162 INIT_V8(); |
1163 SmullHelper(0, 0, 0); | 1163 SmullHelper(0, 0, 0); |
1164 SmullHelper(1, 1, 1); | 1164 SmullHelper(1, 1, 1); |
1165 SmullHelper(-1, -1, 1); | 1165 SmullHelper(-1, -1, 1); |
1166 SmullHelper(1, -1, -1); | 1166 SmullHelper(1, -1, -1); |
(...skipping 35 matching lines...)
1202 __ Madd(x23, x17, x18, x16); | 1202 __ Madd(x23, x17, x18, x16); |
1203 __ Madd(x24, x17, x18, x18); | 1203 __ Madd(x24, x17, x18, x18); |
1204 __ Madd(x25, x18, x18, x17); | 1204 __ Madd(x25, x18, x18, x17); |
1205 __ Madd(x26, x18, x19, x18); | 1205 __ Madd(x26, x18, x19, x18); |
1206 __ Madd(x27, x19, x19, x19); | 1206 __ Madd(x27, x19, x19, x19); |
1207 | 1207 |
1208 END(); | 1208 END(); |
1209 | 1209 |
1210 RUN(); | 1210 RUN(); |
1211 | 1211 |
1212 ASSERT_EQUAL_64(0, x0); | 1212 CHECK_EQUAL_64(0, x0); |
1213 ASSERT_EQUAL_64(1, x1); | 1213 CHECK_EQUAL_64(1, x1); |
1214 ASSERT_EQUAL_64(0xffffffff, x2); | 1214 CHECK_EQUAL_64(0xffffffff, x2); |
1215 ASSERT_EQUAL_64(0xffffffff, x3); | 1215 CHECK_EQUAL_64(0xffffffff, x3); |
1216 ASSERT_EQUAL_64(1, x4); | 1216 CHECK_EQUAL_64(1, x4); |
1217 ASSERT_EQUAL_64(0, x5); | 1217 CHECK_EQUAL_64(0, x5); |
1218 ASSERT_EQUAL_64(0, x6); | 1218 CHECK_EQUAL_64(0, x6); |
1219 ASSERT_EQUAL_64(0xffffffff, x7); | 1219 CHECK_EQUAL_64(0xffffffff, x7); |
1220 ASSERT_EQUAL_64(0xfffffffe, x8); | 1220 CHECK_EQUAL_64(0xfffffffe, x8); |
1221 ASSERT_EQUAL_64(2, x9); | 1221 CHECK_EQUAL_64(2, x9); |
1222 ASSERT_EQUAL_64(0, x10); | 1222 CHECK_EQUAL_64(0, x10); |
1223 ASSERT_EQUAL_64(0, x11); | 1223 CHECK_EQUAL_64(0, x11); |
1224 | 1224 |
1225 ASSERT_EQUAL_64(0, x12); | 1225 CHECK_EQUAL_64(0, x12); |
1226 ASSERT_EQUAL_64(1, x13); | 1226 CHECK_EQUAL_64(1, x13); |
1227 ASSERT_EQUAL_64(0xffffffff, x14); | 1227 CHECK_EQUAL_64(0xffffffff, x14); |
1228 ASSERT_EQUAL_64(0xffffffffffffffff, x15); | 1228 CHECK_EQUAL_64(0xffffffffffffffff, x15); |
1229 ASSERT_EQUAL_64(1, x20); | 1229 CHECK_EQUAL_64(1, x20); |
1230 ASSERT_EQUAL_64(0x100000000UL, x21); | 1230 CHECK_EQUAL_64(0x100000000UL, x21); |
1231 ASSERT_EQUAL_64(0, x22); | 1231 CHECK_EQUAL_64(0, x22); |
1232 ASSERT_EQUAL_64(0xffffffff, x23); | 1232 CHECK_EQUAL_64(0xffffffff, x23); |
1233 ASSERT_EQUAL_64(0x1fffffffe, x24); | 1233 CHECK_EQUAL_64(0x1fffffffe, x24); |
1234 ASSERT_EQUAL_64(0xfffffffe00000002UL, x25); | 1234 CHECK_EQUAL_64(0xfffffffe00000002UL, x25); |
1235 ASSERT_EQUAL_64(0, x26); | 1235 CHECK_EQUAL_64(0, x26); |
1236 ASSERT_EQUAL_64(0, x27); | 1236 CHECK_EQUAL_64(0, x27); |
1237 | 1237 |
1238 TEARDOWN(); | 1238 TEARDOWN(); |
1239 } | 1239 } |
1240 | 1240 |
1241 | 1241 |
1242 TEST(msub) { | 1242 TEST(msub) { |
1243 INIT_V8(); | 1243 INIT_V8(); |
1244 SETUP(); | 1244 SETUP(); |
1245 | 1245 |
1246 START(); | 1246 START(); |
(...skipping 25 matching lines...)
1272 __ Msub(x23, x17, x18, x16); | 1272 __ Msub(x23, x17, x18, x16); |
1273 __ Msub(x24, x17, x18, x18); | 1273 __ Msub(x24, x17, x18, x18); |
1274 __ Msub(x25, x18, x18, x17); | 1274 __ Msub(x25, x18, x18, x17); |
1275 __ Msub(x26, x18, x19, x18); | 1275 __ Msub(x26, x18, x19, x18); |
1276 __ Msub(x27, x19, x19, x19); | 1276 __ Msub(x27, x19, x19, x19); |
1277 | 1277 |
1278 END(); | 1278 END(); |
1279 | 1279 |
1280 RUN(); | 1280 RUN(); |
1281 | 1281 |
1282 ASSERT_EQUAL_64(0, x0); | 1282 CHECK_EQUAL_64(0, x0); |
1283 ASSERT_EQUAL_64(1, x1); | 1283 CHECK_EQUAL_64(1, x1); |
1284 ASSERT_EQUAL_64(0xffffffff, x2); | 1284 CHECK_EQUAL_64(0xffffffff, x2); |
1285 ASSERT_EQUAL_64(0xffffffff, x3); | 1285 CHECK_EQUAL_64(0xffffffff, x3); |
1286 ASSERT_EQUAL_64(1, x4); | 1286 CHECK_EQUAL_64(1, x4); |
1287 ASSERT_EQUAL_64(0xfffffffe, x5); | 1287 CHECK_EQUAL_64(0xfffffffe, x5); |
1288 ASSERT_EQUAL_64(0xfffffffe, x6); | 1288 CHECK_EQUAL_64(0xfffffffe, x6); |
1289 ASSERT_EQUAL_64(1, x7); | 1289 CHECK_EQUAL_64(1, x7); |
1290 ASSERT_EQUAL_64(0, x8); | 1290 CHECK_EQUAL_64(0, x8); |
1291 ASSERT_EQUAL_64(0, x9); | 1291 CHECK_EQUAL_64(0, x9); |
1292 ASSERT_EQUAL_64(0xfffffffe, x10); | 1292 CHECK_EQUAL_64(0xfffffffe, x10); |
1293 ASSERT_EQUAL_64(0xfffffffe, x11); | 1293 CHECK_EQUAL_64(0xfffffffe, x11); |
1294 | 1294 |
1295 ASSERT_EQUAL_64(0, x12); | 1295 CHECK_EQUAL_64(0, x12); |
1296 ASSERT_EQUAL_64(1, x13); | 1296 CHECK_EQUAL_64(1, x13); |
1297 ASSERT_EQUAL_64(0xffffffff, x14); | 1297 CHECK_EQUAL_64(0xffffffff, x14); |
1298 ASSERT_EQUAL_64(0xffffffffffffffffUL, x15); | 1298 CHECK_EQUAL_64(0xffffffffffffffffUL, x15); |
1299 ASSERT_EQUAL_64(1, x20); | 1299 CHECK_EQUAL_64(1, x20); |
1300 ASSERT_EQUAL_64(0xfffffffeUL, x21); | 1300 CHECK_EQUAL_64(0xfffffffeUL, x21); |
1301 ASSERT_EQUAL_64(0xfffffffffffffffeUL, x22); | 1301 CHECK_EQUAL_64(0xfffffffffffffffeUL, x22); |
1302 ASSERT_EQUAL_64(0xffffffff00000001UL, x23); | 1302 CHECK_EQUAL_64(0xffffffff00000001UL, x23); |
1303 ASSERT_EQUAL_64(0, x24); | 1303 CHECK_EQUAL_64(0, x24); |
1304 ASSERT_EQUAL_64(0x200000000UL, x25); | 1304 CHECK_EQUAL_64(0x200000000UL, x25); |
1305 ASSERT_EQUAL_64(0x1fffffffeUL, x26); | 1305 CHECK_EQUAL_64(0x1fffffffeUL, x26); |
1306 ASSERT_EQUAL_64(0xfffffffffffffffeUL, x27); | 1306 CHECK_EQUAL_64(0xfffffffffffffffeUL, x27); |
1307 | 1307 |
1308 TEARDOWN(); | 1308 TEARDOWN(); |
1309 } | 1309 } |
1310 | 1310 |
1311 | 1311 |
1312 TEST(smulh) { | 1312 TEST(smulh) { |
1313 INIT_V8(); | 1313 INIT_V8(); |
1314 SETUP(); | 1314 SETUP(); |
1315 | 1315 |
1316 START(); | 1316 START(); |
(...skipping 17 matching lines...)
1334 __ Smulh(x6, x26, x26); | 1334 __ Smulh(x6, x26, x26); |
1335 __ Smulh(x7, x26, x27); | 1335 __ Smulh(x7, x26, x27); |
1336 __ Smulh(x8, x27, x27); | 1336 __ Smulh(x8, x27, x27); |
1337 __ Smulh(x9, x28, x28); | 1337 __ Smulh(x9, x28, x28); |
1338 __ Smulh(x10, x28, x29); | 1338 __ Smulh(x10, x28, x29); |
1339 __ Smulh(x11, x29, x29); | 1339 __ Smulh(x11, x29, x29); |
1340 END(); | 1340 END(); |
1341 | 1341 |
1342 RUN(); | 1342 RUN(); |
1343 | 1343 |
1344 ASSERT_EQUAL_64(0, x0); | 1344 CHECK_EQUAL_64(0, x0); |
1345 ASSERT_EQUAL_64(0, x1); | 1345 CHECK_EQUAL_64(0, x1); |
1346 ASSERT_EQUAL_64(0, x2); | 1346 CHECK_EQUAL_64(0, x2); |
1347 ASSERT_EQUAL_64(0x01234567, x3); | 1347 CHECK_EQUAL_64(0x01234567, x3); |
1348 ASSERT_EQUAL_64(0x02468acf, x4); | 1348 CHECK_EQUAL_64(0x02468acf, x4); |
1349 ASSERT_EQUAL_64(0xffffffffffffffffUL, x5); | 1349 CHECK_EQUAL_64(0xffffffffffffffffUL, x5); |
1350 ASSERT_EQUAL_64(0x4000000000000000UL, x6); | 1350 CHECK_EQUAL_64(0x4000000000000000UL, x6); |
1351 ASSERT_EQUAL_64(0, x7); | 1351 CHECK_EQUAL_64(0, x7); |
1352 ASSERT_EQUAL_64(0, x8); | 1352 CHECK_EQUAL_64(0, x8); |
1353 ASSERT_EQUAL_64(0x1c71c71c71c71c71UL, x9); | 1353 CHECK_EQUAL_64(0x1c71c71c71c71c71UL, x9); |
1354 ASSERT_EQUAL_64(0xe38e38e38e38e38eUL, x10); | 1354 CHECK_EQUAL_64(0xe38e38e38e38e38eUL, x10); |
1355 ASSERT_EQUAL_64(0x1c71c71c71c71c72UL, x11); | 1355 CHECK_EQUAL_64(0x1c71c71c71c71c72UL, x11); |
1356 | 1356 |
1357 TEARDOWN(); | 1357 TEARDOWN(); |
1358 } | 1358 } |
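
Note on the expected values above: Smulh keeps only the upper half of the widening signed multiply, i.e. bits [127:64] of the 128-bit product of the two source registers. A minimal reference model, assuming a compiler that provides the __int128 extension (GCC/Clang); it is illustrative, not the code V8 uses:

#include <cstdint>

// High 64 bits of the signed 128-bit product, as computed by SMULH.
int64_t SmulhRef(int64_t a, int64_t b) {
  return static_cast<int64_t>((static_cast<__int128>(a) * b) >> 64);
}
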
1359 | 1359 |
1360 | 1360 |
1361 TEST(smaddl_umaddl) { | 1361 TEST(smaddl_umaddl) { |
1362 INIT_V8(); | 1362 INIT_V8(); |
1363 SETUP(); | 1363 SETUP(); |
1364 | 1364 |
1365 START(); | 1365 START(); |
1366 __ Mov(x17, 1); | 1366 __ Mov(x17, 1); |
1367 __ Mov(x18, 0xffffffff); | 1367 __ Mov(x18, 0xffffffff); |
1368 __ Mov(x19, 0xffffffffffffffffUL); | 1368 __ Mov(x19, 0xffffffffffffffffUL); |
1369 __ Mov(x20, 4); | 1369 __ Mov(x20, 4); |
1370 __ Mov(x21, 0x200000000UL); | 1370 __ Mov(x21, 0x200000000UL); |
1371 | 1371 |
1372 __ Smaddl(x9, w17, w18, x20); | 1372 __ Smaddl(x9, w17, w18, x20); |
1373 __ Smaddl(x10, w18, w18, x20); | 1373 __ Smaddl(x10, w18, w18, x20); |
1374 __ Smaddl(x11, w19, w19, x20); | 1374 __ Smaddl(x11, w19, w19, x20); |
1375 __ Smaddl(x12, w19, w19, x21); | 1375 __ Smaddl(x12, w19, w19, x21); |
1376 __ Umaddl(x13, w17, w18, x20); | 1376 __ Umaddl(x13, w17, w18, x20); |
1377 __ Umaddl(x14, w18, w18, x20); | 1377 __ Umaddl(x14, w18, w18, x20); |
1378 __ Umaddl(x15, w19, w19, x20); | 1378 __ Umaddl(x15, w19, w19, x20); |
1379 __ Umaddl(x22, w19, w19, x21); | 1379 __ Umaddl(x22, w19, w19, x21); |
1380 END(); | 1380 END(); |
1381 | 1381 |
1382 RUN(); | 1382 RUN(); |
1383 | 1383 |
1384 ASSERT_EQUAL_64(3, x9); | 1384 CHECK_EQUAL_64(3, x9); |
1385 ASSERT_EQUAL_64(5, x10); | 1385 CHECK_EQUAL_64(5, x10); |
1386 ASSERT_EQUAL_64(5, x11); | 1386 CHECK_EQUAL_64(5, x11); |
1387 ASSERT_EQUAL_64(0x200000001UL, x12); | 1387 CHECK_EQUAL_64(0x200000001UL, x12); |
1388 ASSERT_EQUAL_64(0x100000003UL, x13); | 1388 CHECK_EQUAL_64(0x100000003UL, x13); |
1389 ASSERT_EQUAL_64(0xfffffffe00000005UL, x14); | 1389 CHECK_EQUAL_64(0xfffffffe00000005UL, x14); |
1390 ASSERT_EQUAL_64(0xfffffffe00000005UL, x15); | 1390 CHECK_EQUAL_64(0xfffffffe00000005UL, x15); |
1391 ASSERT_EQUAL_64(0x1, x22); | 1391 CHECK_EQUAL_64(0x1, x22); |
1392 | 1392 |
1393 TEARDOWN(); | 1393 TEARDOWN(); |
1394 } | 1394 } |
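
The expected values here follow directly from the widening multiply-add semantics: Smaddl sign-extends the two W operands to 64 bits before multiplying, Umaddl zero-extends them, and the product is added to the X accumulator with normal 64-bit wrap-around. A small reference sketch (helper names are illustrative):

#include <cstdint>

// SMADDL: Xd = Xa + sext(Wn) * sext(Wm).
int64_t SmaddlRef(int32_t wn, int32_t wm, int64_t xa) {
  return xa + static_cast<int64_t>(wn) * static_cast<int64_t>(wm);
}

// UMADDL: Xd = Xa + zext(Wn) * zext(Wm).
uint64_t UmaddlRef(uint32_t wn, uint32_t wm, uint64_t xa) {
  return xa + static_cast<uint64_t>(wn) * static_cast<uint64_t>(wm);
}

With the inputs used above, w18 = 0xffffffff is -1 when sign-extended, so SmaddlRef(1, -1, 4) == 3 while UmaddlRef(1, 0xffffffff, 4) == 0x100000003, matching the x9 and x13 checks. The smsubl_umsubl test below uses the same inputs with the product subtracted from the accumulator instead of added.
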
1395 | 1395 |
1396 | 1396 |
1397 TEST(smsubl_umsubl) { | 1397 TEST(smsubl_umsubl) { |
1398 INIT_V8(); | 1398 INIT_V8(); |
1399 SETUP(); | 1399 SETUP(); |
1400 | 1400 |
1401 START(); | 1401 START(); |
1402 __ Mov(x17, 1); | 1402 __ Mov(x17, 1); |
1403 __ Mov(x18, 0xffffffff); | 1403 __ Mov(x18, 0xffffffff); |
1404 __ Mov(x19, 0xffffffffffffffffUL); | 1404 __ Mov(x19, 0xffffffffffffffffUL); |
1405 __ Mov(x20, 4); | 1405 __ Mov(x20, 4); |
1406 __ Mov(x21, 0x200000000UL); | 1406 __ Mov(x21, 0x200000000UL); |
1407 | 1407 |
1408 __ Smsubl(x9, w17, w18, x20); | 1408 __ Smsubl(x9, w17, w18, x20); |
1409 __ Smsubl(x10, w18, w18, x20); | 1409 __ Smsubl(x10, w18, w18, x20); |
1410 __ Smsubl(x11, w19, w19, x20); | 1410 __ Smsubl(x11, w19, w19, x20); |
1411 __ Smsubl(x12, w19, w19, x21); | 1411 __ Smsubl(x12, w19, w19, x21); |
1412 __ Umsubl(x13, w17, w18, x20); | 1412 __ Umsubl(x13, w17, w18, x20); |
1413 __ Umsubl(x14, w18, w18, x20); | 1413 __ Umsubl(x14, w18, w18, x20); |
1414 __ Umsubl(x15, w19, w19, x20); | 1414 __ Umsubl(x15, w19, w19, x20); |
1415 __ Umsubl(x22, w19, w19, x21); | 1415 __ Umsubl(x22, w19, w19, x21); |
1416 END(); | 1416 END(); |
1417 | 1417 |
1418 RUN(); | 1418 RUN(); |
1419 | 1419 |
1420 ASSERT_EQUAL_64(5, x9); | 1420 CHECK_EQUAL_64(5, x9); |
1421 ASSERT_EQUAL_64(3, x10); | 1421 CHECK_EQUAL_64(3, x10); |
1422 ASSERT_EQUAL_64(3, x11); | 1422 CHECK_EQUAL_64(3, x11); |
1423 ASSERT_EQUAL_64(0x1ffffffffUL, x12); | 1423 CHECK_EQUAL_64(0x1ffffffffUL, x12); |
1424 ASSERT_EQUAL_64(0xffffffff00000005UL, x13); | 1424 CHECK_EQUAL_64(0xffffffff00000005UL, x13); |
1425 ASSERT_EQUAL_64(0x200000003UL, x14); | 1425 CHECK_EQUAL_64(0x200000003UL, x14); |
1426 ASSERT_EQUAL_64(0x200000003UL, x15); | 1426 CHECK_EQUAL_64(0x200000003UL, x15); |
1427 ASSERT_EQUAL_64(0x3ffffffffUL, x22); | 1427 CHECK_EQUAL_64(0x3ffffffffUL, x22); |
1428 | 1428 |
1429 TEARDOWN(); | 1429 TEARDOWN(); |
1430 } | 1430 } |
1431 | 1431 |
1432 | 1432 |
1433 TEST(div) { | 1433 TEST(div) { |
1434 INIT_V8(); | 1434 INIT_V8(); |
1435 SETUP(); | 1435 SETUP(); |
1436 | 1436 |
1437 START(); | 1437 START(); |
(...skipping 35 matching lines...)
1473 | 1473 |
1474 __ Mov(x17, 0); | 1474 __ Mov(x17, 0); |
1475 __ Udiv(w18, w16, w17); | 1475 __ Udiv(w18, w16, w17); |
1476 __ Sdiv(w19, w16, w17); | 1476 __ Sdiv(w19, w16, w17); |
1477 __ Udiv(x20, x16, x17); | 1477 __ Udiv(x20, x16, x17); |
1478 __ Sdiv(x21, x16, x17); | 1478 __ Sdiv(x21, x16, x17); |
1479 END(); | 1479 END(); |
1480 | 1480 |
1481 RUN(); | 1481 RUN(); |
1482 | 1482 |
1483 ASSERT_EQUAL_64(1, x0); | 1483 CHECK_EQUAL_64(1, x0); |
1484 ASSERT_EQUAL_64(0xffffffff, x1); | 1484 CHECK_EQUAL_64(0xffffffff, x1); |
1485 ASSERT_EQUAL_64(1, x2); | 1485 CHECK_EQUAL_64(1, x2); |
1486 ASSERT_EQUAL_64(0xffffffff, x3); | 1486 CHECK_EQUAL_64(0xffffffff, x3); |
1487 ASSERT_EQUAL_64(1, x4); | 1487 CHECK_EQUAL_64(1, x4); |
1488 ASSERT_EQUAL_64(1, x5); | 1488 CHECK_EQUAL_64(1, x5); |
1489 ASSERT_EQUAL_64(0, x6); | 1489 CHECK_EQUAL_64(0, x6); |
1490 ASSERT_EQUAL_64(1, x7); | 1490 CHECK_EQUAL_64(1, x7); |
1491 ASSERT_EQUAL_64(0, x8); | 1491 CHECK_EQUAL_64(0, x8); |
1492 ASSERT_EQUAL_64(0xffffffff00000001UL, x9); | 1492 CHECK_EQUAL_64(0xffffffff00000001UL, x9); |
1493 ASSERT_EQUAL_64(0x40000000, x10); | 1493 CHECK_EQUAL_64(0x40000000, x10); |
1494 ASSERT_EQUAL_64(0xC0000000, x11); | 1494 CHECK_EQUAL_64(0xC0000000, x11); |
1495 ASSERT_EQUAL_64(0x40000000, x12); | 1495 CHECK_EQUAL_64(0x40000000, x12); |
1496 ASSERT_EQUAL_64(0x40000000, x13); | 1496 CHECK_EQUAL_64(0x40000000, x13); |
1497 ASSERT_EQUAL_64(0x4000000000000000UL, x14); | 1497 CHECK_EQUAL_64(0x4000000000000000UL, x14); |
1498 ASSERT_EQUAL_64(0xC000000000000000UL, x15); | 1498 CHECK_EQUAL_64(0xC000000000000000UL, x15); |
1499 ASSERT_EQUAL_64(0, x22); | 1499 CHECK_EQUAL_64(0, x22); |
1500 ASSERT_EQUAL_64(0x80000000, x23); | 1500 CHECK_EQUAL_64(0x80000000, x23); |
1501 ASSERT_EQUAL_64(0, x24); | 1501 CHECK_EQUAL_64(0, x24); |
1502 ASSERT_EQUAL_64(0x8000000000000000UL, x25); | 1502 CHECK_EQUAL_64(0x8000000000000000UL, x25); |
1503 ASSERT_EQUAL_64(0, x26); | 1503 CHECK_EQUAL_64(0, x26); |
1504 ASSERT_EQUAL_64(0, x27); | 1504 CHECK_EQUAL_64(0, x27); |
1505 ASSERT_EQUAL_64(0x7fffffffffffffffUL, x28); | 1505 CHECK_EQUAL_64(0x7fffffffffffffffUL, x28); |
1506 ASSERT_EQUAL_64(0, x29); | 1506 CHECK_EQUAL_64(0, x29); |
1507 ASSERT_EQUAL_64(0, x18); | 1507 CHECK_EQUAL_64(0, x18); |
1508 ASSERT_EQUAL_64(0, x19); | 1508 CHECK_EQUAL_64(0, x19); |
1509 ASSERT_EQUAL_64(0, x20); | 1509 CHECK_EQUAL_64(0, x20); |
1510 ASSERT_EQUAL_64(0, x21); | 1510 CHECK_EQUAL_64(0, x21); |
1511 | 1511 |
1512 TEARDOWN(); | 1512 TEARDOWN(); |
1513 } | 1513 } |
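
The final Mov(x17, 0) block and the x18-x21 checks document that ARM64 integer division never traps: a zero divisor simply produces a zero result, and the other architecturally defined special case, INT64_MIN / -1, wraps back to INT64_MIN. A sketch of that rule for the signed form (not the simulator's actual code):

#include <cstdint>
#include <limits>

int64_t SdivRef(int64_t n, int64_t d) {
  if (d == 0) return 0;  // Division by zero yields 0, no exception.
  if (n == std::numeric_limits<int64_t>::min() && d == -1) return n;  // Wraps.
  return n / d;
}
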
1514 | 1514 |
1515 | 1515 |
1516 TEST(rbit_rev) { | 1516 TEST(rbit_rev) { |
1517 INIT_V8(); | 1517 INIT_V8(); |
1518 SETUP(); | 1518 SETUP(); |
1519 | 1519 |
1520 START(); | 1520 START(); |
1521 __ Mov(x24, 0xfedcba9876543210UL); | 1521 __ Mov(x24, 0xfedcba9876543210UL); |
1522 __ Rbit(w0, w24); | 1522 __ Rbit(w0, w24); |
1523 __ Rbit(x1, x24); | 1523 __ Rbit(x1, x24); |
1524 __ Rev16(w2, w24); | 1524 __ Rev16(w2, w24); |
1525 __ Rev16(x3, x24); | 1525 __ Rev16(x3, x24); |
1526 __ Rev(w4, w24); | 1526 __ Rev(w4, w24); |
1527 __ Rev32(x5, x24); | 1527 __ Rev32(x5, x24); |
1528 __ Rev(x6, x24); | 1528 __ Rev(x6, x24); |
1529 END(); | 1529 END(); |
1530 | 1530 |
1531 RUN(); | 1531 RUN(); |
1532 | 1532 |
1533 ASSERT_EQUAL_64(0x084c2a6e, x0); | 1533 CHECK_EQUAL_64(0x084c2a6e, x0); |
1534 ASSERT_EQUAL_64(0x084c2a6e195d3b7fUL, x1); | 1534 CHECK_EQUAL_64(0x084c2a6e195d3b7fUL, x1); |
1535 ASSERT_EQUAL_64(0x54761032, x2); | 1535 CHECK_EQUAL_64(0x54761032, x2); |
1536 ASSERT_EQUAL_64(0xdcfe98ba54761032UL, x3); | 1536 CHECK_EQUAL_64(0xdcfe98ba54761032UL, x3); |
1537 ASSERT_EQUAL_64(0x10325476, x4); | 1537 CHECK_EQUAL_64(0x10325476, x4); |
1538 ASSERT_EQUAL_64(0x98badcfe10325476UL, x5); | 1538 CHECK_EQUAL_64(0x98badcfe10325476UL, x5); |
1539 ASSERT_EQUAL_64(0x1032547698badcfeUL, x6); | 1539 CHECK_EQUAL_64(0x1032547698badcfeUL, x6); |
1540 | 1540 |
1541 TEARDOWN(); | 1541 TEARDOWN(); |
1542 } | 1542 } |
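
Rbit reverses the bit order of the source, while the Rev family reverses byte order at different granularities (Rev16 within each halfword, Rev/Rev32 within each word, the X-form Rev across all eight bytes), which is how each expected constant above is derived from 0xfedcba9876543210. A reference bit-reverse for the 32-bit Rbit:

#include <cstdint>

uint32_t RbitRef(uint32_t x) {
  uint32_t result = 0;
  for (int i = 0; i < 32; i++) {
    result = (result << 1) | ((x >> i) & 1);  // Bit i moves to bit 31 - i.
  }
  return result;
}

RbitRef(0x76543210) == 0x084c2a6e, which is the w0 expectation above.
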
1543 | 1543 |
1544 | 1544 |
1545 TEST(clz_cls) { | 1545 TEST(clz_cls) { |
1546 INIT_V8(); | 1546 INIT_V8(); |
1547 SETUP(); | 1547 SETUP(); |
1548 | 1548 |
1549 START(); | 1549 START(); |
1550 __ Mov(x24, 0x0008000000800000UL); | 1550 __ Mov(x24, 0x0008000000800000UL); |
1551 __ Mov(x25, 0xff800000fff80000UL); | 1551 __ Mov(x25, 0xff800000fff80000UL); |
1552 __ Mov(x26, 0); | 1552 __ Mov(x26, 0); |
1553 __ Clz(w0, w24); | 1553 __ Clz(w0, w24); |
1554 __ Clz(x1, x24); | 1554 __ Clz(x1, x24); |
1555 __ Clz(w2, w25); | 1555 __ Clz(w2, w25); |
1556 __ Clz(x3, x25); | 1556 __ Clz(x3, x25); |
1557 __ Clz(w4, w26); | 1557 __ Clz(w4, w26); |
1558 __ Clz(x5, x26); | 1558 __ Clz(x5, x26); |
1559 __ Cls(w6, w24); | 1559 __ Cls(w6, w24); |
1560 __ Cls(x7, x24); | 1560 __ Cls(x7, x24); |
1561 __ Cls(w8, w25); | 1561 __ Cls(w8, w25); |
1562 __ Cls(x9, x25); | 1562 __ Cls(x9, x25); |
1563 __ Cls(w10, w26); | 1563 __ Cls(w10, w26); |
1564 __ Cls(x11, x26); | 1564 __ Cls(x11, x26); |
1565 END(); | 1565 END(); |
1566 | 1566 |
1567 RUN(); | 1567 RUN(); |
1568 | 1568 |
1569 ASSERT_EQUAL_64(8, x0); | 1569 CHECK_EQUAL_64(8, x0); |
1570 ASSERT_EQUAL_64(12, x1); | 1570 CHECK_EQUAL_64(12, x1); |
1571 ASSERT_EQUAL_64(0, x2); | 1571 CHECK_EQUAL_64(0, x2); |
1572 ASSERT_EQUAL_64(0, x3); | 1572 CHECK_EQUAL_64(0, x3); |
1573 ASSERT_EQUAL_64(32, x4); | 1573 CHECK_EQUAL_64(32, x4); |
1574 ASSERT_EQUAL_64(64, x5); | 1574 CHECK_EQUAL_64(64, x5); |
1575 ASSERT_EQUAL_64(7, x6); | 1575 CHECK_EQUAL_64(7, x6); |
1576 ASSERT_EQUAL_64(11, x7); | 1576 CHECK_EQUAL_64(11, x7); |
1577 ASSERT_EQUAL_64(12, x8); | 1577 CHECK_EQUAL_64(12, x8); |
1578 ASSERT_EQUAL_64(8, x9); | 1578 CHECK_EQUAL_64(8, x9); |
1579 ASSERT_EQUAL_64(31, x10); | 1579 CHECK_EQUAL_64(31, x10); |
1580 ASSERT_EQUAL_64(63, x11); | 1580 CHECK_EQUAL_64(63, x11); |
1581 | 1581 |
1582 TEARDOWN(); | 1582 TEARDOWN(); |
1583 } | 1583 } |
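
Clz counts the leading zero bits of the operand; Cls counts how many bits below the top bit are copies of it (leading sign bits, with the sign bit itself excluded), so a zero input gives 32/64 for Clz but only 31/63 for Cls. Reference models for the 32-bit forms:

#include <cstdint>

int ClzRef(uint32_t x) {
  int n = 0;
  for (int i = 31; i >= 0 && ((x >> i) & 1) == 0; i--) n++;
  return n;
}

int ClsRef(uint32_t x) {
  uint32_t sign = x >> 31;
  int n = 0;
  for (int i = 30; i >= 0 && ((x >> i) & 1) == sign; i--) n++;
  return n;
}

For w24 = 0x00800000 these give ClzRef == 8 and ClsRef == 7, matching the x0 and x6 checks.
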
1584 | 1584 |
1585 | 1585 |
1586 TEST(label) { | 1586 TEST(label) { |
1587 INIT_V8(); | 1587 INIT_V8(); |
1588 SETUP(); | 1588 SETUP(); |
1589 | 1589 |
1590 Label label_1, label_2, label_3, label_4; | 1590 Label label_1, label_2, label_3, label_4; |
(...skipping 17 matching lines...)
1608 __ Bl(&label_4); | 1608 __ Bl(&label_4); |
1609 END(); | 1609 END(); |
1610 | 1610 |
1611 __ Bind(&label_4); | 1611 __ Bind(&label_4); |
1612 __ Mov(x1, 0x1); | 1612 __ Mov(x1, 0x1); |
1613 __ Mov(lr, x22); | 1613 __ Mov(lr, x22); |
1614 END(); | 1614 END(); |
1615 | 1615 |
1616 RUN(); | 1616 RUN(); |
1617 | 1617 |
1618 ASSERT_EQUAL_64(0x1, x0); | 1618 CHECK_EQUAL_64(0x1, x0); |
1619 ASSERT_EQUAL_64(0x1, x1); | 1619 CHECK_EQUAL_64(0x1, x1); |
1620 | 1620 |
1621 TEARDOWN(); | 1621 TEARDOWN(); |
1622 } | 1622 } |
1623 | 1623 |
1624 | 1624 |
1625 TEST(branch_at_start) { | 1625 TEST(branch_at_start) { |
1626 INIT_V8(); | 1626 INIT_V8(); |
1627 SETUP(); | 1627 SETUP(); |
1628 | 1628 |
1629 Label good, exit; | 1629 Label good, exit; |
(...skipping 12 matching lines...)
1642 START_AFTER_RESET(); | 1642 START_AFTER_RESET(); |
1643 __ Mov(x0, 0x1); | 1643 __ Mov(x0, 0x1); |
1644 END(); | 1644 END(); |
1645 | 1645 |
1646 __ Bind(&good); | 1646 __ Bind(&good); |
1647 __ B(&exit); | 1647 __ B(&exit); |
1648 END(); | 1648 END(); |
1649 | 1649 |
1650 RUN(); | 1650 RUN(); |
1651 | 1651 |
1652 ASSERT_EQUAL_64(0x1, x0); | 1652 CHECK_EQUAL_64(0x1, x0); |
1653 TEARDOWN(); | 1653 TEARDOWN(); |
1654 } | 1654 } |
1655 | 1655 |
1656 | 1656 |
1657 TEST(adr) { | 1657 TEST(adr) { |
1658 INIT_V8(); | 1658 INIT_V8(); |
1659 SETUP(); | 1659 SETUP(); |
1660 | 1660 |
1661 Label label_1, label_2, label_3, label_4; | 1661 Label label_1, label_2, label_3, label_4; |
1662 | 1662 |
(...skipping 23 matching lines...)
1686 __ Adr(x3, &label_3); | 1686 __ Adr(x3, &label_3); |
1687 __ Adr(x4, &label_3); | 1687 __ Adr(x4, &label_3); |
1688 __ Adr(x5, &label_2); // Simple reverse reference. | 1688 __ Adr(x5, &label_2); // Simple reverse reference. |
1689 __ Br(x5); // label_2 | 1689 __ Br(x5); // label_2 |
1690 | 1690 |
1691 __ Bind(&label_4); | 1691 __ Bind(&label_4); |
1692 END(); | 1692 END(); |
1693 | 1693 |
1694 RUN(); | 1694 RUN(); |
1695 | 1695 |
1696 ASSERT_EQUAL_64(0x0, x0); | 1696 CHECK_EQUAL_64(0x0, x0); |
1697 ASSERT_EQUAL_64(0x0, x1); | 1697 CHECK_EQUAL_64(0x0, x1); |
1698 | 1698 |
1699 TEARDOWN(); | 1699 TEARDOWN(); |
1700 } | 1700 } |
1701 | 1701 |
1702 | 1702 |
1703 TEST(adr_far) { | 1703 TEST(adr_far) { |
1704 INIT_V8(); | 1704 INIT_V8(); |
1705 | 1705 |
1706 int max_range = 1 << (Instruction::ImmPCRelRangeBitwidth - 1); | 1706 int max_range = 1 << (Instruction::ImmPCRelRangeBitwidth - 1); |
1707 SETUP_SIZE(max_range + 1000 * kInstructionSize); | 1707 SETUP_SIZE(max_range + 1000 * kInstructionSize); |
(...skipping 44 matching lines...)
1752 | 1752 |
1753 __ B(&done); | 1753 __ B(&done); |
1754 __ Bind(&fail); | 1754 __ Bind(&fail); |
1755 __ Orr(x0, x0, 1 << 4); | 1755 __ Orr(x0, x0, 1 << 4); |
1756 __ Bind(&done); | 1756 __ Bind(&done); |
1757 | 1757 |
1758 END(); | 1758 END(); |
1759 | 1759 |
1760 RUN(); | 1760 RUN(); |
1761 | 1761 |
1762 ASSERT_EQUAL_64(0xf, x0); | 1762 CHECK_EQUAL_64(0xf, x0); |
1763 | 1763 |
1764 TEARDOWN(); | 1764 TEARDOWN(); |
1765 } | 1765 } |
1766 | 1766 |
1767 | 1767 |
1768 TEST(branch_cond) { | 1768 TEST(branch_cond) { |
1769 INIT_V8(); | 1769 INIT_V8(); |
1770 SETUP(); | 1770 SETUP(); |
1771 | 1771 |
1772 Label wrong; | 1772 Label wrong; |
(...skipping 69 matching lines...)
1842 __ Bind(&ok_6); | 1842 __ Bind(&ok_6); |
1843 | 1843 |
1844 END(); | 1844 END(); |
1845 | 1845 |
1846 __ Bind(&wrong); | 1846 __ Bind(&wrong); |
1847 __ Mov(x0, 0x0); | 1847 __ Mov(x0, 0x0); |
1848 END(); | 1848 END(); |
1849 | 1849 |
1850 RUN(); | 1850 RUN(); |
1851 | 1851 |
1852 ASSERT_EQUAL_64(0x1, x0); | 1852 CHECK_EQUAL_64(0x1, x0); |
1853 | 1853 |
1854 TEARDOWN(); | 1854 TEARDOWN(); |
1855 } | 1855 } |
1856 | 1856 |
1857 | 1857 |
1858 TEST(branch_to_reg) { | 1858 TEST(branch_to_reg) { |
1859 INIT_V8(); | 1859 INIT_V8(); |
1860 SETUP(); | 1860 SETUP(); |
1861 | 1861 |
1862 // Test br. | 1862 // Test br. |
(...skipping 26 matching lines...)
1889 | 1889 |
1890 __ Bind(&after_fn2); | 1890 __ Bind(&after_fn2); |
1891 __ Bl(&fn2); | 1891 __ Bl(&fn2); |
1892 __ Mov(x3, lr); | 1892 __ Mov(x3, lr); |
1893 | 1893 |
1894 __ Mov(lr, x29); | 1894 __ Mov(lr, x29); |
1895 END(); | 1895 END(); |
1896 | 1896 |
1897 RUN(); | 1897 RUN(); |
1898 | 1898 |
1899 ASSERT_EQUAL_64(core.xreg(3) + kInstructionSize, x0); | 1899 CHECK_EQUAL_64(core.xreg(3) + kInstructionSize, x0); |
1900 ASSERT_EQUAL_64(42, x1); | 1900 CHECK_EQUAL_64(42, x1); |
1901 ASSERT_EQUAL_64(84, x2); | 1901 CHECK_EQUAL_64(84, x2); |
1902 | 1902 |
1903 TEARDOWN(); | 1903 TEARDOWN(); |
1904 } | 1904 } |
1905 | 1905 |
1906 | 1906 |
1907 TEST(compare_branch) { | 1907 TEST(compare_branch) { |
1908 INIT_V8(); | 1908 INIT_V8(); |
1909 SETUP(); | 1909 SETUP(); |
1910 | 1910 |
1911 START(); | 1911 START(); |
(...skipping 47 matching lines...)
1959 __ Cbnz(w18, &b); | 1959 __ Cbnz(w18, &b); |
1960 __ B(&b_end); | 1960 __ B(&b_end); |
1961 __ Bind(&b); | 1961 __ Bind(&b); |
1962 __ Mov(x5, 1); | 1962 __ Mov(x5, 1); |
1963 __ Bind(&b_end); | 1963 __ Bind(&b_end); |
1964 | 1964 |
1965 END(); | 1965 END(); |
1966 | 1966 |
1967 RUN(); | 1967 RUN(); |
1968 | 1968 |
1969 ASSERT_EQUAL_64(1, x0); | 1969 CHECK_EQUAL_64(1, x0); |
1970 ASSERT_EQUAL_64(0, x1); | 1970 CHECK_EQUAL_64(0, x1); |
1971 ASSERT_EQUAL_64(1, x2); | 1971 CHECK_EQUAL_64(1, x2); |
1972 ASSERT_EQUAL_64(0, x3); | 1972 CHECK_EQUAL_64(0, x3); |
1973 ASSERT_EQUAL_64(1, x4); | 1973 CHECK_EQUAL_64(1, x4); |
1974 ASSERT_EQUAL_64(0, x5); | 1974 CHECK_EQUAL_64(0, x5); |
1975 | 1975 |
1976 TEARDOWN(); | 1976 TEARDOWN(); |
1977 } | 1977 } |
1978 | 1978 |
1979 | 1979 |
1980 TEST(test_branch) { | 1980 TEST(test_branch) { |
1981 INIT_V8(); | 1981 INIT_V8(); |
1982 SETUP(); | 1982 SETUP(); |
1983 | 1983 |
1984 START(); | 1984 START(); |
(...skipping 27 matching lines...)
2012 Label nbo, nbo_end; | 2012 Label nbo, nbo_end; |
2013 __ Tbnz(w16, 2, &nbo); | 2013 __ Tbnz(w16, 2, &nbo); |
2014 __ B(&nbo_end); | 2014 __ B(&nbo_end); |
2015 __ Bind(&nbo); | 2015 __ Bind(&nbo); |
2016 __ Mov(x3, 1); | 2016 __ Mov(x3, 1); |
2017 __ Bind(&nbo_end); | 2017 __ Bind(&nbo_end); |
2018 END(); | 2018 END(); |
2019 | 2019 |
2020 RUN(); | 2020 RUN(); |
2021 | 2021 |
2022 ASSERT_EQUAL_64(1, x0); | 2022 CHECK_EQUAL_64(1, x0); |
2023 ASSERT_EQUAL_64(0, x1); | 2023 CHECK_EQUAL_64(0, x1); |
2024 ASSERT_EQUAL_64(1, x2); | 2024 CHECK_EQUAL_64(1, x2); |
2025 ASSERT_EQUAL_64(0, x3); | 2025 CHECK_EQUAL_64(0, x3); |
2026 | 2026 |
2027 TEARDOWN(); | 2027 TEARDOWN(); |
2028 } | 2028 } |
2029 | 2029 |
2030 | 2030 |
2031 TEST(far_branch_backward) { | 2031 TEST(far_branch_backward) { |
2032 INIT_V8(); | 2032 INIT_V8(); |
2033 | 2033 |
2034 // Test that the MacroAssembler correctly resolves backward branches to labels | 2034 // Test that the MacroAssembler correctly resolves backward branches to labels |
2035 // that are outside the immediate range of branch instructions. | 2035 // that are outside the immediate range of branch instructions. |
(...skipping 52 matching lines...)
2088 CHECK_GE(7 * kInstructionSize, __ SizeOfCodeGeneratedSince(&test_tbz)); | 2088 CHECK_GE(7 * kInstructionSize, __ SizeOfCodeGeneratedSince(&test_tbz)); |
2089 | 2089 |
2090 __ Bind(&fail); | 2090 __ Bind(&fail); |
2091 __ Mov(x1, 0); | 2091 __ Mov(x1, 0); |
2092 __ Bind(&done); | 2092 __ Bind(&done); |
2093 | 2093 |
2094 END(); | 2094 END(); |
2095 | 2095 |
2096 RUN(); | 2096 RUN(); |
2097 | 2097 |
2098 ASSERT_EQUAL_64(0x7, x0); | 2098 CHECK_EQUAL_64(0x7, x0); |
2099 ASSERT_EQUAL_64(0x1, x1); | 2099 CHECK_EQUAL_64(0x1, x1); |
2100 | 2100 |
2101 TEARDOWN(); | 2101 TEARDOWN(); |
2102 } | 2102 } |
2103 | 2103 |
2104 | 2104 |
2105 TEST(far_branch_simple_veneer) { | 2105 TEST(far_branch_simple_veneer) { |
2106 INIT_V8(); | 2106 INIT_V8(); |
2107 | 2107 |
2108 // Test that the MacroAssembler correctly emits veneers for forward branches | 2108 // Test that the MacroAssembler correctly emits veneers for forward branches |
2109 // to labels that are outside the immediate range of branch instructions. | 2109 // to labels that are outside the immediate range of branch instructions. |
(...skipping 48 matching lines...)
2158 | 2158 |
2159 __ B(&done); | 2159 __ B(&done); |
2160 __ Bind(&fail); | 2160 __ Bind(&fail); |
2161 __ Mov(x1, 0); | 2161 __ Mov(x1, 0); |
2162 __ Bind(&done); | 2162 __ Bind(&done); |
2163 | 2163 |
2164 END(); | 2164 END(); |
2165 | 2165 |
2166 RUN(); | 2166 RUN(); |
2167 | 2167 |
2168 ASSERT_EQUAL_64(0x7, x0); | 2168 CHECK_EQUAL_64(0x7, x0); |
2169 ASSERT_EQUAL_64(0x1, x1); | 2169 CHECK_EQUAL_64(0x1, x1); |
2170 | 2170 |
2171 TEARDOWN(); | 2171 TEARDOWN(); |
2172 } | 2172 } |
2173 | 2173 |
2174 | 2174 |
2175 TEST(far_branch_veneer_link_chain) { | 2175 TEST(far_branch_veneer_link_chain) { |
2176 INIT_V8(); | 2176 INIT_V8(); |
2177 | 2177 |
2178 // Test that the MacroAssembler correctly emits veneers for forward branches | 2178 // Test that the MacroAssembler correctly emits veneers for forward branches |
2179 // that target out-of-range labels and are part of multiple instructions | 2179 // that target out-of-range labels and are part of multiple instructions |
(...skipping 73 matching lines...)
2253 | 2253 |
2254 __ B(&done); | 2254 __ B(&done); |
2255 __ Bind(&fail); | 2255 __ Bind(&fail); |
2256 __ Mov(x1, 0); | 2256 __ Mov(x1, 0); |
2257 __ Bind(&done); | 2257 __ Bind(&done); |
2258 | 2258 |
2259 END(); | 2259 END(); |
2260 | 2260 |
2261 RUN(); | 2261 RUN(); |
2262 | 2262 |
2263 ASSERT_EQUAL_64(0x7, x0); | 2263 CHECK_EQUAL_64(0x7, x0); |
2264 ASSERT_EQUAL_64(0x1, x1); | 2264 CHECK_EQUAL_64(0x1, x1); |
2265 | 2265 |
2266 TEARDOWN(); | 2266 TEARDOWN(); |
2267 } | 2267 } |
2268 | 2268 |
2269 | 2269 |
2270 TEST(far_branch_veneer_broken_link_chain) { | 2270 TEST(far_branch_veneer_broken_link_chain) { |
2271 INIT_V8(); | 2271 INIT_V8(); |
2272 | 2272 |
2273 // Check that the MacroAssembler correctly handles the situation when removing | 2273 // Check that the MacroAssembler correctly handles the situation when removing |
2274 // a branch from the link chain of a label and the two links on each side of | 2274 // a branch from the link chain of a label and the two links on each side of |
(...skipping 68 matching lines...)
2343 | 2343 |
2344 __ B(&done); | 2344 __ B(&done); |
2345 __ Bind(&fail); | 2345 __ Bind(&fail); |
2346 __ Mov(x1, 0); | 2346 __ Mov(x1, 0); |
2347 __ Bind(&done); | 2347 __ Bind(&done); |
2348 | 2348 |
2349 END(); | 2349 END(); |
2350 | 2350 |
2351 RUN(); | 2351 RUN(); |
2352 | 2352 |
2353 ASSERT_EQUAL_64(0x3, x0); | 2353 CHECK_EQUAL_64(0x3, x0); |
2354 ASSERT_EQUAL_64(0x1, x1); | 2354 CHECK_EQUAL_64(0x1, x1); |
2355 | 2355 |
2356 TEARDOWN(); | 2356 TEARDOWN(); |
2357 } | 2357 } |
2358 | 2358 |
2359 | 2359 |
2360 TEST(branch_type) { | 2360 TEST(branch_type) { |
2361 INIT_V8(); | 2361 INIT_V8(); |
2362 | 2362 |
2363 SETUP(); | 2363 SETUP(); |
2364 | 2364 |
(...skipping 36 matching lines...)
2401 | 2401 |
2402 __ Bind(&fail); | 2402 __ Bind(&fail); |
2403 __ Mov(x0, 0x1); | 2403 __ Mov(x0, 0x1); |
2404 | 2404 |
2405 __ Bind(&done); | 2405 __ Bind(&done); |
2406 | 2406 |
2407 END(); | 2407 END(); |
2408 | 2408 |
2409 RUN(); | 2409 RUN(); |
2410 | 2410 |
2411 ASSERT_EQUAL_64(0x0, x0); | 2411 CHECK_EQUAL_64(0x0, x0); |
2412 | 2412 |
2413 TEARDOWN(); | 2413 TEARDOWN(); |
2414 } | 2414 } |
2415 | 2415 |
2416 | 2416 |
2417 TEST(ldr_str_offset) { | 2417 TEST(ldr_str_offset) { |
2418 INIT_V8(); | 2418 INIT_V8(); |
2419 SETUP(); | 2419 SETUP(); |
2420 | 2420 |
2421 uint64_t src[2] = {0xfedcba9876543210UL, 0x0123456789abcdefUL}; | 2421 uint64_t src[2] = {0xfedcba9876543210UL, 0x0123456789abcdefUL}; |
(...skipping 11 matching lines...)
2433 __ Ldr(x2, MemOperand(x17, 8)); | 2433 __ Ldr(x2, MemOperand(x17, 8)); |
2434 __ Str(x2, MemOperand(x18, 16)); | 2434 __ Str(x2, MemOperand(x18, 16)); |
2435 __ Ldrb(w3, MemOperand(x17, 1)); | 2435 __ Ldrb(w3, MemOperand(x17, 1)); |
2436 __ Strb(w3, MemOperand(x18, 25)); | 2436 __ Strb(w3, MemOperand(x18, 25)); |
2437 __ Ldrh(w4, MemOperand(x17, 2)); | 2437 __ Ldrh(w4, MemOperand(x17, 2)); |
2438 __ Strh(w4, MemOperand(x18, 33)); | 2438 __ Strh(w4, MemOperand(x18, 33)); |
2439 END(); | 2439 END(); |
2440 | 2440 |
2441 RUN(); | 2441 RUN(); |
2442 | 2442 |
2443 ASSERT_EQUAL_64(0x76543210, x0); | 2443 CHECK_EQUAL_64(0x76543210, x0); |
2444 ASSERT_EQUAL_64(0x76543210, dst[0]); | 2444 CHECK_EQUAL_64(0x76543210, dst[0]); |
2445 ASSERT_EQUAL_64(0xfedcba98, x1); | 2445 CHECK_EQUAL_64(0xfedcba98, x1); |
2446 ASSERT_EQUAL_64(0xfedcba9800000000UL, dst[1]); | 2446 CHECK_EQUAL_64(0xfedcba9800000000UL, dst[1]); |
2447 ASSERT_EQUAL_64(0x0123456789abcdefUL, x2); | 2447 CHECK_EQUAL_64(0x0123456789abcdefUL, x2); |
2448 ASSERT_EQUAL_64(0x0123456789abcdefUL, dst[2]); | 2448 CHECK_EQUAL_64(0x0123456789abcdefUL, dst[2]); |
2449 ASSERT_EQUAL_64(0x32, x3); | 2449 CHECK_EQUAL_64(0x32, x3); |
2450 ASSERT_EQUAL_64(0x3200, dst[3]); | 2450 CHECK_EQUAL_64(0x3200, dst[3]); |
2451 ASSERT_EQUAL_64(0x7654, x4); | 2451 CHECK_EQUAL_64(0x7654, x4); |
2452 ASSERT_EQUAL_64(0x765400, dst[4]); | 2452 CHECK_EQUAL_64(0x765400, dst[4]); |
2453 ASSERT_EQUAL_64(src_base, x17); | 2453 CHECK_EQUAL_64(src_base, x17); |
2454 ASSERT_EQUAL_64(dst_base, x18); | 2454 CHECK_EQUAL_64(dst_base, x18); |
2455 | 2455 |
2456 TEARDOWN(); | 2456 TEARDOWN(); |
2457 } | 2457 } |
2458 | 2458 |
2459 | 2459 |
2460 TEST(ldr_str_wide) { | 2460 TEST(ldr_str_wide) { |
2461 INIT_V8(); | 2461 INIT_V8(); |
2462 SETUP(); | 2462 SETUP(); |
2463 | 2463 |
2464 uint32_t src[8192]; | 2464 uint32_t src[8192]; |
(...skipping 17 matching lines...)
2482 __ Ldr(w0, MemOperand(x22, 8191 * sizeof(src[0]))); | 2482 __ Ldr(w0, MemOperand(x22, 8191 * sizeof(src[0]))); |
2483 __ Str(w0, MemOperand(x23, 8191 * sizeof(dst[0]))); | 2483 __ Str(w0, MemOperand(x23, 8191 * sizeof(dst[0]))); |
2484 __ Ldr(w1, MemOperand(x24, 4096 * sizeof(src[0]), PostIndex)); | 2484 __ Ldr(w1, MemOperand(x24, 4096 * sizeof(src[0]), PostIndex)); |
2485 __ Str(w1, MemOperand(x25, 4096 * sizeof(dst[0]), PostIndex)); | 2485 __ Str(w1, MemOperand(x25, 4096 * sizeof(dst[0]), PostIndex)); |
2486 __ Ldr(w2, MemOperand(x26, 6144 * sizeof(src[0]), PreIndex)); | 2486 __ Ldr(w2, MemOperand(x26, 6144 * sizeof(src[0]), PreIndex)); |
2487 __ Str(w2, MemOperand(x27, 6144 * sizeof(dst[0]), PreIndex)); | 2487 __ Str(w2, MemOperand(x27, 6144 * sizeof(dst[0]), PreIndex)); |
2488 END(); | 2488 END(); |
2489 | 2489 |
2490 RUN(); | 2490 RUN(); |
2491 | 2491 |
2492 ASSERT_EQUAL_32(8191, w0); | 2492 CHECK_EQUAL_32(8191, w0); |
2493 ASSERT_EQUAL_32(8191, dst[8191]); | 2493 CHECK_EQUAL_32(8191, dst[8191]); |
2494 ASSERT_EQUAL_64(src_base, x22); | 2494 CHECK_EQUAL_64(src_base, x22); |
2495 ASSERT_EQUAL_64(dst_base, x23); | 2495 CHECK_EQUAL_64(dst_base, x23); |
2496 ASSERT_EQUAL_32(0, w1); | 2496 CHECK_EQUAL_32(0, w1); |
2497 ASSERT_EQUAL_32(0, dst[0]); | 2497 CHECK_EQUAL_32(0, dst[0]); |
2498 ASSERT_EQUAL_64(src_base + 4096 * sizeof(src[0]), x24); | 2498 CHECK_EQUAL_64(src_base + 4096 * sizeof(src[0]), x24); |
2499 ASSERT_EQUAL_64(dst_base + 4096 * sizeof(dst[0]), x25); | 2499 CHECK_EQUAL_64(dst_base + 4096 * sizeof(dst[0]), x25); |
2500 ASSERT_EQUAL_32(6144, w2); | 2500 CHECK_EQUAL_32(6144, w2); |
2501 ASSERT_EQUAL_32(6144, dst[6144]); | 2501 CHECK_EQUAL_32(6144, dst[6144]); |
2502 ASSERT_EQUAL_64(src_base + 6144 * sizeof(src[0]), x26); | 2502 CHECK_EQUAL_64(src_base + 6144 * sizeof(src[0]), x26); |
2503 ASSERT_EQUAL_64(dst_base + 6144 * sizeof(dst[0]), x27); | 2503 CHECK_EQUAL_64(dst_base + 6144 * sizeof(dst[0]), x27); |
2504 | 2504 |
2505 TEARDOWN(); | 2505 TEARDOWN(); |
2506 } | 2506 } |
2507 | 2507 |
2508 | 2508 |
2509 TEST(ldr_str_preindex) { | 2509 TEST(ldr_str_preindex) { |
2510 INIT_V8(); | 2510 INIT_V8(); |
2511 SETUP(); | 2511 SETUP(); |
2512 | 2512 |
2513 uint64_t src[2] = {0xfedcba9876543210UL, 0x0123456789abcdefUL}; | 2513 uint64_t src[2] = {0xfedcba9876543210UL, 0x0123456789abcdefUL}; |
(...skipping 19 matching lines...)
2533 __ Ldr(w2, MemOperand(x21, -4, PreIndex)); | 2533 __ Ldr(w2, MemOperand(x21, -4, PreIndex)); |
2534 __ Str(w2, MemOperand(x22, -4, PreIndex)); | 2534 __ Str(w2, MemOperand(x22, -4, PreIndex)); |
2535 __ Ldrb(w3, MemOperand(x23, 1, PreIndex)); | 2535 __ Ldrb(w3, MemOperand(x23, 1, PreIndex)); |
2536 __ Strb(w3, MemOperand(x24, 25, PreIndex)); | 2536 __ Strb(w3, MemOperand(x24, 25, PreIndex)); |
2537 __ Ldrh(w4, MemOperand(x25, 3, PreIndex)); | 2537 __ Ldrh(w4, MemOperand(x25, 3, PreIndex)); |
2538 __ Strh(w4, MemOperand(x26, 41, PreIndex)); | 2538 __ Strh(w4, MemOperand(x26, 41, PreIndex)); |
2539 END(); | 2539 END(); |
2540 | 2540 |
2541 RUN(); | 2541 RUN(); |
2542 | 2542 |
2543 ASSERT_EQUAL_64(0xfedcba98, x0); | 2543 CHECK_EQUAL_64(0xfedcba98, x0); |
2544 ASSERT_EQUAL_64(0xfedcba9800000000UL, dst[1]); | 2544 CHECK_EQUAL_64(0xfedcba9800000000UL, dst[1]); |
2545 ASSERT_EQUAL_64(0x0123456789abcdefUL, x1); | 2545 CHECK_EQUAL_64(0x0123456789abcdefUL, x1); |
2546 ASSERT_EQUAL_64(0x0123456789abcdefUL, dst[2]); | 2546 CHECK_EQUAL_64(0x0123456789abcdefUL, dst[2]); |
2547 ASSERT_EQUAL_64(0x01234567, x2); | 2547 CHECK_EQUAL_64(0x01234567, x2); |
2548 ASSERT_EQUAL_64(0x0123456700000000UL, dst[4]); | 2548 CHECK_EQUAL_64(0x0123456700000000UL, dst[4]); |
2549 ASSERT_EQUAL_64(0x32, x3); | 2549 CHECK_EQUAL_64(0x32, x3); |
2550 ASSERT_EQUAL_64(0x3200, dst[3]); | 2550 CHECK_EQUAL_64(0x3200, dst[3]); |
2551 ASSERT_EQUAL_64(0x9876, x4); | 2551 CHECK_EQUAL_64(0x9876, x4); |
2552 ASSERT_EQUAL_64(0x987600, dst[5]); | 2552 CHECK_EQUAL_64(0x987600, dst[5]); |
2553 ASSERT_EQUAL_64(src_base + 4, x17); | 2553 CHECK_EQUAL_64(src_base + 4, x17); |
2554 ASSERT_EQUAL_64(dst_base + 12, x18); | 2554 CHECK_EQUAL_64(dst_base + 12, x18); |
2555 ASSERT_EQUAL_64(src_base + 8, x19); | 2555 CHECK_EQUAL_64(src_base + 8, x19); |
2556 ASSERT_EQUAL_64(dst_base + 16, x20); | 2556 CHECK_EQUAL_64(dst_base + 16, x20); |
2557 ASSERT_EQUAL_64(src_base + 12, x21); | 2557 CHECK_EQUAL_64(src_base + 12, x21); |
2558 ASSERT_EQUAL_64(dst_base + 36, x22); | 2558 CHECK_EQUAL_64(dst_base + 36, x22); |
2559 ASSERT_EQUAL_64(src_base + 1, x23); | 2559 CHECK_EQUAL_64(src_base + 1, x23); |
2560 ASSERT_EQUAL_64(dst_base + 25, x24); | 2560 CHECK_EQUAL_64(dst_base + 25, x24); |
2561 ASSERT_EQUAL_64(src_base + 3, x25); | 2561 CHECK_EQUAL_64(src_base + 3, x25); |
2562 ASSERT_EQUAL_64(dst_base + 41, x26); | 2562 CHECK_EQUAL_64(dst_base + 41, x26); |
2563 | 2563 |
2564 TEARDOWN(); | 2564 TEARDOWN(); |
2565 } | 2565 } |
2566 | 2566 |
2567 | 2567 |
2568 TEST(ldr_str_postindex) { | 2568 TEST(ldr_str_postindex) { |
2569 INIT_V8(); | 2569 INIT_V8(); |
2570 SETUP(); | 2570 SETUP(); |
2571 | 2571 |
2572 uint64_t src[2] = {0xfedcba9876543210UL, 0x0123456789abcdefUL}; | 2572 uint64_t src[2] = {0xfedcba9876543210UL, 0x0123456789abcdefUL}; |
(...skipping 19 matching lines...)
2592 __ Ldr(x2, MemOperand(x21, -8, PostIndex)); | 2592 __ Ldr(x2, MemOperand(x21, -8, PostIndex)); |
2593 __ Str(x2, MemOperand(x22, -32, PostIndex)); | 2593 __ Str(x2, MemOperand(x22, -32, PostIndex)); |
2594 __ Ldrb(w3, MemOperand(x23, 1, PostIndex)); | 2594 __ Ldrb(w3, MemOperand(x23, 1, PostIndex)); |
2595 __ Strb(w3, MemOperand(x24, 5, PostIndex)); | 2595 __ Strb(w3, MemOperand(x24, 5, PostIndex)); |
2596 __ Ldrh(w4, MemOperand(x25, -3, PostIndex)); | 2596 __ Ldrh(w4, MemOperand(x25, -3, PostIndex)); |
2597 __ Strh(w4, MemOperand(x26, -41, PostIndex)); | 2597 __ Strh(w4, MemOperand(x26, -41, PostIndex)); |
2598 END(); | 2598 END(); |
2599 | 2599 |
2600 RUN(); | 2600 RUN(); |
2601 | 2601 |
2602 ASSERT_EQUAL_64(0xfedcba98, x0); | 2602 CHECK_EQUAL_64(0xfedcba98, x0); |
2603 ASSERT_EQUAL_64(0xfedcba9800000000UL, dst[1]); | 2603 CHECK_EQUAL_64(0xfedcba9800000000UL, dst[1]); |
2604 ASSERT_EQUAL_64(0x0123456789abcdefUL, x1); | 2604 CHECK_EQUAL_64(0x0123456789abcdefUL, x1); |
2605 ASSERT_EQUAL_64(0x0123456789abcdefUL, dst[2]); | 2605 CHECK_EQUAL_64(0x0123456789abcdefUL, dst[2]); |
2606 ASSERT_EQUAL_64(0x0123456789abcdefUL, x2); | 2606 CHECK_EQUAL_64(0x0123456789abcdefUL, x2); |
2607 ASSERT_EQUAL_64(0x0123456789abcdefUL, dst[4]); | 2607 CHECK_EQUAL_64(0x0123456789abcdefUL, dst[4]); |
2608 ASSERT_EQUAL_64(0x32, x3); | 2608 CHECK_EQUAL_64(0x32, x3); |
2609 ASSERT_EQUAL_64(0x3200, dst[3]); | 2609 CHECK_EQUAL_64(0x3200, dst[3]); |
2610 ASSERT_EQUAL_64(0x9876, x4); | 2610 CHECK_EQUAL_64(0x9876, x4); |
2611 ASSERT_EQUAL_64(0x987600, dst[5]); | 2611 CHECK_EQUAL_64(0x987600, dst[5]); |
2612 ASSERT_EQUAL_64(src_base + 8, x17); | 2612 CHECK_EQUAL_64(src_base + 8, x17); |
2613 ASSERT_EQUAL_64(dst_base + 24, x18); | 2613 CHECK_EQUAL_64(dst_base + 24, x18); |
2614 ASSERT_EQUAL_64(src_base + 16, x19); | 2614 CHECK_EQUAL_64(src_base + 16, x19); |
2615 ASSERT_EQUAL_64(dst_base + 32, x20); | 2615 CHECK_EQUAL_64(dst_base + 32, x20); |
2616 ASSERT_EQUAL_64(src_base, x21); | 2616 CHECK_EQUAL_64(src_base, x21); |
2617 ASSERT_EQUAL_64(dst_base, x22); | 2617 CHECK_EQUAL_64(dst_base, x22); |
2618 ASSERT_EQUAL_64(src_base + 2, x23); | 2618 CHECK_EQUAL_64(src_base + 2, x23); |
2619 ASSERT_EQUAL_64(dst_base + 30, x24); | 2619 CHECK_EQUAL_64(dst_base + 30, x24); |
2620 ASSERT_EQUAL_64(src_base, x25); | 2620 CHECK_EQUAL_64(src_base, x25); |
2621 ASSERT_EQUAL_64(dst_base, x26); | 2621 CHECK_EQUAL_64(dst_base, x26); |
2622 | 2622 |
2623 TEARDOWN(); | 2623 TEARDOWN(); |
2624 } | 2624 } |
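
The two tests above differ only in addressing mode: with PreIndex the base register is advanced by the offset before the access, while with PostIndex the access uses the original address and the base is advanced afterwards. A pointer-based sketch of the two modes (offsets in elements rather than bytes, purely illustrative):

#include <cstddef>
#include <cstdint>

uint32_t LoadPreIndex(uint32_t*& base, ptrdiff_t offset) {
  base += offset;   // Update the base first...
  return *base;     // ...then load from the new address.
}

uint32_t LoadPostIndex(uint32_t*& base, ptrdiff_t offset) {
  uint32_t value = *base;  // Load from the original address...
  base += offset;          // ...then update the base.
  return value;
}
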
2625 | 2625 |
2626 | 2626 |
2627 TEST(load_signed) { | 2627 TEST(load_signed) { |
2628 INIT_V8(); | 2628 INIT_V8(); |
2629 SETUP(); | 2629 SETUP(); |
2630 | 2630 |
2631 uint32_t src[2] = {0x80008080, 0x7fff7f7f}; | 2631 uint32_t src[2] = {0x80008080, 0x7fff7f7f}; |
2632 uintptr_t src_base = reinterpret_cast<uintptr_t>(src); | 2632 uintptr_t src_base = reinterpret_cast<uintptr_t>(src); |
2633 | 2633 |
2634 START(); | 2634 START(); |
2635 __ Mov(x24, src_base); | 2635 __ Mov(x24, src_base); |
2636 __ Ldrsb(w0, MemOperand(x24)); | 2636 __ Ldrsb(w0, MemOperand(x24)); |
2637 __ Ldrsb(w1, MemOperand(x24, 4)); | 2637 __ Ldrsb(w1, MemOperand(x24, 4)); |
2638 __ Ldrsh(w2, MemOperand(x24)); | 2638 __ Ldrsh(w2, MemOperand(x24)); |
2639 __ Ldrsh(w3, MemOperand(x24, 4)); | 2639 __ Ldrsh(w3, MemOperand(x24, 4)); |
2640 __ Ldrsb(x4, MemOperand(x24)); | 2640 __ Ldrsb(x4, MemOperand(x24)); |
2641 __ Ldrsb(x5, MemOperand(x24, 4)); | 2641 __ Ldrsb(x5, MemOperand(x24, 4)); |
2642 __ Ldrsh(x6, MemOperand(x24)); | 2642 __ Ldrsh(x6, MemOperand(x24)); |
2643 __ Ldrsh(x7, MemOperand(x24, 4)); | 2643 __ Ldrsh(x7, MemOperand(x24, 4)); |
2644 __ Ldrsw(x8, MemOperand(x24)); | 2644 __ Ldrsw(x8, MemOperand(x24)); |
2645 __ Ldrsw(x9, MemOperand(x24, 4)); | 2645 __ Ldrsw(x9, MemOperand(x24, 4)); |
2646 END(); | 2646 END(); |
2647 | 2647 |
2648 RUN(); | 2648 RUN(); |
2649 | 2649 |
2650 ASSERT_EQUAL_64(0xffffff80, x0); | 2650 CHECK_EQUAL_64(0xffffff80, x0); |
2651 ASSERT_EQUAL_64(0x0000007f, x1); | 2651 CHECK_EQUAL_64(0x0000007f, x1); |
2652 ASSERT_EQUAL_64(0xffff8080, x2); | 2652 CHECK_EQUAL_64(0xffff8080, x2); |
2653 ASSERT_EQUAL_64(0x00007f7f, x3); | 2653 CHECK_EQUAL_64(0x00007f7f, x3); |
2654 ASSERT_EQUAL_64(0xffffffffffffff80UL, x4); | 2654 CHECK_EQUAL_64(0xffffffffffffff80UL, x4); |
2655 ASSERT_EQUAL_64(0x000000000000007fUL, x5); | 2655 CHECK_EQUAL_64(0x000000000000007fUL, x5); |
2656 ASSERT_EQUAL_64(0xffffffffffff8080UL, x6); | 2656 CHECK_EQUAL_64(0xffffffffffff8080UL, x6); |
2657 ASSERT_EQUAL_64(0x0000000000007f7fUL, x7); | 2657 CHECK_EQUAL_64(0x0000000000007f7fUL, x7); |
2658 ASSERT_EQUAL_64(0xffffffff80008080UL, x8); | 2658 CHECK_EQUAL_64(0xffffffff80008080UL, x8); |
2659 ASSERT_EQUAL_64(0x000000007fff7f7fUL, x9); | 2659 CHECK_EQUAL_64(0x000000007fff7f7fUL, x9); |
2660 | 2660 |
2661 TEARDOWN(); | 2661 TEARDOWN(); |
2662 } | 2662 } |
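
Ldrsb, Ldrsh and Ldrsw load a byte, halfword or word and sign-extend it to the destination width, so the 0x80 byte at the start of src becomes 0xffffff80 in a W destination and 0xffffffffffffff80 in an X destination, as the checks above show. Sketched in C++ (assuming a little-endian target, as on ARM64 here):

#include <cstdint>

// LDRSB into a W register: load a byte, sign-extend to 32 bits.
int32_t LdrsbWRef(const void* addr) {
  return *static_cast<const int8_t*>(addr);
}

// LDRSB into an X register: the same byte, sign-extended to 64 bits.
int64_t LdrsbXRef(const void* addr) {
  return *static_cast<const int8_t*>(addr);
}
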
2663 | 2663 |
2664 | 2664 |
2665 TEST(load_store_regoffset) { | 2665 TEST(load_store_regoffset) { |
2666 INIT_V8(); | 2666 INIT_V8(); |
2667 SETUP(); | 2667 SETUP(); |
2668 | 2668 |
2669 uint32_t src[3] = {1, 2, 3}; | 2669 uint32_t src[3] = {1, 2, 3}; |
(...skipping 19 matching lines...)
2689 __ Ldr(w2, MemOperand(x18, x26)); | 2689 __ Ldr(w2, MemOperand(x18, x26)); |
2690 __ Ldr(w3, MemOperand(x18, x27, SXTW)); | 2690 __ Ldr(w3, MemOperand(x18, x27, SXTW)); |
2691 __ Ldr(w4, MemOperand(x18, x28, SXTW, 2)); | 2691 __ Ldr(w4, MemOperand(x18, x28, SXTW, 2)); |
2692 __ Str(w0, MemOperand(x17, x24)); | 2692 __ Str(w0, MemOperand(x17, x24)); |
2693 __ Str(x1, MemOperand(x17, x25)); | 2693 __ Str(x1, MemOperand(x17, x25)); |
2694 __ Str(w2, MemOperand(x20, x29, SXTW, 2)); | 2694 __ Str(w2, MemOperand(x20, x29, SXTW, 2)); |
2695 END(); | 2695 END(); |
2696 | 2696 |
2697 RUN(); | 2697 RUN(); |
2698 | 2698 |
2699 ASSERT_EQUAL_64(1, x0); | 2699 CHECK_EQUAL_64(1, x0); |
2700 ASSERT_EQUAL_64(0x0000000300000002UL, x1); | 2700 CHECK_EQUAL_64(0x0000000300000002UL, x1); |
2701 ASSERT_EQUAL_64(3, x2); | 2701 CHECK_EQUAL_64(3, x2); |
2702 ASSERT_EQUAL_64(3, x3); | 2702 CHECK_EQUAL_64(3, x3); |
2703 ASSERT_EQUAL_64(2, x4); | 2703 CHECK_EQUAL_64(2, x4); |
2704 ASSERT_EQUAL_32(1, dst[0]); | 2704 CHECK_EQUAL_32(1, dst[0]); |
2705 ASSERT_EQUAL_32(2, dst[1]); | 2705 CHECK_EQUAL_32(2, dst[1]); |
2706 ASSERT_EQUAL_32(3, dst[2]); | 2706 CHECK_EQUAL_32(3, dst[2]); |
2707 ASSERT_EQUAL_32(3, dst[3]); | 2707 CHECK_EQUAL_32(3, dst[3]); |
2708 | 2708 |
2709 TEARDOWN(); | 2709 TEARDOWN(); |
2710 } | 2710 } |
2711 | 2711 |
2712 | 2712 |
2713 TEST(load_store_float) { | 2713 TEST(load_store_float) { |
2714 INIT_V8(); | 2714 INIT_V8(); |
2715 SETUP(); | 2715 SETUP(); |
2716 | 2716 |
2717 float src[3] = {1.0, 2.0, 3.0}; | 2717 float src[3] = {1.0, 2.0, 3.0}; |
(...skipping 11 matching lines...)
2729 __ Ldr(s0, MemOperand(x17, sizeof(src[0]))); | 2729 __ Ldr(s0, MemOperand(x17, sizeof(src[0]))); |
2730 __ Str(s0, MemOperand(x18, sizeof(dst[0]), PostIndex)); | 2730 __ Str(s0, MemOperand(x18, sizeof(dst[0]), PostIndex)); |
2731 __ Ldr(s1, MemOperand(x19, sizeof(src[0]), PostIndex)); | 2731 __ Ldr(s1, MemOperand(x19, sizeof(src[0]), PostIndex)); |
2732 __ Str(s1, MemOperand(x20, 2 * sizeof(dst[0]), PreIndex)); | 2732 __ Str(s1, MemOperand(x20, 2 * sizeof(dst[0]), PreIndex)); |
2733 __ Ldr(s2, MemOperand(x21, 2 * sizeof(src[0]), PreIndex)); | 2733 __ Ldr(s2, MemOperand(x21, 2 * sizeof(src[0]), PreIndex)); |
2734 __ Str(s2, MemOperand(x22, sizeof(dst[0]))); | 2734 __ Str(s2, MemOperand(x22, sizeof(dst[0]))); |
2735 END(); | 2735 END(); |
2736 | 2736 |
2737 RUN(); | 2737 RUN(); |
2738 | 2738 |
2739 ASSERT_EQUAL_FP32(2.0, s0); | 2739 CHECK_EQUAL_FP32(2.0, s0); |
2740 ASSERT_EQUAL_FP32(2.0, dst[0]); | 2740 CHECK_EQUAL_FP32(2.0, dst[0]); |
2741 ASSERT_EQUAL_FP32(1.0, s1); | 2741 CHECK_EQUAL_FP32(1.0, s1); |
2742 ASSERT_EQUAL_FP32(1.0, dst[2]); | 2742 CHECK_EQUAL_FP32(1.0, dst[2]); |
2743 ASSERT_EQUAL_FP32(3.0, s2); | 2743 CHECK_EQUAL_FP32(3.0, s2); |
2744 ASSERT_EQUAL_FP32(3.0, dst[1]); | 2744 CHECK_EQUAL_FP32(3.0, dst[1]); |
2745 ASSERT_EQUAL_64(src_base, x17); | 2745 CHECK_EQUAL_64(src_base, x17); |
2746 ASSERT_EQUAL_64(dst_base + sizeof(dst[0]), x18); | 2746 CHECK_EQUAL_64(dst_base + sizeof(dst[0]), x18); |
2747 ASSERT_EQUAL_64(src_base + sizeof(src[0]), x19); | 2747 CHECK_EQUAL_64(src_base + sizeof(src[0]), x19); |
2748 ASSERT_EQUAL_64(dst_base + 2 * sizeof(dst[0]), x20); | 2748 CHECK_EQUAL_64(dst_base + 2 * sizeof(dst[0]), x20); |
2749 ASSERT_EQUAL_64(src_base + 2 * sizeof(src[0]), x21); | 2749 CHECK_EQUAL_64(src_base + 2 * sizeof(src[0]), x21); |
2750 ASSERT_EQUAL_64(dst_base, x22); | 2750 CHECK_EQUAL_64(dst_base, x22); |
2751 | 2751 |
2752 TEARDOWN(); | 2752 TEARDOWN(); |
2753 } | 2753 } |
2754 | 2754 |
2755 | 2755 |
2756 TEST(load_store_double) { | 2756 TEST(load_store_double) { |
2757 INIT_V8(); | 2757 INIT_V8(); |
2758 SETUP(); | 2758 SETUP(); |
2759 | 2759 |
2760 double src[3] = {1.0, 2.0, 3.0}; | 2760 double src[3] = {1.0, 2.0, 3.0}; |
(...skipping 11 matching lines...)
2772 __ Ldr(d0, MemOperand(x17, sizeof(src[0]))); | 2772 __ Ldr(d0, MemOperand(x17, sizeof(src[0]))); |
2773 __ Str(d0, MemOperand(x18, sizeof(dst[0]), PostIndex)); | 2773 __ Str(d0, MemOperand(x18, sizeof(dst[0]), PostIndex)); |
2774 __ Ldr(d1, MemOperand(x19, sizeof(src[0]), PostIndex)); | 2774 __ Ldr(d1, MemOperand(x19, sizeof(src[0]), PostIndex)); |
2775 __ Str(d1, MemOperand(x20, 2 * sizeof(dst[0]), PreIndex)); | 2775 __ Str(d1, MemOperand(x20, 2 * sizeof(dst[0]), PreIndex)); |
2776 __ Ldr(d2, MemOperand(x21, 2 * sizeof(src[0]), PreIndex)); | 2776 __ Ldr(d2, MemOperand(x21, 2 * sizeof(src[0]), PreIndex)); |
2777 __ Str(d2, MemOperand(x22, sizeof(dst[0]))); | 2777 __ Str(d2, MemOperand(x22, sizeof(dst[0]))); |
2778 END(); | 2778 END(); |
2779 | 2779 |
2780 RUN(); | 2780 RUN(); |
2781 | 2781 |
2782 ASSERT_EQUAL_FP64(2.0, d0); | 2782 CHECK_EQUAL_FP64(2.0, d0); |
2783 ASSERT_EQUAL_FP64(2.0, dst[0]); | 2783 CHECK_EQUAL_FP64(2.0, dst[0]); |
2784 ASSERT_EQUAL_FP64(1.0, d1); | 2784 CHECK_EQUAL_FP64(1.0, d1); |
2785 ASSERT_EQUAL_FP64(1.0, dst[2]); | 2785 CHECK_EQUAL_FP64(1.0, dst[2]); |
2786 ASSERT_EQUAL_FP64(3.0, d2); | 2786 CHECK_EQUAL_FP64(3.0, d2); |
2787 ASSERT_EQUAL_FP64(3.0, dst[1]); | 2787 CHECK_EQUAL_FP64(3.0, dst[1]); |
2788 ASSERT_EQUAL_64(src_base, x17); | 2788 CHECK_EQUAL_64(src_base, x17); |
2789 ASSERT_EQUAL_64(dst_base + sizeof(dst[0]), x18); | 2789 CHECK_EQUAL_64(dst_base + sizeof(dst[0]), x18); |
2790 ASSERT_EQUAL_64(src_base + sizeof(src[0]), x19); | 2790 CHECK_EQUAL_64(src_base + sizeof(src[0]), x19); |
2791 ASSERT_EQUAL_64(dst_base + 2 * sizeof(dst[0]), x20); | 2791 CHECK_EQUAL_64(dst_base + 2 * sizeof(dst[0]), x20); |
2792 ASSERT_EQUAL_64(src_base + 2 * sizeof(src[0]), x21); | 2792 CHECK_EQUAL_64(src_base + 2 * sizeof(src[0]), x21); |
2793 ASSERT_EQUAL_64(dst_base, x22); | 2793 CHECK_EQUAL_64(dst_base, x22); |
2794 | 2794 |
2795 TEARDOWN(); | 2795 TEARDOWN(); |
2796 } | 2796 } |
2797 | 2797 |
2798 | 2798 |
2799 TEST(ldp_stp_float) { | 2799 TEST(ldp_stp_float) { |
2800 INIT_V8(); | 2800 INIT_V8(); |
2801 SETUP(); | 2801 SETUP(); |
2802 | 2802 |
2803 float src[2] = {1.0, 2.0}; | 2803 float src[2] = {1.0, 2.0}; |
2804 float dst[3] = {0.0, 0.0, 0.0}; | 2804 float dst[3] = {0.0, 0.0, 0.0}; |
2805 uintptr_t src_base = reinterpret_cast<uintptr_t>(src); | 2805 uintptr_t src_base = reinterpret_cast<uintptr_t>(src); |
2806 uintptr_t dst_base = reinterpret_cast<uintptr_t>(dst); | 2806 uintptr_t dst_base = reinterpret_cast<uintptr_t>(dst); |
2807 | 2807 |
2808 START(); | 2808 START(); |
2809 __ Mov(x16, src_base); | 2809 __ Mov(x16, src_base); |
2810 __ Mov(x17, dst_base); | 2810 __ Mov(x17, dst_base); |
2811 __ Ldp(s31, s0, MemOperand(x16, 2 * sizeof(src[0]), PostIndex)); | 2811 __ Ldp(s31, s0, MemOperand(x16, 2 * sizeof(src[0]), PostIndex)); |
2812 __ Stp(s0, s31, MemOperand(x17, sizeof(dst[1]), PreIndex)); | 2812 __ Stp(s0, s31, MemOperand(x17, sizeof(dst[1]), PreIndex)); |
2813 END(); | 2813 END(); |
2814 | 2814 |
2815 RUN(); | 2815 RUN(); |
2816 | 2816 |
2817 ASSERT_EQUAL_FP32(1.0, s31); | 2817 CHECK_EQUAL_FP32(1.0, s31); |
2818 ASSERT_EQUAL_FP32(2.0, s0); | 2818 CHECK_EQUAL_FP32(2.0, s0); |
2819 ASSERT_EQUAL_FP32(0.0, dst[0]); | 2819 CHECK_EQUAL_FP32(0.0, dst[0]); |
2820 ASSERT_EQUAL_FP32(2.0, dst[1]); | 2820 CHECK_EQUAL_FP32(2.0, dst[1]); |
2821 ASSERT_EQUAL_FP32(1.0, dst[2]); | 2821 CHECK_EQUAL_FP32(1.0, dst[2]); |
2822 ASSERT_EQUAL_64(src_base + 2 * sizeof(src[0]), x16); | 2822 CHECK_EQUAL_64(src_base + 2 * sizeof(src[0]), x16); |
2823 ASSERT_EQUAL_64(dst_base + sizeof(dst[1]), x17); | 2823 CHECK_EQUAL_64(dst_base + sizeof(dst[1]), x17); |
2824 | 2824 |
2825 TEARDOWN(); | 2825 TEARDOWN(); |
2826 } | 2826 } |
2827 | 2827 |
2828 | 2828 |
2829 TEST(ldp_stp_double) { | 2829 TEST(ldp_stp_double) { |
2830 INIT_V8(); | 2830 INIT_V8(); |
2831 SETUP(); | 2831 SETUP(); |
2832 | 2832 |
2833 double src[2] = {1.0, 2.0}; | 2833 double src[2] = {1.0, 2.0}; |
2834 double dst[3] = {0.0, 0.0, 0.0}; | 2834 double dst[3] = {0.0, 0.0, 0.0}; |
2835 uintptr_t src_base = reinterpret_cast<uintptr_t>(src); | 2835 uintptr_t src_base = reinterpret_cast<uintptr_t>(src); |
2836 uintptr_t dst_base = reinterpret_cast<uintptr_t>(dst); | 2836 uintptr_t dst_base = reinterpret_cast<uintptr_t>(dst); |
2837 | 2837 |
2838 START(); | 2838 START(); |
2839 __ Mov(x16, src_base); | 2839 __ Mov(x16, src_base); |
2840 __ Mov(x17, dst_base); | 2840 __ Mov(x17, dst_base); |
2841 __ Ldp(d31, d0, MemOperand(x16, 2 * sizeof(src[0]), PostIndex)); | 2841 __ Ldp(d31, d0, MemOperand(x16, 2 * sizeof(src[0]), PostIndex)); |
2842 __ Stp(d0, d31, MemOperand(x17, sizeof(dst[1]), PreIndex)); | 2842 __ Stp(d0, d31, MemOperand(x17, sizeof(dst[1]), PreIndex)); |
2843 END(); | 2843 END(); |
2844 | 2844 |
2845 RUN(); | 2845 RUN(); |
2846 | 2846 |
2847 ASSERT_EQUAL_FP64(1.0, d31); | 2847 CHECK_EQUAL_FP64(1.0, d31); |
2848 ASSERT_EQUAL_FP64(2.0, d0); | 2848 CHECK_EQUAL_FP64(2.0, d0); |
2849 ASSERT_EQUAL_FP64(0.0, dst[0]); | 2849 CHECK_EQUAL_FP64(0.0, dst[0]); |
2850 ASSERT_EQUAL_FP64(2.0, dst[1]); | 2850 CHECK_EQUAL_FP64(2.0, dst[1]); |
2851 ASSERT_EQUAL_FP64(1.0, dst[2]); | 2851 CHECK_EQUAL_FP64(1.0, dst[2]); |
2852 ASSERT_EQUAL_64(src_base + 2 * sizeof(src[0]), x16); | 2852 CHECK_EQUAL_64(src_base + 2 * sizeof(src[0]), x16); |
2853 ASSERT_EQUAL_64(dst_base + sizeof(dst[1]), x17); | 2853 CHECK_EQUAL_64(dst_base + sizeof(dst[1]), x17); |
2854 | 2854 |
2855 TEARDOWN(); | 2855 TEARDOWN(); |
2856 } | 2856 } |
2857 | 2857 |
2858 | 2858 |
2859 TEST(ldp_stp_offset) { | 2859 TEST(ldp_stp_offset) { |
2860 INIT_V8(); | 2860 INIT_V8(); |
2861 SETUP(); | 2861 SETUP(); |
2862 | 2862 |
2863 uint64_t src[3] = {0x0011223344556677UL, 0x8899aabbccddeeffUL, | 2863 uint64_t src[3] = {0x0011223344556677UL, 0x8899aabbccddeeffUL, |
(...skipping 14 matching lines...)
2878 __ Ldp(x8, x9, MemOperand(x18, -16)); | 2878 __ Ldp(x8, x9, MemOperand(x18, -16)); |
2879 __ Stp(w0, w1, MemOperand(x17)); | 2879 __ Stp(w0, w1, MemOperand(x17)); |
2880 __ Stp(w2, w3, MemOperand(x17, 8)); | 2880 __ Stp(w2, w3, MemOperand(x17, 8)); |
2881 __ Stp(x4, x5, MemOperand(x17, 16)); | 2881 __ Stp(x4, x5, MemOperand(x17, 16)); |
2882 __ Stp(w6, w7, MemOperand(x19, -24)); | 2882 __ Stp(w6, w7, MemOperand(x19, -24)); |
2883 __ Stp(x8, x9, MemOperand(x19, -16)); | 2883 __ Stp(x8, x9, MemOperand(x19, -16)); |
2884 END(); | 2884 END(); |
2885 | 2885 |
2886 RUN(); | 2886 RUN(); |
2887 | 2887 |
2888 ASSERT_EQUAL_64(0x44556677, x0); | 2888 CHECK_EQUAL_64(0x44556677, x0); |
2889 ASSERT_EQUAL_64(0x00112233, x1); | 2889 CHECK_EQUAL_64(0x00112233, x1); |
2890 ASSERT_EQUAL_64(0x0011223344556677UL, dst[0]); | 2890 CHECK_EQUAL_64(0x0011223344556677UL, dst[0]); |
2891 ASSERT_EQUAL_64(0x00112233, x2); | 2891 CHECK_EQUAL_64(0x00112233, x2); |
2892 ASSERT_EQUAL_64(0xccddeeff, x3); | 2892 CHECK_EQUAL_64(0xccddeeff, x3); |
2893 ASSERT_EQUAL_64(0xccddeeff00112233UL, dst[1]); | 2893 CHECK_EQUAL_64(0xccddeeff00112233UL, dst[1]); |
2894 ASSERT_EQUAL_64(0x8899aabbccddeeffUL, x4); | 2894 CHECK_EQUAL_64(0x8899aabbccddeeffUL, x4); |
2895 ASSERT_EQUAL_64(0x8899aabbccddeeffUL, dst[2]); | 2895 CHECK_EQUAL_64(0x8899aabbccddeeffUL, dst[2]); |
2896 ASSERT_EQUAL_64(0xffeeddccbbaa9988UL, x5); | 2896 CHECK_EQUAL_64(0xffeeddccbbaa9988UL, x5); |
2897 ASSERT_EQUAL_64(0xffeeddccbbaa9988UL, dst[3]); | 2897 CHECK_EQUAL_64(0xffeeddccbbaa9988UL, dst[3]); |
2898 ASSERT_EQUAL_64(0x8899aabb, x6); | 2898 CHECK_EQUAL_64(0x8899aabb, x6); |
2899 ASSERT_EQUAL_64(0xbbaa9988, x7); | 2899 CHECK_EQUAL_64(0xbbaa9988, x7); |
2900 ASSERT_EQUAL_64(0xbbaa99888899aabbUL, dst[4]); | 2900 CHECK_EQUAL_64(0xbbaa99888899aabbUL, dst[4]); |
2901 ASSERT_EQUAL_64(0x8899aabbccddeeffUL, x8); | 2901 CHECK_EQUAL_64(0x8899aabbccddeeffUL, x8); |
2902 ASSERT_EQUAL_64(0x8899aabbccddeeffUL, dst[5]); | 2902 CHECK_EQUAL_64(0x8899aabbccddeeffUL, dst[5]); |
2903 ASSERT_EQUAL_64(0xffeeddccbbaa9988UL, x9); | 2903 CHECK_EQUAL_64(0xffeeddccbbaa9988UL, x9); |
2904 ASSERT_EQUAL_64(0xffeeddccbbaa9988UL, dst[6]); | 2904 CHECK_EQUAL_64(0xffeeddccbbaa9988UL, dst[6]); |
2905 ASSERT_EQUAL_64(src_base, x16); | 2905 CHECK_EQUAL_64(src_base, x16); |
2906 ASSERT_EQUAL_64(dst_base, x17); | 2906 CHECK_EQUAL_64(dst_base, x17); |
2907 ASSERT_EQUAL_64(src_base + 24, x18); | 2907 CHECK_EQUAL_64(src_base + 24, x18); |
2908 ASSERT_EQUAL_64(dst_base + 56, x19); | 2908 CHECK_EQUAL_64(dst_base + 56, x19); |
2909 | 2909 |
2910 TEARDOWN(); | 2910 TEARDOWN(); |
2911 } | 2911 } |
2912 | 2912 |
2913 | 2913 |
2914 TEST(ldnp_stnp_offset) { | 2914 TEST(ldnp_stnp_offset) { |
2915 INIT_V8(); | 2915 INIT_V8(); |
2916 SETUP(); | 2916 SETUP(); |
2917 | 2917 |
2918 uint64_t src[3] = {0x0011223344556677UL, 0x8899aabbccddeeffUL, | 2918 uint64_t src[3] = {0x0011223344556677UL, 0x8899aabbccddeeffUL, |
(...skipping 14 matching lines...)
2933 __ Ldnp(x8, x9, MemOperand(x18, -16)); | 2933 __ Ldnp(x8, x9, MemOperand(x18, -16)); |
2934 __ Stnp(w0, w1, MemOperand(x17)); | 2934 __ Stnp(w0, w1, MemOperand(x17)); |
2935 __ Stnp(w2, w3, MemOperand(x17, 8)); | 2935 __ Stnp(w2, w3, MemOperand(x17, 8)); |
2936 __ Stnp(x4, x5, MemOperand(x17, 16)); | 2936 __ Stnp(x4, x5, MemOperand(x17, 16)); |
2937 __ Stnp(w6, w7, MemOperand(x19, -24)); | 2937 __ Stnp(w6, w7, MemOperand(x19, -24)); |
2938 __ Stnp(x8, x9, MemOperand(x19, -16)); | 2938 __ Stnp(x8, x9, MemOperand(x19, -16)); |
2939 END(); | 2939 END(); |
2940 | 2940 |
2941 RUN(); | 2941 RUN(); |
2942 | 2942 |
2943 ASSERT_EQUAL_64(0x44556677, x0); | 2943 CHECK_EQUAL_64(0x44556677, x0); |
2944 ASSERT_EQUAL_64(0x00112233, x1); | 2944 CHECK_EQUAL_64(0x00112233, x1); |
2945 ASSERT_EQUAL_64(0x0011223344556677UL, dst[0]); | 2945 CHECK_EQUAL_64(0x0011223344556677UL, dst[0]); |
2946 ASSERT_EQUAL_64(0x00112233, x2); | 2946 CHECK_EQUAL_64(0x00112233, x2); |
2947 ASSERT_EQUAL_64(0xccddeeff, x3); | 2947 CHECK_EQUAL_64(0xccddeeff, x3); |
2948 ASSERT_EQUAL_64(0xccddeeff00112233UL, dst[1]); | 2948 CHECK_EQUAL_64(0xccddeeff00112233UL, dst[1]); |
2949 ASSERT_EQUAL_64(0x8899aabbccddeeffUL, x4); | 2949 CHECK_EQUAL_64(0x8899aabbccddeeffUL, x4); |
2950 ASSERT_EQUAL_64(0x8899aabbccddeeffUL, dst[2]); | 2950 CHECK_EQUAL_64(0x8899aabbccddeeffUL, dst[2]); |
2951 ASSERT_EQUAL_64(0xffeeddccbbaa9988UL, x5); | 2951 CHECK_EQUAL_64(0xffeeddccbbaa9988UL, x5); |
2952 ASSERT_EQUAL_64(0xffeeddccbbaa9988UL, dst[3]); | 2952 CHECK_EQUAL_64(0xffeeddccbbaa9988UL, dst[3]); |
2953 ASSERT_EQUAL_64(0x8899aabb, x6); | 2953 CHECK_EQUAL_64(0x8899aabb, x6); |
2954 ASSERT_EQUAL_64(0xbbaa9988, x7); | 2954 CHECK_EQUAL_64(0xbbaa9988, x7); |
2955 ASSERT_EQUAL_64(0xbbaa99888899aabbUL, dst[4]); | 2955 CHECK_EQUAL_64(0xbbaa99888899aabbUL, dst[4]); |
2956 ASSERT_EQUAL_64(0x8899aabbccddeeffUL, x8); | 2956 CHECK_EQUAL_64(0x8899aabbccddeeffUL, x8); |
2957 ASSERT_EQUAL_64(0x8899aabbccddeeffUL, dst[5]); | 2957 CHECK_EQUAL_64(0x8899aabbccddeeffUL, dst[5]); |
2958 ASSERT_EQUAL_64(0xffeeddccbbaa9988UL, x9); | 2958 CHECK_EQUAL_64(0xffeeddccbbaa9988UL, x9); |
2959 ASSERT_EQUAL_64(0xffeeddccbbaa9988UL, dst[6]); | 2959 CHECK_EQUAL_64(0xffeeddccbbaa9988UL, dst[6]); |
2960 ASSERT_EQUAL_64(src_base, x16); | 2960 CHECK_EQUAL_64(src_base, x16); |
2961 ASSERT_EQUAL_64(dst_base, x17); | 2961 CHECK_EQUAL_64(dst_base, x17); |
2962 ASSERT_EQUAL_64(src_base + 24, x18); | 2962 CHECK_EQUAL_64(src_base + 24, x18); |
2963 ASSERT_EQUAL_64(dst_base + 56, x19); | 2963 CHECK_EQUAL_64(dst_base + 56, x19); |
2964 | 2964 |
2965 TEARDOWN(); | 2965 TEARDOWN(); |
2966 } | 2966 } |
2967 | 2967 |
2968 | 2968 |
2969 TEST(ldp_stp_preindex) { | 2969 TEST(ldp_stp_preindex) { |
2970 INIT_V8(); | 2970 INIT_V8(); |
2971 SETUP(); | 2971 SETUP(); |
2972 | 2972 |
2973 uint64_t src[3] = {0x0011223344556677UL, 0x8899aabbccddeeffUL, | 2973 uint64_t src[3] = {0x0011223344556677UL, 0x8899aabbccddeeffUL, |
(...skipping 15 matching lines...)
2989 __ Ldp(x4, x5, MemOperand(x16, 8, PreIndex)); | 2989 __ Ldp(x4, x5, MemOperand(x16, 8, PreIndex)); |
2990 __ Mov(x21, x16); | 2990 __ Mov(x21, x16); |
2991 __ Ldp(x6, x7, MemOperand(x16, -8, PreIndex)); | 2991 __ Ldp(x6, x7, MemOperand(x16, -8, PreIndex)); |
2992 __ Stp(x7, x6, MemOperand(x18, 8, PreIndex)); | 2992 __ Stp(x7, x6, MemOperand(x18, 8, PreIndex)); |
2993 __ Mov(x22, x18); | 2993 __ Mov(x22, x18); |
2994 __ Stp(x5, x4, MemOperand(x18, -8, PreIndex)); | 2994 __ Stp(x5, x4, MemOperand(x18, -8, PreIndex)); |
2995 END(); | 2995 END(); |
2996 | 2996 |
2997 RUN(); | 2997 RUN(); |
2998 | 2998 |
2999 ASSERT_EQUAL_64(0x00112233, x0); | 2999 CHECK_EQUAL_64(0x00112233, x0); |
3000 ASSERT_EQUAL_64(0xccddeeff, x1); | 3000 CHECK_EQUAL_64(0xccddeeff, x1); |
3001 ASSERT_EQUAL_64(0x44556677, x2); | 3001 CHECK_EQUAL_64(0x44556677, x2); |
3002 ASSERT_EQUAL_64(0x00112233, x3); | 3002 CHECK_EQUAL_64(0x00112233, x3); |
3003 ASSERT_EQUAL_64(0xccddeeff00112233UL, dst[0]); | 3003 CHECK_EQUAL_64(0xccddeeff00112233UL, dst[0]); |
3004 ASSERT_EQUAL_64(0x0000000000112233UL, dst[1]); | 3004 CHECK_EQUAL_64(0x0000000000112233UL, dst[1]); |
3005 ASSERT_EQUAL_64(0x8899aabbccddeeffUL, x4); | 3005 CHECK_EQUAL_64(0x8899aabbccddeeffUL, x4); |
3006 ASSERT_EQUAL_64(0xffeeddccbbaa9988UL, x5); | 3006 CHECK_EQUAL_64(0xffeeddccbbaa9988UL, x5); |
3007 ASSERT_EQUAL_64(0x0011223344556677UL, x6); | 3007 CHECK_EQUAL_64(0x0011223344556677UL, x6); |
3008 ASSERT_EQUAL_64(0x8899aabbccddeeffUL, x7); | 3008 CHECK_EQUAL_64(0x8899aabbccddeeffUL, x7); |
3009 ASSERT_EQUAL_64(0xffeeddccbbaa9988UL, dst[2]); | 3009 CHECK_EQUAL_64(0xffeeddccbbaa9988UL, dst[2]); |
3010 ASSERT_EQUAL_64(0x8899aabbccddeeffUL, dst[3]); | 3010 CHECK_EQUAL_64(0x8899aabbccddeeffUL, dst[3]); |
3011 ASSERT_EQUAL_64(0x0011223344556677UL, dst[4]); | 3011 CHECK_EQUAL_64(0x0011223344556677UL, dst[4]); |
3012 ASSERT_EQUAL_64(src_base, x16); | 3012 CHECK_EQUAL_64(src_base, x16); |
3013 ASSERT_EQUAL_64(dst_base, x17); | 3013 CHECK_EQUAL_64(dst_base, x17); |
3014 ASSERT_EQUAL_64(dst_base + 16, x18); | 3014 CHECK_EQUAL_64(dst_base + 16, x18); |
3015 ASSERT_EQUAL_64(src_base + 4, x19); | 3015 CHECK_EQUAL_64(src_base + 4, x19); |
3016 ASSERT_EQUAL_64(dst_base + 4, x20); | 3016 CHECK_EQUAL_64(dst_base + 4, x20); |
3017 ASSERT_EQUAL_64(src_base + 8, x21); | 3017 CHECK_EQUAL_64(src_base + 8, x21); |
3018 ASSERT_EQUAL_64(dst_base + 24, x22); | 3018 CHECK_EQUAL_64(dst_base + 24, x22); |
3019 | 3019 |
3020 TEARDOWN(); | 3020 TEARDOWN(); |
3021 } | 3021 } |
3022 | 3022 |
3023 | 3023 |
3024 TEST(ldp_stp_postindex) { | 3024 TEST(ldp_stp_postindex) { |
3025 INIT_V8(); | 3025 INIT_V8(); |
3026 SETUP(); | 3026 SETUP(); |
3027 | 3027 |
3028 uint64_t src[4] = {0x0011223344556677UL, 0x8899aabbccddeeffUL, | 3028 uint64_t src[4] = {0x0011223344556677UL, 0x8899aabbccddeeffUL, |
(...skipping 15 matching lines...) |
3044 __ Ldp(x4, x5, MemOperand(x16, 8, PostIndex)); | 3044 __ Ldp(x4, x5, MemOperand(x16, 8, PostIndex)); |
3045 __ Mov(x21, x16); | 3045 __ Mov(x21, x16); |
3046 __ Ldp(x6, x7, MemOperand(x16, -8, PostIndex)); | 3046 __ Ldp(x6, x7, MemOperand(x16, -8, PostIndex)); |
3047 __ Stp(x7, x6, MemOperand(x18, 8, PostIndex)); | 3047 __ Stp(x7, x6, MemOperand(x18, 8, PostIndex)); |
3048 __ Mov(x22, x18); | 3048 __ Mov(x22, x18); |
3049 __ Stp(x5, x4, MemOperand(x18, -8, PostIndex)); | 3049 __ Stp(x5, x4, MemOperand(x18, -8, PostIndex)); |
3050 END(); | 3050 END(); |
3051 | 3051 |
3052 RUN(); | 3052 RUN(); |
3053 | 3053 |
3054 ASSERT_EQUAL_64(0x44556677, x0); | 3054 CHECK_EQUAL_64(0x44556677, x0); |
3055 ASSERT_EQUAL_64(0x00112233, x1); | 3055 CHECK_EQUAL_64(0x00112233, x1); |
3056 ASSERT_EQUAL_64(0x00112233, x2); | 3056 CHECK_EQUAL_64(0x00112233, x2); |
3057 ASSERT_EQUAL_64(0xccddeeff, x3); | 3057 CHECK_EQUAL_64(0xccddeeff, x3); |
3058 ASSERT_EQUAL_64(0x4455667700112233UL, dst[0]); | 3058 CHECK_EQUAL_64(0x4455667700112233UL, dst[0]); |
3059 ASSERT_EQUAL_64(0x0000000000112233UL, dst[1]); | 3059 CHECK_EQUAL_64(0x0000000000112233UL, dst[1]); |
3060 ASSERT_EQUAL_64(0x0011223344556677UL, x4); | 3060 CHECK_EQUAL_64(0x0011223344556677UL, x4); |
3061 ASSERT_EQUAL_64(0x8899aabbccddeeffUL, x5); | 3061 CHECK_EQUAL_64(0x8899aabbccddeeffUL, x5); |
3062 ASSERT_EQUAL_64(0x8899aabbccddeeffUL, x6); | 3062 CHECK_EQUAL_64(0x8899aabbccddeeffUL, x6); |
3063 ASSERT_EQUAL_64(0xffeeddccbbaa9988UL, x7); | 3063 CHECK_EQUAL_64(0xffeeddccbbaa9988UL, x7); |
3064 ASSERT_EQUAL_64(0xffeeddccbbaa9988UL, dst[2]); | 3064 CHECK_EQUAL_64(0xffeeddccbbaa9988UL, dst[2]); |
3065 ASSERT_EQUAL_64(0x8899aabbccddeeffUL, dst[3]); | 3065 CHECK_EQUAL_64(0x8899aabbccddeeffUL, dst[3]); |
3066 ASSERT_EQUAL_64(0x0011223344556677UL, dst[4]); | 3066 CHECK_EQUAL_64(0x0011223344556677UL, dst[4]); |
3067 ASSERT_EQUAL_64(src_base, x16); | 3067 CHECK_EQUAL_64(src_base, x16); |
3068 ASSERT_EQUAL_64(dst_base, x17); | 3068 CHECK_EQUAL_64(dst_base, x17); |
3069 ASSERT_EQUAL_64(dst_base + 16, x18); | 3069 CHECK_EQUAL_64(dst_base + 16, x18); |
3070 ASSERT_EQUAL_64(src_base + 4, x19); | 3070 CHECK_EQUAL_64(src_base + 4, x19); |
3071 ASSERT_EQUAL_64(dst_base + 4, x20); | 3071 CHECK_EQUAL_64(dst_base + 4, x20); |
3072 ASSERT_EQUAL_64(src_base + 8, x21); | 3072 CHECK_EQUAL_64(src_base + 8, x21); |
3073 ASSERT_EQUAL_64(dst_base + 24, x22); | 3073 CHECK_EQUAL_64(dst_base + 24, x22); |
3074 | 3074 |
3075 TEARDOWN(); | 3075 TEARDOWN(); |
3076 } | 3076 } |
3077 | 3077 |
3078 | 3078 |
3079 TEST(ldp_sign_extend) { | 3079 TEST(ldp_sign_extend) { |
3080 INIT_V8(); | 3080 INIT_V8(); |
3081 SETUP(); | 3081 SETUP(); |
3082 | 3082 |
3083 uint32_t src[2] = {0x80000000, 0x7fffffff}; | 3083 uint32_t src[2] = {0x80000000, 0x7fffffff}; |
3084 uintptr_t src_base = reinterpret_cast<uintptr_t>(src); | 3084 uintptr_t src_base = reinterpret_cast<uintptr_t>(src); |
3085 | 3085 |
3086 START(); | 3086 START(); |
3087 __ Mov(x24, src_base); | 3087 __ Mov(x24, src_base); |
3088 __ Ldpsw(x0, x1, MemOperand(x24)); | 3088 __ Ldpsw(x0, x1, MemOperand(x24)); |
3089 END(); | 3089 END(); |
3090 | 3090 |
3091 RUN(); | 3091 RUN(); |
3092 | 3092 |
3093 ASSERT_EQUAL_64(0xffffffff80000000UL, x0); | 3093 CHECK_EQUAL_64(0xffffffff80000000UL, x0); |
3094 ASSERT_EQUAL_64(0x000000007fffffffUL, x1); | 3094 CHECK_EQUAL_64(0x000000007fffffffUL, x1); |
3095 | 3095 |
3096 TEARDOWN(); | 3096 TEARDOWN(); |
3097 } | 3097 } |
3098 | 3098 |
3099 | 3099 |
3100 TEST(ldur_stur) { | 3100 TEST(ldur_stur) { |
3101 INIT_V8(); | 3101 INIT_V8(); |
3102 SETUP(); | 3102 SETUP(); |
3103 | 3103 |
3104 int64_t src[2] = {0x0123456789abcdefUL, 0x0123456789abcdefUL}; | 3104 int64_t src[2] = {0x0123456789abcdefUL, 0x0123456789abcdefUL}; |
(...skipping 12 matching lines...) |
3117 __ Ldr(x1, MemOperand(x17, 3)); | 3117 __ Ldr(x1, MemOperand(x17, 3)); |
3118 __ Str(x1, MemOperand(x18, 9)); | 3118 __ Str(x1, MemOperand(x18, 9)); |
3119 __ Ldr(w2, MemOperand(x19, -9)); | 3119 __ Ldr(w2, MemOperand(x19, -9)); |
3120 __ Str(w2, MemOperand(x20, -5)); | 3120 __ Str(w2, MemOperand(x20, -5)); |
3121 __ Ldrb(w3, MemOperand(x19, -1)); | 3121 __ Ldrb(w3, MemOperand(x19, -1)); |
3122 __ Strb(w3, MemOperand(x21, -1)); | 3122 __ Strb(w3, MemOperand(x21, -1)); |
3123 END(); | 3123 END(); |
3124 | 3124 |
3125 RUN(); | 3125 RUN(); |
3126 | 3126 |
3127 ASSERT_EQUAL_64(0x6789abcd, x0); | 3127 CHECK_EQUAL_64(0x6789abcd, x0); |
3128 ASSERT_EQUAL_64(0x6789abcd0000L, dst[0]); | 3128 CHECK_EQUAL_64(0x6789abcd0000L, dst[0]); |
3129 ASSERT_EQUAL_64(0xabcdef0123456789L, x1); | 3129 CHECK_EQUAL_64(0xabcdef0123456789L, x1); |
3130 ASSERT_EQUAL_64(0xcdef012345678900L, dst[1]); | 3130 CHECK_EQUAL_64(0xcdef012345678900L, dst[1]); |
3131 ASSERT_EQUAL_64(0x000000ab, dst[2]); | 3131 CHECK_EQUAL_64(0x000000ab, dst[2]); |
3132 ASSERT_EQUAL_64(0xabcdef01, x2); | 3132 CHECK_EQUAL_64(0xabcdef01, x2); |
3133 ASSERT_EQUAL_64(0x00abcdef01000000L, dst[3]); | 3133 CHECK_EQUAL_64(0x00abcdef01000000L, dst[3]); |
3134 ASSERT_EQUAL_64(0x00000001, x3); | 3134 CHECK_EQUAL_64(0x00000001, x3); |
3135 ASSERT_EQUAL_64(0x0100000000000000L, dst[4]); | 3135 CHECK_EQUAL_64(0x0100000000000000L, dst[4]); |
3136 ASSERT_EQUAL_64(src_base, x17); | 3136 CHECK_EQUAL_64(src_base, x17); |
3137 ASSERT_EQUAL_64(dst_base, x18); | 3137 CHECK_EQUAL_64(dst_base, x18); |
3138 ASSERT_EQUAL_64(src_base + 16, x19); | 3138 CHECK_EQUAL_64(src_base + 16, x19); |
3139 ASSERT_EQUAL_64(dst_base + 32, x20); | 3139 CHECK_EQUAL_64(dst_base + 32, x20); |
3140 | 3140 |
3141 TEARDOWN(); | 3141 TEARDOWN(); |
3142 } | 3142 } |
3143 | 3143 |
3144 | 3144 |
3145 #if 0 // TODO(all) enable. | 3145 #if 0 // TODO(all) enable. |
3146 // TODO(rodolph): Adapt w16 Literal tests for RelocInfo. | 3146 // TODO(rodolph): Adapt w16 Literal tests for RelocInfo. |
3147 TEST(ldr_literal) { | 3147 TEST(ldr_literal) { |
3148 INIT_V8(); | 3148 INIT_V8(); |
3149 SETUP(); | 3149 SETUP(); |
3150 | 3150 |
3151 START(); | 3151 START(); |
3152 __ Ldr(x2, 0x1234567890abcdefUL); | 3152 __ Ldr(x2, 0x1234567890abcdefUL); |
3153 __ Ldr(w3, 0xfedcba09); | 3153 __ Ldr(w3, 0xfedcba09); |
3154 __ Ldr(d13, 1.234); | 3154 __ Ldr(d13, 1.234); |
3155 __ Ldr(s25, 2.5); | 3155 __ Ldr(s25, 2.5); |
3156 END(); | 3156 END(); |
3157 | 3157 |
3158 RUN(); | 3158 RUN(); |
3159 | 3159 |
3160 ASSERT_EQUAL_64(0x1234567890abcdefUL, x2); | 3160 CHECK_EQUAL_64(0x1234567890abcdefUL, x2); |
3161 ASSERT_EQUAL_64(0xfedcba09, x3); | 3161 CHECK_EQUAL_64(0xfedcba09, x3); |
3162 ASSERT_EQUAL_FP64(1.234, d13); | 3162 CHECK_EQUAL_FP64(1.234, d13); |
3163 ASSERT_EQUAL_FP32(2.5, s25); | 3163 CHECK_EQUAL_FP32(2.5, s25); |
3164 | 3164 |
3165 TEARDOWN(); | 3165 TEARDOWN(); |
3166 } | 3166 } |
3167 | 3167 |
3168 | 3168 |
3169 static void LdrLiteralRangeHelper(ptrdiff_t range_, | 3169 static void LdrLiteralRangeHelper(ptrdiff_t range_, |
3170 LiteralPoolEmitOption option, | 3170 LiteralPoolEmitOption option, |
3171 bool expect_dump) { | 3171 bool expect_dump) { |
3172 ASSERT(range_ > 0); | 3172 DCHECK(range_ > 0); |
3173 SETUP_SIZE(range_ + 1024); | 3173 SETUP_SIZE(range_ + 1024); |
3174 | 3174 |
3175 Label label_1, label_2; | 3175 Label label_1, label_2; |
3176 | 3176 |
3177 size_t range = static_cast<size_t>(range_); | 3177 size_t range = static_cast<size_t>(range_); |
3178 size_t code_size = 0; | 3178 size_t code_size = 0; |
3179 size_t pool_guard_size; | 3179 size_t pool_guard_size; |
3180 | 3180 |
3181 if (option == NoJumpRequired) { | 3181 if (option == NoJumpRequired) { |
3182 // Space for an explicit branch. | 3182 // Space for an explicit branch. |
3183 pool_guard_size = sizeof(Instr); | 3183 pool_guard_size = sizeof(Instr); |
3184 } else { | 3184 } else { |
3185 pool_guard_size = 0; | 3185 pool_guard_size = 0; |
3186 } | 3186 } |
3187 | 3187 |
3188 START(); | 3188 START(); |
3189 // Force a pool dump so the pool starts off empty. | 3189 // Force a pool dump so the pool starts off empty. |
3190 __ EmitLiteralPool(JumpRequired); | 3190 __ EmitLiteralPool(JumpRequired); |
3191 ASSERT_LITERAL_POOL_SIZE(0); | 3191 DCHECK_LITERAL_POOL_SIZE(0); |
3192 | 3192 |
3193 __ Ldr(x0, 0x1234567890abcdefUL); | 3193 __ Ldr(x0, 0x1234567890abcdefUL); |
3194 __ Ldr(w1, 0xfedcba09); | 3194 __ Ldr(w1, 0xfedcba09); |
3195 __ Ldr(d0, 1.234); | 3195 __ Ldr(d0, 1.234); |
3196 __ Ldr(s1, 2.5); | 3196 __ Ldr(s1, 2.5); |
3197 ASSERT_LITERAL_POOL_SIZE(4); | 3197 DCHECK_LITERAL_POOL_SIZE(4); |
3198 | 3198 |
3199 code_size += 4 * sizeof(Instr); | 3199 code_size += 4 * sizeof(Instr); |
3200 | 3200 |
3201 // Check that the requested range (allowing space for a branch over the pool) | 3201 // Check that the requested range (allowing space for a branch over the pool) |
3202 // can be handled by this test. | 3202 // can be handled by this test. |
3203 ASSERT((code_size + pool_guard_size) <= range); | 3203 DCHECK((code_size + pool_guard_size) <= range); |
3204 | 3204 |
3205 // Emit NOPs up to 'range', leaving space for the pool guard. | 3205 // Emit NOPs up to 'range', leaving space for the pool guard. |
3206 while ((code_size + pool_guard_size) < range) { | 3206 while ((code_size + pool_guard_size) < range) { |
3207 __ Nop(); | 3207 __ Nop(); |
3208 code_size += sizeof(Instr); | 3208 code_size += sizeof(Instr); |
3209 } | 3209 } |
3210 | 3210 |
3211 // Emit the guard sequence before the literal pool. | 3211 // Emit the guard sequence before the literal pool. |
3212 if (option == NoJumpRequired) { | 3212 if (option == NoJumpRequired) { |
3213 __ B(&label_1); | 3213 __ B(&label_1); |
3214 code_size += sizeof(Instr); | 3214 code_size += sizeof(Instr); |
3215 } | 3215 } |
3216 | 3216 |
3217 ASSERT(code_size == range); | 3217 DCHECK(code_size == range); |
3218 ASSERT_LITERAL_POOL_SIZE(4); | 3218 DCHECK_LITERAL_POOL_SIZE(4); |
3219 | 3219 |
3220 // Possibly generate a literal pool. | 3220 // Possibly generate a literal pool. |
3221 __ CheckLiteralPool(option); | 3221 __ CheckLiteralPool(option); |
3222 __ Bind(&label_1); | 3222 __ Bind(&label_1); |
3223 if (expect_dump) { | 3223 if (expect_dump) { |
3224 ASSERT_LITERAL_POOL_SIZE(0); | 3224 DCHECK_LITERAL_POOL_SIZE(0); |
3225 } else { | 3225 } else { |
3226 ASSERT_LITERAL_POOL_SIZE(4); | 3226 DCHECK_LITERAL_POOL_SIZE(4); |
3227 } | 3227 } |
3228 | 3228 |
3229 // Force a pool flush to check that a second pool functions correctly. | 3229 // Force a pool flush to check that a second pool functions correctly. |
3230 __ EmitLiteralPool(JumpRequired); | 3230 __ EmitLiteralPool(JumpRequired); |
3231 ASSERT_LITERAL_POOL_SIZE(0); | 3231 DCHECK_LITERAL_POOL_SIZE(0); |
3232 | 3232 |
3233 // These loads should be after the pool (and will require a new one). | 3233 // These loads should be after the pool (and will require a new one). |
3234 __ Ldr(x4, 0x34567890abcdef12UL); | 3234 __ Ldr(x4, 0x34567890abcdef12UL); |
3235 __ Ldr(w5, 0xdcba09fe); | 3235 __ Ldr(w5, 0xdcba09fe); |
3236 __ Ldr(d4, 123.4); | 3236 __ Ldr(d4, 123.4); |
3237 __ Ldr(s5, 250.0); | 3237 __ Ldr(s5, 250.0); |
3238 ASSERT_LITERAL_POOL_SIZE(4); | 3238 DCHECK_LITERAL_POOL_SIZE(4); |
3239 END(); | 3239 END(); |
3240 | 3240 |
3241 RUN(); | 3241 RUN(); |
3242 | 3242 |
3243 // Check that the literals loaded correctly. | 3243 // Check that the literals loaded correctly. |
3244 ASSERT_EQUAL_64(0x1234567890abcdefUL, x0); | 3244 CHECK_EQUAL_64(0x1234567890abcdefUL, x0); |
3245 ASSERT_EQUAL_64(0xfedcba09, x1); | 3245 CHECK_EQUAL_64(0xfedcba09, x1); |
3246 ASSERT_EQUAL_FP64(1.234, d0); | 3246 CHECK_EQUAL_FP64(1.234, d0); |
3247 ASSERT_EQUAL_FP32(2.5, s1); | 3247 CHECK_EQUAL_FP32(2.5, s1); |
3248 ASSERT_EQUAL_64(0x34567890abcdef12UL, x4); | 3248 CHECK_EQUAL_64(0x34567890abcdef12UL, x4); |
3249 ASSERT_EQUAL_64(0xdcba09fe, x5); | 3249 CHECK_EQUAL_64(0xdcba09fe, x5); |
3250 ASSERT_EQUAL_FP64(123.4, d4); | 3250 CHECK_EQUAL_FP64(123.4, d4); |
3251 ASSERT_EQUAL_FP32(250.0, s5); | 3251 CHECK_EQUAL_FP32(250.0, s5); |
3252 | 3252 |
3253 TEARDOWN(); | 3253 TEARDOWN(); |
3254 } | 3254 } |
3255 | 3255 |
3256 | 3256 |
3257 TEST(ldr_literal_range_1) { | 3257 TEST(ldr_literal_range_1) { |
3258 INIT_V8(); | 3258 INIT_V8(); |
3259 LdrLiteralRangeHelper(kRecommendedLiteralPoolRange, | 3259 LdrLiteralRangeHelper(kRecommendedLiteralPoolRange, |
3260 NoJumpRequired, | 3260 NoJumpRequired, |
3261 true); | 3261 true); |
(...skipping 66 matching lines...) |
3328 __ Sub(x23, x3, Operand(1)); | 3328 __ Sub(x23, x3, Operand(1)); |
3329 | 3329 |
3330 __ Sub(w24, w0, Operand(0x1)); | 3330 __ Sub(w24, w0, Operand(0x1)); |
3331 __ Sub(w25, w1, Operand(0x111)); | 3331 __ Sub(w25, w1, Operand(0x111)); |
3332 __ Sub(w26, w1, Operand(0x1 << 12)); | 3332 __ Sub(w26, w1, Operand(0x1 << 12)); |
3333 __ Sub(w27, w3, Operand(1)); | 3333 __ Sub(w27, w3, Operand(1)); |
3334 END(); | 3334 END(); |
3335 | 3335 |
3336 RUN(); | 3336 RUN(); |
3337 | 3337 |
3338 ASSERT_EQUAL_64(0x123, x10); | 3338 CHECK_EQUAL_64(0x123, x10); |
3339 ASSERT_EQUAL_64(0x123111, x11); | 3339 CHECK_EQUAL_64(0x123111, x11); |
3340 ASSERT_EQUAL_64(0xabc000, x12); | 3340 CHECK_EQUAL_64(0xabc000, x12); |
3341 ASSERT_EQUAL_64(0x0, x13); | 3341 CHECK_EQUAL_64(0x0, x13); |
3342 | 3342 |
3343 ASSERT_EQUAL_32(0x123, w14); | 3343 CHECK_EQUAL_32(0x123, w14); |
3344 ASSERT_EQUAL_32(0x123111, w15); | 3344 CHECK_EQUAL_32(0x123111, w15); |
3345 ASSERT_EQUAL_32(0xabc000, w16); | 3345 CHECK_EQUAL_32(0xabc000, w16); |
3346 ASSERT_EQUAL_32(0x0, w17); | 3346 CHECK_EQUAL_32(0x0, w17); |
3347 | 3347 |
3348 ASSERT_EQUAL_64(0xffffffffffffffffL, x20); | 3348 CHECK_EQUAL_64(0xffffffffffffffffL, x20); |
3349 ASSERT_EQUAL_64(0x1000, x21); | 3349 CHECK_EQUAL_64(0x1000, x21); |
3350 ASSERT_EQUAL_64(0x111, x22); | 3350 CHECK_EQUAL_64(0x111, x22); |
3351 ASSERT_EQUAL_64(0x7fffffffffffffffL, x23); | 3351 CHECK_EQUAL_64(0x7fffffffffffffffL, x23); |
3352 | 3352 |
3353 ASSERT_EQUAL_32(0xffffffff, w24); | 3353 CHECK_EQUAL_32(0xffffffff, w24); |
3354 ASSERT_EQUAL_32(0x1000, w25); | 3354 CHECK_EQUAL_32(0x1000, w25); |
3355 ASSERT_EQUAL_32(0x111, w26); | 3355 CHECK_EQUAL_32(0x111, w26); |
3356 ASSERT_EQUAL_32(0xffffffff, w27); | 3356 CHECK_EQUAL_32(0xffffffff, w27); |
3357 | 3357 |
3358 TEARDOWN(); | 3358 TEARDOWN(); |
3359 } | 3359 } |
3360 | 3360 |
3361 | 3361 |
3362 TEST(add_sub_wide_imm) { | 3362 TEST(add_sub_wide_imm) { |
3363 INIT_V8(); | 3363 INIT_V8(); |
3364 SETUP(); | 3364 SETUP(); |
3365 | 3365 |
3366 START(); | 3366 START(); |
3367 __ Mov(x0, 0x0); | 3367 __ Mov(x0, 0x0); |
3368 __ Mov(x1, 0x1); | 3368 __ Mov(x1, 0x1); |
3369 | 3369 |
3370 __ Add(x10, x0, Operand(0x1234567890abcdefUL)); | 3370 __ Add(x10, x0, Operand(0x1234567890abcdefUL)); |
3371 __ Add(x11, x1, Operand(0xffffffff)); | 3371 __ Add(x11, x1, Operand(0xffffffff)); |
3372 | 3372 |
3373 __ Add(w12, w0, Operand(0x12345678)); | 3373 __ Add(w12, w0, Operand(0x12345678)); |
3374 __ Add(w13, w1, Operand(0xffffffff)); | 3374 __ Add(w13, w1, Operand(0xffffffff)); |
3375 | 3375 |
3376 __ Add(w18, w0, Operand(kWMinInt)); | 3376 __ Add(w18, w0, Operand(kWMinInt)); |
3377 __ Sub(w19, w0, Operand(kWMinInt)); | 3377 __ Sub(w19, w0, Operand(kWMinInt)); |
3378 | 3378 |
3379 __ Sub(x20, x0, Operand(0x1234567890abcdefUL)); | 3379 __ Sub(x20, x0, Operand(0x1234567890abcdefUL)); |
3380 __ Sub(w21, w0, Operand(0x12345678)); | 3380 __ Sub(w21, w0, Operand(0x12345678)); |
3381 END(); | 3381 END(); |
3382 | 3382 |
3383 RUN(); | 3383 RUN(); |
3384 | 3384 |
3385 ASSERT_EQUAL_64(0x1234567890abcdefUL, x10); | 3385 CHECK_EQUAL_64(0x1234567890abcdefUL, x10); |
3386 ASSERT_EQUAL_64(0x100000000UL, x11); | 3386 CHECK_EQUAL_64(0x100000000UL, x11); |
3387 | 3387 |
3388 ASSERT_EQUAL_32(0x12345678, w12); | 3388 CHECK_EQUAL_32(0x12345678, w12); |
3389 ASSERT_EQUAL_64(0x0, x13); | 3389 CHECK_EQUAL_64(0x0, x13); |
3390 | 3390 |
3391 ASSERT_EQUAL_32(kWMinInt, w18); | 3391 CHECK_EQUAL_32(kWMinInt, w18); |
3392 ASSERT_EQUAL_32(kWMinInt, w19); | 3392 CHECK_EQUAL_32(kWMinInt, w19); |
3393 | 3393 |
3394 ASSERT_EQUAL_64(-0x1234567890abcdefUL, x20); | 3394 CHECK_EQUAL_64(-0x1234567890abcdefUL, x20); |
3395 ASSERT_EQUAL_32(-0x12345678, w21); | 3395 CHECK_EQUAL_32(-0x12345678, w21); |
3396 | 3396 |
3397 TEARDOWN(); | 3397 TEARDOWN(); |
3398 } | 3398 } |
3399 | 3399 |
3400 | 3400 |
3401 TEST(add_sub_shifted) { | 3401 TEST(add_sub_shifted) { |
3402 INIT_V8(); | 3402 INIT_V8(); |
3403 SETUP(); | 3403 SETUP(); |
3404 | 3404 |
3405 START(); | 3405 START(); |
(...skipping 16 matching lines...) |
3422 __ Sub(x22, x3, Operand(x1, LSR, 8)); | 3422 __ Sub(x22, x3, Operand(x1, LSR, 8)); |
3423 __ Sub(x23, x3, Operand(x1, ASR, 8)); | 3423 __ Sub(x23, x3, Operand(x1, ASR, 8)); |
3424 __ Sub(x24, x3, Operand(x2, ASR, 8)); | 3424 __ Sub(x24, x3, Operand(x2, ASR, 8)); |
3425 __ Sub(w25, w3, Operand(w1, ASR, 8)); | 3425 __ Sub(w25, w3, Operand(w1, ASR, 8)); |
3426 __ Sub(w26, w3, Operand(w1, ROR, 8)); | 3426 __ Sub(w26, w3, Operand(w1, ROR, 8)); |
3427 __ Sub(x27, x3, Operand(x1, ROR, 8)); | 3427 __ Sub(x27, x3, Operand(x1, ROR, 8)); |
3428 END(); | 3428 END(); |
3429 | 3429 |
3430 RUN(); | 3430 RUN(); |
3431 | 3431 |
3432 ASSERT_EQUAL_64(0xffffffffffffffffL, x10); | 3432 CHECK_EQUAL_64(0xffffffffffffffffL, x10); |
3433 ASSERT_EQUAL_64(0x23456789abcdef00L, x11); | 3433 CHECK_EQUAL_64(0x23456789abcdef00L, x11); |
3434 ASSERT_EQUAL_64(0x000123456789abcdL, x12); | 3434 CHECK_EQUAL_64(0x000123456789abcdL, x12); |
3435 ASSERT_EQUAL_64(0x000123456789abcdL, x13); | 3435 CHECK_EQUAL_64(0x000123456789abcdL, x13); |
3436 ASSERT_EQUAL_64(0xfffedcba98765432L, x14); | 3436 CHECK_EQUAL_64(0xfffedcba98765432L, x14); |
3437 ASSERT_EQUAL_64(0xff89abcd, x15); | 3437 CHECK_EQUAL_64(0xff89abcd, x15); |
3438 ASSERT_EQUAL_64(0xef89abcc, x18); | 3438 CHECK_EQUAL_64(0xef89abcc, x18); |
3439 ASSERT_EQUAL_64(0xef0123456789abccL, x19); | 3439 CHECK_EQUAL_64(0xef0123456789abccL, x19); |
3440 | 3440 |
3441 ASSERT_EQUAL_64(0x0123456789abcdefL, x20); | 3441 CHECK_EQUAL_64(0x0123456789abcdefL, x20); |
3442 ASSERT_EQUAL_64(0xdcba9876543210ffL, x21); | 3442 CHECK_EQUAL_64(0xdcba9876543210ffL, x21); |
3443 ASSERT_EQUAL_64(0xfffedcba98765432L, x22); | 3443 CHECK_EQUAL_64(0xfffedcba98765432L, x22); |
3444 ASSERT_EQUAL_64(0xfffedcba98765432L, x23); | 3444 CHECK_EQUAL_64(0xfffedcba98765432L, x23); |
3445 ASSERT_EQUAL_64(0x000123456789abcdL, x24); | 3445 CHECK_EQUAL_64(0x000123456789abcdL, x24); |
3446 ASSERT_EQUAL_64(0x00765432, x25); | 3446 CHECK_EQUAL_64(0x00765432, x25); |
3447 ASSERT_EQUAL_64(0x10765432, x26); | 3447 CHECK_EQUAL_64(0x10765432, x26); |
3448 ASSERT_EQUAL_64(0x10fedcba98765432L, x27); | 3448 CHECK_EQUAL_64(0x10fedcba98765432L, x27); |
3449 | 3449 |
3450 TEARDOWN(); | 3450 TEARDOWN(); |
3451 } | 3451 } |
3452 | 3452 |
3453 | 3453 |
3454 TEST(add_sub_extended) { | 3454 TEST(add_sub_extended) { |
3455 INIT_V8(); | 3455 INIT_V8(); |
3456 SETUP(); | 3456 SETUP(); |
3457 | 3457 |
3458 START(); | 3458 START(); |
(...skipping 25 matching lines...) |
3484 __ Add(w27, w2, Operand(w1, SXTW, 3)); | 3484 __ Add(w27, w2, Operand(w1, SXTW, 3)); |
3485 | 3485 |
3486 __ Add(w28, w0, Operand(w1, SXTW, 3)); | 3486 __ Add(w28, w0, Operand(w1, SXTW, 3)); |
3487 __ Add(x29, x0, Operand(w1, SXTW, 3)); | 3487 __ Add(x29, x0, Operand(w1, SXTW, 3)); |
3488 | 3488 |
3489 __ Sub(x30, x0, Operand(w3, SXTB, 1)); | 3489 __ Sub(x30, x0, Operand(w3, SXTB, 1)); |
3490 END(); | 3490 END(); |
3491 | 3491 |
3492 RUN(); | 3492 RUN(); |
3493 | 3493 |
3494 ASSERT_EQUAL_64(0xefL, x10); | 3494 CHECK_EQUAL_64(0xefL, x10); |
3495 ASSERT_EQUAL_64(0x1deL, x11); | 3495 CHECK_EQUAL_64(0x1deL, x11); |
3496 ASSERT_EQUAL_64(0x337bcL, x12); | 3496 CHECK_EQUAL_64(0x337bcL, x12); |
3497 ASSERT_EQUAL_64(0x89abcdef0L, x13); | 3497 CHECK_EQUAL_64(0x89abcdef0L, x13); |
3498 | 3498 |
3499 ASSERT_EQUAL_64(0xffffffffffffffefL, x14); | 3499 CHECK_EQUAL_64(0xffffffffffffffefL, x14); |
3500 ASSERT_EQUAL_64(0xffffffffffffffdeL, x15); | 3500 CHECK_EQUAL_64(0xffffffffffffffdeL, x15); |
3501 ASSERT_EQUAL_64(0xffffffffffff37bcL, x16); | 3501 CHECK_EQUAL_64(0xffffffffffff37bcL, x16); |
3502 ASSERT_EQUAL_64(0xfffffffc4d5e6f78L, x17); | 3502 CHECK_EQUAL_64(0xfffffffc4d5e6f78L, x17); |
3503 ASSERT_EQUAL_64(0x10L, x18); | 3503 CHECK_EQUAL_64(0x10L, x18); |
3504 ASSERT_EQUAL_64(0x20L, x19); | 3504 CHECK_EQUAL_64(0x20L, x19); |
3505 ASSERT_EQUAL_64(0xc840L, x20); | 3505 CHECK_EQUAL_64(0xc840L, x20); |
3506 ASSERT_EQUAL_64(0x3b2a19080L, x21); | 3506 CHECK_EQUAL_64(0x3b2a19080L, x21); |
3507 | 3507 |
3508 ASSERT_EQUAL_64(0x0123456789abce0fL, x22); | 3508 CHECK_EQUAL_64(0x0123456789abce0fL, x22); |
3509 ASSERT_EQUAL_64(0x0123456789abcdcfL, x23); | 3509 CHECK_EQUAL_64(0x0123456789abcdcfL, x23); |
3510 | 3510 |
3511 ASSERT_EQUAL_32(0x89abce2f, w24); | 3511 CHECK_EQUAL_32(0x89abce2f, w24); |
3512 ASSERT_EQUAL_32(0xffffffef, w25); | 3512 CHECK_EQUAL_32(0xffffffef, w25); |
3513 ASSERT_EQUAL_32(0xffffffde, w26); | 3513 CHECK_EQUAL_32(0xffffffde, w26); |
3514 ASSERT_EQUAL_32(0xc3b2a188, w27); | 3514 CHECK_EQUAL_32(0xc3b2a188, w27); |
3515 | 3515 |
3516 ASSERT_EQUAL_32(0x4d5e6f78, w28); | 3516 CHECK_EQUAL_32(0x4d5e6f78, w28); |
3517 ASSERT_EQUAL_64(0xfffffffc4d5e6f78L, x29); | 3517 CHECK_EQUAL_64(0xfffffffc4d5e6f78L, x29); |
3518 | 3518 |
3519 ASSERT_EQUAL_64(256, x30); | 3519 CHECK_EQUAL_64(256, x30); |
3520 | 3520 |
3521 TEARDOWN(); | 3521 TEARDOWN(); |
3522 } | 3522 } |
3523 | 3523 |
3524 | 3524 |
3525 TEST(add_sub_negative) { | 3525 TEST(add_sub_negative) { |
3526 INIT_V8(); | 3526 INIT_V8(); |
3527 SETUP(); | 3527 SETUP(); |
3528 | 3528 |
3529 START(); | 3529 START(); |
(...skipping 13 matching lines...) |
3543 | 3543 |
3544 __ Add(w19, w3, -0x344); | 3544 __ Add(w19, w3, -0x344); |
3545 __ Add(w20, w4, -2000); | 3545 __ Add(w20, w4, -2000); |
3546 | 3546 |
3547 __ Sub(w21, w3, -0xbc); | 3547 __ Sub(w21, w3, -0xbc); |
3548 __ Sub(w22, w4, -2000); | 3548 __ Sub(w22, w4, -2000); |
3549 END(); | 3549 END(); |
3550 | 3550 |
3551 RUN(); | 3551 RUN(); |
3552 | 3552 |
3553 ASSERT_EQUAL_64(-42, x10); | 3553 CHECK_EQUAL_64(-42, x10); |
3554 ASSERT_EQUAL_64(4000, x11); | 3554 CHECK_EQUAL_64(4000, x11); |
3555 ASSERT_EQUAL_64(0x1122334455667700, x12); | 3555 CHECK_EQUAL_64(0x1122334455667700, x12); |
3556 | 3556 |
3557 ASSERT_EQUAL_64(600, x13); | 3557 CHECK_EQUAL_64(600, x13); |
3558 ASSERT_EQUAL_64(5000, x14); | 3558 CHECK_EQUAL_64(5000, x14); |
3559 ASSERT_EQUAL_64(0x1122334455667cdd, x15); | 3559 CHECK_EQUAL_64(0x1122334455667cdd, x15); |
3560 | 3560 |
3561 ASSERT_EQUAL_32(0x11223000, w19); | 3561 CHECK_EQUAL_32(0x11223000, w19); |
3562 ASSERT_EQUAL_32(398000, w20); | 3562 CHECK_EQUAL_32(398000, w20); |
3563 | 3563 |
3564 ASSERT_EQUAL_32(0x11223400, w21); | 3564 CHECK_EQUAL_32(0x11223400, w21); |
3565 ASSERT_EQUAL_32(402000, w22); | 3565 CHECK_EQUAL_32(402000, w22); |
3566 | 3566 |
3567 TEARDOWN(); | 3567 TEARDOWN(); |
3568 } | 3568 } |
3569 | 3569 |
3570 | 3570 |
3571 TEST(add_sub_zero) { | 3571 TEST(add_sub_zero) { |
3572 INIT_V8(); | 3572 INIT_V8(); |
3573 SETUP(); | 3573 SETUP(); |
3574 | 3574 |
3575 START(); | 3575 START(); |
(...skipping 15 matching lines...) |
3591 | 3591 |
3592 Label blob3; | 3592 Label blob3; |
3593 __ Bind(&blob3); | 3593 __ Bind(&blob3); |
3594 __ Sub(w3, w3, wzr); | 3594 __ Sub(w3, w3, wzr); |
3595 CHECK_NE(0, __ SizeOfCodeGeneratedSince(&blob3)); | 3595 CHECK_NE(0, __ SizeOfCodeGeneratedSince(&blob3)); |
3596 | 3596 |
3597 END(); | 3597 END(); |
3598 | 3598 |
3599 RUN(); | 3599 RUN(); |
3600 | 3600 |
3601 ASSERT_EQUAL_64(0, x0); | 3601 CHECK_EQUAL_64(0, x0); |
3602 ASSERT_EQUAL_64(0, x1); | 3602 CHECK_EQUAL_64(0, x1); |
3603 ASSERT_EQUAL_64(0, x2); | 3603 CHECK_EQUAL_64(0, x2); |
3604 | 3604 |
3605 TEARDOWN(); | 3605 TEARDOWN(); |
3606 } | 3606 } |
3607 | 3607 |
3608 | 3608 |
3609 TEST(claim_drop_zero) { | 3609 TEST(claim_drop_zero) { |
3610 INIT_V8(); | 3610 INIT_V8(); |
3611 SETUP(); | 3611 SETUP(); |
3612 | 3612 |
3613 START(); | 3613 START(); |
(...skipping 45 matching lines...) |
3659 __ Neg(w9, Operand(w0, UXTB)); | 3659 __ Neg(w9, Operand(w0, UXTB)); |
3660 __ Neg(x10, Operand(x0, SXTB, 1)); | 3660 __ Neg(x10, Operand(x0, SXTB, 1)); |
3661 __ Neg(w11, Operand(w0, UXTH, 2)); | 3661 __ Neg(w11, Operand(w0, UXTH, 2)); |
3662 __ Neg(x12, Operand(x0, SXTH, 3)); | 3662 __ Neg(x12, Operand(x0, SXTH, 3)); |
3663 __ Neg(w13, Operand(w0, UXTW, 4)); | 3663 __ Neg(w13, Operand(w0, UXTW, 4)); |
3664 __ Neg(x14, Operand(x0, SXTW, 4)); | 3664 __ Neg(x14, Operand(x0, SXTW, 4)); |
3665 END(); | 3665 END(); |
3666 | 3666 |
3667 RUN(); | 3667 RUN(); |
3668 | 3668 |
3669 ASSERT_EQUAL_64(0xfffffffffffffeddUL, x1); | 3669 CHECK_EQUAL_64(0xfffffffffffffeddUL, x1); |
3670 ASSERT_EQUAL_64(0xfffffedd, x2); | 3670 CHECK_EQUAL_64(0xfffffedd, x2); |
3671 ASSERT_EQUAL_64(0x1db97530eca86422UL, x3); | 3671 CHECK_EQUAL_64(0x1db97530eca86422UL, x3); |
3672 ASSERT_EQUAL_64(0xd950c844, x4); | 3672 CHECK_EQUAL_64(0xd950c844, x4); |
3673 ASSERT_EQUAL_64(0xe1db97530eca8643UL, x5); | 3673 CHECK_EQUAL_64(0xe1db97530eca8643UL, x5); |
3674 ASSERT_EQUAL_64(0xf7654322, x6); | 3674 CHECK_EQUAL_64(0xf7654322, x6); |
3675 ASSERT_EQUAL_64(0x0076e5d4c3b2a191UL, x7); | 3675 CHECK_EQUAL_64(0x0076e5d4c3b2a191UL, x7); |
3676 ASSERT_EQUAL_64(0x01d950c9, x8); | 3676 CHECK_EQUAL_64(0x01d950c9, x8); |
3677 ASSERT_EQUAL_64(0xffffff11, x9); | 3677 CHECK_EQUAL_64(0xffffff11, x9); |
3678 ASSERT_EQUAL_64(0x0000000000000022UL, x10); | 3678 CHECK_EQUAL_64(0x0000000000000022UL, x10); |
3679 ASSERT_EQUAL_64(0xfffcc844, x11); | 3679 CHECK_EQUAL_64(0xfffcc844, x11); |
3680 ASSERT_EQUAL_64(0x0000000000019088UL, x12); | 3680 CHECK_EQUAL_64(0x0000000000019088UL, x12); |
3681 ASSERT_EQUAL_64(0x65432110, x13); | 3681 CHECK_EQUAL_64(0x65432110, x13); |
3682 ASSERT_EQUAL_64(0x0000000765432110UL, x14); | 3682 CHECK_EQUAL_64(0x0000000765432110UL, x14); |
3683 | 3683 |
3684 TEARDOWN(); | 3684 TEARDOWN(); |
3685 } | 3685 } |
3686 | 3686 |
3687 | 3687 |
3688 TEST(adc_sbc_shift) { | 3688 TEST(adc_sbc_shift) { |
3689 INIT_V8(); | 3689 INIT_V8(); |
3690 SETUP(); | 3690 SETUP(); |
3691 | 3691 |
3692 START(); | 3692 START(); |
(...skipping 29 matching lines...) |
3722 | 3722 |
3723 __ Adc(w23, w2, Operand(w3)); | 3723 __ Adc(w23, w2, Operand(w3)); |
3724 __ Adc(w24, w0, Operand(w1, LSL, 30)); | 3724 __ Adc(w24, w0, Operand(w1, LSL, 30)); |
3725 __ Sbc(w25, w4, Operand(w3, LSR, 4)); | 3725 __ Sbc(w25, w4, Operand(w3, LSR, 4)); |
3726 __ Adc(w26, w2, Operand(w3, ASR, 4)); | 3726 __ Adc(w26, w2, Operand(w3, ASR, 4)); |
3727 __ Adc(w27, w2, Operand(w3, ROR, 8)); | 3727 __ Adc(w27, w2, Operand(w3, ROR, 8)); |
3728 END(); | 3728 END(); |
3729 | 3729 |
3730 RUN(); | 3730 RUN(); |
3731 | 3731 |
3732 ASSERT_EQUAL_64(0xffffffffffffffffL, x5); | 3732 CHECK_EQUAL_64(0xffffffffffffffffL, x5); |
3733 ASSERT_EQUAL_64(1L << 60, x6); | 3733 CHECK_EQUAL_64(1L << 60, x6); |
3734 ASSERT_EQUAL_64(0xf0123456789abcddL, x7); | 3734 CHECK_EQUAL_64(0xf0123456789abcddL, x7); |
3735 ASSERT_EQUAL_64(0x0111111111111110L, x8); | 3735 CHECK_EQUAL_64(0x0111111111111110L, x8); |
3736 ASSERT_EQUAL_64(0x1222222222222221L, x9); | 3736 CHECK_EQUAL_64(0x1222222222222221L, x9); |
3737 | 3737 |
3738 ASSERT_EQUAL_32(0xffffffff, w10); | 3738 CHECK_EQUAL_32(0xffffffff, w10); |
3739 ASSERT_EQUAL_32(1 << 30, w11); | 3739 CHECK_EQUAL_32(1 << 30, w11); |
3740 ASSERT_EQUAL_32(0xf89abcdd, w12); | 3740 CHECK_EQUAL_32(0xf89abcdd, w12); |
3741 ASSERT_EQUAL_32(0x91111110, w13); | 3741 CHECK_EQUAL_32(0x91111110, w13); |
3742 ASSERT_EQUAL_32(0x9a222221, w14); | 3742 CHECK_EQUAL_32(0x9a222221, w14); |
3743 | 3743 |
3744 ASSERT_EQUAL_64(0xffffffffffffffffL + 1, x18); | 3744 CHECK_EQUAL_64(0xffffffffffffffffL + 1, x18); |
3745 ASSERT_EQUAL_64((1L << 60) + 1, x19); | 3745 CHECK_EQUAL_64((1L << 60) + 1, x19); |
3746 ASSERT_EQUAL_64(0xf0123456789abcddL + 1, x20); | 3746 CHECK_EQUAL_64(0xf0123456789abcddL + 1, x20); |
3747 ASSERT_EQUAL_64(0x0111111111111110L + 1, x21); | 3747 CHECK_EQUAL_64(0x0111111111111110L + 1, x21); |
3748 ASSERT_EQUAL_64(0x1222222222222221L + 1, x22); | 3748 CHECK_EQUAL_64(0x1222222222222221L + 1, x22); |
3749 | 3749 |
3750 ASSERT_EQUAL_32(0xffffffff + 1, w23); | 3750 CHECK_EQUAL_32(0xffffffff + 1, w23); |
3751 ASSERT_EQUAL_32((1 << 30) + 1, w24); | 3751 CHECK_EQUAL_32((1 << 30) + 1, w24); |
3752 ASSERT_EQUAL_32(0xf89abcdd + 1, w25); | 3752 CHECK_EQUAL_32(0xf89abcdd + 1, w25); |
3753 ASSERT_EQUAL_32(0x91111110 + 1, w26); | 3753 CHECK_EQUAL_32(0x91111110 + 1, w26); |
3754 ASSERT_EQUAL_32(0x9a222221 + 1, w27); | 3754 CHECK_EQUAL_32(0x9a222221 + 1, w27); |
3755 | 3755 |
3756 // Check that adc correctly sets the condition flags. | 3756 // Check that adc correctly sets the condition flags. |
3757 START(); | 3757 START(); |
3758 __ Mov(x0, 1); | 3758 __ Mov(x0, 1); |
3759 __ Mov(x1, 0xffffffffffffffffL); | 3759 __ Mov(x1, 0xffffffffffffffffL); |
3760 // Clear the C flag. | 3760 // Clear the C flag. |
3761 __ Adds(x0, x0, Operand(0)); | 3761 __ Adds(x0, x0, Operand(0)); |
3762 __ Adcs(x10, x0, Operand(x1)); | 3762 __ Adcs(x10, x0, Operand(x1)); |
3763 END(); | 3763 END(); |
3764 | 3764 |
3765 RUN(); | 3765 RUN(); |
3766 | 3766 |
3767 ASSERT_EQUAL_NZCV(ZCFlag); | 3767 CHECK_EQUAL_NZCV(ZCFlag); |
3768 ASSERT_EQUAL_64(0, x10); | 3768 CHECK_EQUAL_64(0, x10); |
3769 | 3769 |
3770 START(); | 3770 START(); |
3771 __ Mov(x0, 1); | 3771 __ Mov(x0, 1); |
3772 __ Mov(x1, 0x8000000000000000L); | 3772 __ Mov(x1, 0x8000000000000000L); |
3773 // Clear the C flag. | 3773 // Clear the C flag. |
3774 __ Adds(x0, x0, Operand(0)); | 3774 __ Adds(x0, x0, Operand(0)); |
3775 __ Adcs(x10, x0, Operand(x1, ASR, 63)); | 3775 __ Adcs(x10, x0, Operand(x1, ASR, 63)); |
3776 END(); | 3776 END(); |
3777 | 3777 |
3778 RUN(); | 3778 RUN(); |
3779 | 3779 |
3780 ASSERT_EQUAL_NZCV(ZCFlag); | 3780 CHECK_EQUAL_NZCV(ZCFlag); |
3781 ASSERT_EQUAL_64(0, x10); | 3781 CHECK_EQUAL_64(0, x10); |
3782 | 3782 |
3783 START(); | 3783 START(); |
3784 __ Mov(x0, 0x10); | 3784 __ Mov(x0, 0x10); |
3785 __ Mov(x1, 0x07ffffffffffffffL); | 3785 __ Mov(x1, 0x07ffffffffffffffL); |
3786 // Clear the C flag. | 3786 // Clear the C flag. |
3787 __ Adds(x0, x0, Operand(0)); | 3787 __ Adds(x0, x0, Operand(0)); |
3788 __ Adcs(x10, x0, Operand(x1, LSL, 4)); | 3788 __ Adcs(x10, x0, Operand(x1, LSL, 4)); |
3789 END(); | 3789 END(); |
3790 | 3790 |
3791 RUN(); | 3791 RUN(); |
3792 | 3792 |
3793 ASSERT_EQUAL_NZCV(NVFlag); | 3793 CHECK_EQUAL_NZCV(NVFlag); |
3794 ASSERT_EQUAL_64(0x8000000000000000L, x10); | 3794 CHECK_EQUAL_64(0x8000000000000000L, x10); |
3795 | 3795 |
3796 // Check that sbc correctly sets the condition flags. | 3796 // Check that sbc correctly sets the condition flags. |
3797 START(); | 3797 START(); |
3798 __ Mov(x0, 0); | 3798 __ Mov(x0, 0); |
3799 __ Mov(x1, 0xffffffffffffffffL); | 3799 __ Mov(x1, 0xffffffffffffffffL); |
3800 // Clear the C flag. | 3800 // Clear the C flag. |
3801 __ Adds(x0, x0, Operand(0)); | 3801 __ Adds(x0, x0, Operand(0)); |
3802 __ Sbcs(x10, x0, Operand(x1)); | 3802 __ Sbcs(x10, x0, Operand(x1)); |
3803 END(); | 3803 END(); |
3804 | 3804 |
3805 RUN(); | 3805 RUN(); |
3806 | 3806 |
3807 ASSERT_EQUAL_NZCV(ZFlag); | 3807 CHECK_EQUAL_NZCV(ZFlag); |
3808 ASSERT_EQUAL_64(0, x10); | 3808 CHECK_EQUAL_64(0, x10); |
3809 | 3809 |
3810 START(); | 3810 START(); |
3811 __ Mov(x0, 1); | 3811 __ Mov(x0, 1); |
3812 __ Mov(x1, 0xffffffffffffffffL); | 3812 __ Mov(x1, 0xffffffffffffffffL); |
3813 // Clear the C flag. | 3813 // Clear the C flag. |
3814 __ Adds(x0, x0, Operand(0)); | 3814 __ Adds(x0, x0, Operand(0)); |
3815 __ Sbcs(x10, x0, Operand(x1, LSR, 1)); | 3815 __ Sbcs(x10, x0, Operand(x1, LSR, 1)); |
3816 END(); | 3816 END(); |
3817 | 3817 |
3818 RUN(); | 3818 RUN(); |
3819 | 3819 |
3820 ASSERT_EQUAL_NZCV(NFlag); | 3820 CHECK_EQUAL_NZCV(NFlag); |
3821 ASSERT_EQUAL_64(0x8000000000000001L, x10); | 3821 CHECK_EQUAL_64(0x8000000000000001L, x10); |
3822 | 3822 |
3823 START(); | 3823 START(); |
3824 __ Mov(x0, 0); | 3824 __ Mov(x0, 0); |
3825 // Clear the C flag. | 3825 // Clear the C flag. |
3826 __ Adds(x0, x0, Operand(0)); | 3826 __ Adds(x0, x0, Operand(0)); |
3827 __ Sbcs(x10, x0, Operand(0xffffffffffffffffL)); | 3827 __ Sbcs(x10, x0, Operand(0xffffffffffffffffL)); |
3828 END(); | 3828 END(); |
3829 | 3829 |
3830 RUN(); | 3830 RUN(); |
3831 | 3831 |
3832 ASSERT_EQUAL_NZCV(ZFlag); | 3832 CHECK_EQUAL_NZCV(ZFlag); |
3833 ASSERT_EQUAL_64(0, x10); | 3833 CHECK_EQUAL_64(0, x10); |
3834 | 3834 |
3835 START() | 3835 START() |
3836 __ Mov(w0, 0x7fffffff); | 3836 __ Mov(w0, 0x7fffffff); |
3837 // Clear the C flag. | 3837 // Clear the C flag. |
3838 __ Adds(x0, x0, Operand(0)); | 3838 __ Adds(x0, x0, Operand(0)); |
3839 __ Ngcs(w10, w0); | 3839 __ Ngcs(w10, w0); |
3840 END(); | 3840 END(); |
3841 | 3841 |
3842 RUN(); | 3842 RUN(); |
3843 | 3843 |
3844 ASSERT_EQUAL_NZCV(NFlag); | 3844 CHECK_EQUAL_NZCV(NFlag); |
3845 ASSERT_EQUAL_64(0x80000000, x10); | 3845 CHECK_EQUAL_64(0x80000000, x10); |
3846 | 3846 |
3847 START(); | 3847 START(); |
3848 // Clear the C flag. | 3848 // Clear the C flag. |
3849 __ Adds(x0, x0, Operand(0)); | 3849 __ Adds(x0, x0, Operand(0)); |
3850 __ Ngcs(x10, 0x7fffffffffffffffL); | 3850 __ Ngcs(x10, 0x7fffffffffffffffL); |
3851 END(); | 3851 END(); |
3852 | 3852 |
3853 RUN(); | 3853 RUN(); |
3854 | 3854 |
3855 ASSERT_EQUAL_NZCV(NFlag); | 3855 CHECK_EQUAL_NZCV(NFlag); |
3856 ASSERT_EQUAL_64(0x8000000000000000L, x10); | 3856 CHECK_EQUAL_64(0x8000000000000000L, x10); |
3857 | 3857 |
3858 START() | 3858 START() |
3859 __ Mov(x0, 0); | 3859 __ Mov(x0, 0); |
3860 // Set the C flag. | 3860 // Set the C flag. |
3861 __ Cmp(x0, Operand(x0)); | 3861 __ Cmp(x0, Operand(x0)); |
3862 __ Sbcs(x10, x0, Operand(1)); | 3862 __ Sbcs(x10, x0, Operand(1)); |
3863 END(); | 3863 END(); |
3864 | 3864 |
3865 RUN(); | 3865 RUN(); |
3866 | 3866 |
3867 ASSERT_EQUAL_NZCV(NFlag); | 3867 CHECK_EQUAL_NZCV(NFlag); |
3868 ASSERT_EQUAL_64(0xffffffffffffffffL, x10); | 3868 CHECK_EQUAL_64(0xffffffffffffffffL, x10); |
3869 | 3869 |
3870 START() | 3870 START() |
3871 __ Mov(x0, 0); | 3871 __ Mov(x0, 0); |
3872 // Set the C flag. | 3872 // Set the C flag. |
3873 __ Cmp(x0, Operand(x0)); | 3873 __ Cmp(x0, Operand(x0)); |
3874 __ Ngcs(x10, 0x7fffffffffffffffL); | 3874 __ Ngcs(x10, 0x7fffffffffffffffL); |
3875 END(); | 3875 END(); |
3876 | 3876 |
3877 RUN(); | 3877 RUN(); |
3878 | 3878 |
3879 ASSERT_EQUAL_NZCV(NFlag); | 3879 CHECK_EQUAL_NZCV(NFlag); |
3880 ASSERT_EQUAL_64(0x8000000000000001L, x10); | 3880 CHECK_EQUAL_64(0x8000000000000001L, x10); |
3881 | 3881 |
3882 TEARDOWN(); | 3882 TEARDOWN(); |
3883 } | 3883 } |
3884 | 3884 |
3885 | 3885 |
3886 TEST(adc_sbc_extend) { | 3886 TEST(adc_sbc_extend) { |
3887 INIT_V8(); | 3887 INIT_V8(); |
3888 SETUP(); | 3888 SETUP(); |
3889 | 3889 |
3890 START(); | 3890 START(); |
(...skipping 21 matching lines...) |
3912 __ Sbc(x22, x1, Operand(w2, UXTW, 4)); | 3912 __ Sbc(x22, x1, Operand(w2, UXTW, 4)); |
3913 __ Adc(x23, x1, Operand(x2, UXTX, 4)); | 3913 __ Adc(x23, x1, Operand(x2, UXTX, 4)); |
3914 | 3914 |
3915 __ Adc(w24, w1, Operand(w2, UXTB, 1)); | 3915 __ Adc(w24, w1, Operand(w2, UXTB, 1)); |
3916 __ Adc(w25, w1, Operand(w2, SXTH, 2)); | 3916 __ Adc(w25, w1, Operand(w2, SXTH, 2)); |
3917 __ Adc(w26, w1, Operand(w2, UXTW, 4)); | 3917 __ Adc(w26, w1, Operand(w2, UXTW, 4)); |
3918 END(); | 3918 END(); |
3919 | 3919 |
3920 RUN(); | 3920 RUN(); |
3921 | 3921 |
3922 ASSERT_EQUAL_64(0x1df, x10); | 3922 CHECK_EQUAL_64(0x1df, x10); |
3923 ASSERT_EQUAL_64(0xffffffffffff37bdL, x11); | 3923 CHECK_EQUAL_64(0xffffffffffff37bdL, x11); |
3924 ASSERT_EQUAL_64(0xfffffff765432110L, x12); | 3924 CHECK_EQUAL_64(0xfffffff765432110L, x12); |
3925 ASSERT_EQUAL_64(0x123456789abcdef1L, x13); | 3925 CHECK_EQUAL_64(0x123456789abcdef1L, x13); |
3926 | 3926 |
3927 ASSERT_EQUAL_32(0x1df, w14); | 3927 CHECK_EQUAL_32(0x1df, w14); |
3928 ASSERT_EQUAL_32(0xffff37bd, w15); | 3928 CHECK_EQUAL_32(0xffff37bd, w15); |
3929 ASSERT_EQUAL_32(0x9abcdef1, w9); | 3929 CHECK_EQUAL_32(0x9abcdef1, w9); |
3930 | 3930 |
3931 ASSERT_EQUAL_64(0x1df + 1, x20); | 3931 CHECK_EQUAL_64(0x1df + 1, x20); |
3932 ASSERT_EQUAL_64(0xffffffffffff37bdL + 1, x21); | 3932 CHECK_EQUAL_64(0xffffffffffff37bdL + 1, x21); |
3933 ASSERT_EQUAL_64(0xfffffff765432110L + 1, x22); | 3933 CHECK_EQUAL_64(0xfffffff765432110L + 1, x22); |
3934 ASSERT_EQUAL_64(0x123456789abcdef1L + 1, x23); | 3934 CHECK_EQUAL_64(0x123456789abcdef1L + 1, x23); |
3935 | 3935 |
3936 ASSERT_EQUAL_32(0x1df + 1, w24); | 3936 CHECK_EQUAL_32(0x1df + 1, w24); |
3937 ASSERT_EQUAL_32(0xffff37bd + 1, w25); | 3937 CHECK_EQUAL_32(0xffff37bd + 1, w25); |
3938 ASSERT_EQUAL_32(0x9abcdef1 + 1, w26); | 3938 CHECK_EQUAL_32(0x9abcdef1 + 1, w26); |
3939 | 3939 |
3940 // Check that adc correctly sets the condition flags. | 3940 // Check that adc correctly sets the condition flags. |
3941 START(); | 3941 START(); |
3942 __ Mov(x0, 0xff); | 3942 __ Mov(x0, 0xff); |
3943 __ Mov(x1, 0xffffffffffffffffL); | 3943 __ Mov(x1, 0xffffffffffffffffL); |
3944 // Clear the C flag. | 3944 // Clear the C flag. |
3945 __ Adds(x0, x0, Operand(0)); | 3945 __ Adds(x0, x0, Operand(0)); |
3946 __ Adcs(x10, x0, Operand(x1, SXTX, 1)); | 3946 __ Adcs(x10, x0, Operand(x1, SXTX, 1)); |
3947 END(); | 3947 END(); |
3948 | 3948 |
3949 RUN(); | 3949 RUN(); |
3950 | 3950 |
3951 ASSERT_EQUAL_NZCV(CFlag); | 3951 CHECK_EQUAL_NZCV(CFlag); |
3952 | 3952 |
3953 START(); | 3953 START(); |
3954 __ Mov(x0, 0x7fffffffffffffffL); | 3954 __ Mov(x0, 0x7fffffffffffffffL); |
3955 __ Mov(x1, 1); | 3955 __ Mov(x1, 1); |
3956 // Clear the C flag. | 3956 // Clear the C flag. |
3957 __ Adds(x0, x0, Operand(0)); | 3957 __ Adds(x0, x0, Operand(0)); |
3958 __ Adcs(x10, x0, Operand(x1, UXTB, 2)); | 3958 __ Adcs(x10, x0, Operand(x1, UXTB, 2)); |
3959 END(); | 3959 END(); |
3960 | 3960 |
3961 RUN(); | 3961 RUN(); |
3962 | 3962 |
3963 ASSERT_EQUAL_NZCV(NVFlag); | 3963 CHECK_EQUAL_NZCV(NVFlag); |
3964 | 3964 |
3965 START(); | 3965 START(); |
3966 __ Mov(x0, 0x7fffffffffffffffL); | 3966 __ Mov(x0, 0x7fffffffffffffffL); |
3967 // Clear the C flag. | 3967 // Clear the C flag. |
3968 __ Adds(x0, x0, Operand(0)); | 3968 __ Adds(x0, x0, Operand(0)); |
3969 __ Adcs(x10, x0, Operand(1)); | 3969 __ Adcs(x10, x0, Operand(1)); |
3970 END(); | 3970 END(); |
3971 | 3971 |
3972 RUN(); | 3972 RUN(); |
3973 | 3973 |
3974 ASSERT_EQUAL_NZCV(NVFlag); | 3974 CHECK_EQUAL_NZCV(NVFlag); |
3975 | 3975 |
3976 TEARDOWN(); | 3976 TEARDOWN(); |
3977 } | 3977 } |
3978 | 3978 |
3979 | 3979 |
3980 TEST(adc_sbc_wide_imm) { | 3980 TEST(adc_sbc_wide_imm) { |
3981 INIT_V8(); | 3981 INIT_V8(); |
3982 SETUP(); | 3982 SETUP(); |
3983 | 3983 |
3984 START(); | 3984 START(); |
(...skipping 15 matching lines...) |
4000 __ Adc(x18, x0, Operand(0x1234567890abcdefUL)); | 4000 __ Adc(x18, x0, Operand(0x1234567890abcdefUL)); |
4001 __ Adc(w19, w0, Operand(0xffffffff)); | 4001 __ Adc(w19, w0, Operand(0xffffffff)); |
4002 __ Sbc(x20, x0, Operand(0x1234567890abcdefUL)); | 4002 __ Sbc(x20, x0, Operand(0x1234567890abcdefUL)); |
4003 __ Sbc(w21, w0, Operand(0xffffffff)); | 4003 __ Sbc(w21, w0, Operand(0xffffffff)); |
4004 __ Ngc(x22, Operand(0xffffffff00000000UL)); | 4004 __ Ngc(x22, Operand(0xffffffff00000000UL)); |
4005 __ Ngc(w23, Operand(0xffff0000)); | 4005 __ Ngc(w23, Operand(0xffff0000)); |
4006 END(); | 4006 END(); |
4007 | 4007 |
4008 RUN(); | 4008 RUN(); |
4009 | 4009 |
4010 ASSERT_EQUAL_64(0x1234567890abcdefUL, x7); | 4010 CHECK_EQUAL_64(0x1234567890abcdefUL, x7); |
4011 ASSERT_EQUAL_64(0xffffffff, x8); | 4011 CHECK_EQUAL_64(0xffffffff, x8); |
4012 ASSERT_EQUAL_64(0xedcba9876f543210UL, x9); | 4012 CHECK_EQUAL_64(0xedcba9876f543210UL, x9); |
4013 ASSERT_EQUAL_64(0, x10); | 4013 CHECK_EQUAL_64(0, x10); |
4014 ASSERT_EQUAL_64(0xffffffff, x11); | 4014 CHECK_EQUAL_64(0xffffffff, x11); |
4015 ASSERT_EQUAL_64(0xffff, x12); | 4015 CHECK_EQUAL_64(0xffff, x12); |
4016 | 4016 |
4017 ASSERT_EQUAL_64(0x1234567890abcdefUL + 1, x18); | 4017 CHECK_EQUAL_64(0x1234567890abcdefUL + 1, x18); |
4018 ASSERT_EQUAL_64(0, x19); | 4018 CHECK_EQUAL_64(0, x19); |
4019 ASSERT_EQUAL_64(0xedcba9876f543211UL, x20); | 4019 CHECK_EQUAL_64(0xedcba9876f543211UL, x20); |
4020 ASSERT_EQUAL_64(1, x21); | 4020 CHECK_EQUAL_64(1, x21); |
4021 ASSERT_EQUAL_64(0x100000000UL, x22); | 4021 CHECK_EQUAL_64(0x100000000UL, x22); |
4022 ASSERT_EQUAL_64(0x10000, x23); | 4022 CHECK_EQUAL_64(0x10000, x23); |
4023 | 4023 |
4024 TEARDOWN(); | 4024 TEARDOWN(); |
4025 } | 4025 } |
4026 | 4026 |
4027 | 4027 |
4028 TEST(flags) { | 4028 TEST(flags) { |
4029 INIT_V8(); | 4029 INIT_V8(); |
4030 SETUP(); | 4030 SETUP(); |
4031 | 4031 |
4032 START(); | 4032 START(); |
4033 __ Mov(x0, 0); | 4033 __ Mov(x0, 0); |
4034 __ Mov(x1, 0x1111111111111111L); | 4034 __ Mov(x1, 0x1111111111111111L); |
4035 __ Neg(x10, Operand(x0)); | 4035 __ Neg(x10, Operand(x0)); |
4036 __ Neg(x11, Operand(x1)); | 4036 __ Neg(x11, Operand(x1)); |
4037 __ Neg(w12, Operand(w1)); | 4037 __ Neg(w12, Operand(w1)); |
4038 // Clear the C flag. | 4038 // Clear the C flag. |
4039 __ Adds(x0, x0, Operand(0)); | 4039 __ Adds(x0, x0, Operand(0)); |
4040 __ Ngc(x13, Operand(x0)); | 4040 __ Ngc(x13, Operand(x0)); |
4041 // Set the C flag. | 4041 // Set the C flag. |
4042 __ Cmp(x0, Operand(x0)); | 4042 __ Cmp(x0, Operand(x0)); |
4043 __ Ngc(w14, Operand(w0)); | 4043 __ Ngc(w14, Operand(w0)); |
4044 END(); | 4044 END(); |
4045 | 4045 |
4046 RUN(); | 4046 RUN(); |
4047 | 4047 |
4048 ASSERT_EQUAL_64(0, x10); | 4048 CHECK_EQUAL_64(0, x10); |
4049 ASSERT_EQUAL_64(-0x1111111111111111L, x11); | 4049 CHECK_EQUAL_64(-0x1111111111111111L, x11); |
4050 ASSERT_EQUAL_32(-0x11111111, w12); | 4050 CHECK_EQUAL_32(-0x11111111, w12); |
4051 ASSERT_EQUAL_64(-1L, x13); | 4051 CHECK_EQUAL_64(-1L, x13); |
4052 ASSERT_EQUAL_32(0, w14); | 4052 CHECK_EQUAL_32(0, w14); |
4053 | 4053 |
4054 START(); | 4054 START(); |
4055 __ Mov(x0, 0); | 4055 __ Mov(x0, 0); |
4056 __ Cmp(x0, Operand(x0)); | 4056 __ Cmp(x0, Operand(x0)); |
4057 END(); | 4057 END(); |
4058 | 4058 |
4059 RUN(); | 4059 RUN(); |
4060 | 4060 |
4061 ASSERT_EQUAL_NZCV(ZCFlag); | 4061 CHECK_EQUAL_NZCV(ZCFlag); |
4062 | 4062 |
4063 START(); | 4063 START(); |
4064 __ Mov(w0, 0); | 4064 __ Mov(w0, 0); |
4065 __ Cmp(w0, Operand(w0)); | 4065 __ Cmp(w0, Operand(w0)); |
4066 END(); | 4066 END(); |
4067 | 4067 |
4068 RUN(); | 4068 RUN(); |
4069 | 4069 |
4070 ASSERT_EQUAL_NZCV(ZCFlag); | 4070 CHECK_EQUAL_NZCV(ZCFlag); |
4071 | 4071 |
4072 START(); | 4072 START(); |
4073 __ Mov(x0, 0); | 4073 __ Mov(x0, 0); |
4074 __ Mov(x1, 0x1111111111111111L); | 4074 __ Mov(x1, 0x1111111111111111L); |
4075 __ Cmp(x0, Operand(x1)); | 4075 __ Cmp(x0, Operand(x1)); |
4076 END(); | 4076 END(); |
4077 | 4077 |
4078 RUN(); | 4078 RUN(); |
4079 | 4079 |
4080 ASSERT_EQUAL_NZCV(NFlag); | 4080 CHECK_EQUAL_NZCV(NFlag); |
4081 | 4081 |
4082 START(); | 4082 START(); |
4083 __ Mov(w0, 0); | 4083 __ Mov(w0, 0); |
4084 __ Mov(w1, 0x11111111); | 4084 __ Mov(w1, 0x11111111); |
4085 __ Cmp(w0, Operand(w1)); | 4085 __ Cmp(w0, Operand(w1)); |
4086 END(); | 4086 END(); |
4087 | 4087 |
4088 RUN(); | 4088 RUN(); |
4089 | 4089 |
4090 ASSERT_EQUAL_NZCV(NFlag); | 4090 CHECK_EQUAL_NZCV(NFlag); |
4091 | 4091 |
4092 START(); | 4092 START(); |
4093 __ Mov(x1, 0x1111111111111111L); | 4093 __ Mov(x1, 0x1111111111111111L); |
4094 __ Cmp(x1, Operand(0)); | 4094 __ Cmp(x1, Operand(0)); |
4095 END(); | 4095 END(); |
4096 | 4096 |
4097 RUN(); | 4097 RUN(); |
4098 | 4098 |
4099 ASSERT_EQUAL_NZCV(CFlag); | 4099 CHECK_EQUAL_NZCV(CFlag); |
4100 | 4100 |
4101 START(); | 4101 START(); |
4102 __ Mov(w1, 0x11111111); | 4102 __ Mov(w1, 0x11111111); |
4103 __ Cmp(w1, Operand(0)); | 4103 __ Cmp(w1, Operand(0)); |
4104 END(); | 4104 END(); |
4105 | 4105 |
4106 RUN(); | 4106 RUN(); |
4107 | 4107 |
4108 ASSERT_EQUAL_NZCV(CFlag); | 4108 CHECK_EQUAL_NZCV(CFlag); |
4109 | 4109 |
4110 START(); | 4110 START(); |
4111 __ Mov(x0, 1); | 4111 __ Mov(x0, 1); |
4112 __ Mov(x1, 0x7fffffffffffffffL); | 4112 __ Mov(x1, 0x7fffffffffffffffL); |
4113 __ Cmn(x1, Operand(x0)); | 4113 __ Cmn(x1, Operand(x0)); |
4114 END(); | 4114 END(); |
4115 | 4115 |
4116 RUN(); | 4116 RUN(); |
4117 | 4117 |
4118 ASSERT_EQUAL_NZCV(NVFlag); | 4118 CHECK_EQUAL_NZCV(NVFlag); |
4119 | 4119 |
4120 START(); | 4120 START(); |
4121 __ Mov(w0, 1); | 4121 __ Mov(w0, 1); |
4122 __ Mov(w1, 0x7fffffff); | 4122 __ Mov(w1, 0x7fffffff); |
4123 __ Cmn(w1, Operand(w0)); | 4123 __ Cmn(w1, Operand(w0)); |
4124 END(); | 4124 END(); |
4125 | 4125 |
4126 RUN(); | 4126 RUN(); |
4127 | 4127 |
4128 ASSERT_EQUAL_NZCV(NVFlag); | 4128 CHECK_EQUAL_NZCV(NVFlag); |
4129 | 4129 |
4130 START(); | 4130 START(); |
4131 __ Mov(x0, 1); | 4131 __ Mov(x0, 1); |
4132 __ Mov(x1, 0xffffffffffffffffL); | 4132 __ Mov(x1, 0xffffffffffffffffL); |
4133 __ Cmn(x1, Operand(x0)); | 4133 __ Cmn(x1, Operand(x0)); |
4134 END(); | 4134 END(); |
4135 | 4135 |
4136 RUN(); | 4136 RUN(); |
4137 | 4137 |
4138 ASSERT_EQUAL_NZCV(ZCFlag); | 4138 CHECK_EQUAL_NZCV(ZCFlag); |
4139 | 4139 |
4140 START(); | 4140 START(); |
4141 __ Mov(w0, 1); | 4141 __ Mov(w0, 1); |
4142 __ Mov(w1, 0xffffffff); | 4142 __ Mov(w1, 0xffffffff); |
4143 __ Cmn(w1, Operand(w0)); | 4143 __ Cmn(w1, Operand(w0)); |
4144 END(); | 4144 END(); |
4145 | 4145 |
4146 RUN(); | 4146 RUN(); |
4147 | 4147 |
4148 ASSERT_EQUAL_NZCV(ZCFlag); | 4148 CHECK_EQUAL_NZCV(ZCFlag); |
4149 | 4149 |
4150 START(); | 4150 START(); |
4151 __ Mov(w0, 0); | 4151 __ Mov(w0, 0); |
4152 __ Mov(w1, 1); | 4152 __ Mov(w1, 1); |
4153 // Clear the C flag. | 4153 // Clear the C flag. |
4154 __ Adds(w0, w0, Operand(0)); | 4154 __ Adds(w0, w0, Operand(0)); |
4155 __ Ngcs(w0, Operand(w1)); | 4155 __ Ngcs(w0, Operand(w1)); |
4156 END(); | 4156 END(); |
4157 | 4157 |
4158 RUN(); | 4158 RUN(); |
4159 | 4159 |
4160 ASSERT_EQUAL_NZCV(NFlag); | 4160 CHECK_EQUAL_NZCV(NFlag); |
4161 | 4161 |
4162 START(); | 4162 START(); |
4163 __ Mov(w0, 0); | 4163 __ Mov(w0, 0); |
4164 __ Mov(w1, 0); | 4164 __ Mov(w1, 0); |
4165 // Set the C flag. | 4165 // Set the C flag. |
4166 __ Cmp(w0, Operand(w0)); | 4166 __ Cmp(w0, Operand(w0)); |
4167 __ Ngcs(w0, Operand(w1)); | 4167 __ Ngcs(w0, Operand(w1)); |
4168 END(); | 4168 END(); |
4169 | 4169 |
4170 RUN(); | 4170 RUN(); |
4171 | 4171 |
4172 ASSERT_EQUAL_NZCV(ZCFlag); | 4172 CHECK_EQUAL_NZCV(ZCFlag); |
4173 | 4173 |
4174 TEARDOWN(); | 4174 TEARDOWN(); |
4175 } | 4175 } |
4176 | 4176 |
4177 | 4177 |
4178 TEST(cmp_shift) { | 4178 TEST(cmp_shift) { |
4179 INIT_V8(); | 4179 INIT_V8(); |
4180 SETUP(); | 4180 SETUP(); |
4181 | 4181 |
4182 START(); | 4182 START(); |
(...skipping 28 matching lines...) |
4211 | 4211 |
4212 __ Cmp(w27, Operand(w22, ROR, 28)); | 4212 __ Cmp(w27, Operand(w22, ROR, 28)); |
4213 __ Mrs(x6, NZCV); | 4213 __ Mrs(x6, NZCV); |
4214 | 4214 |
4215 __ Cmp(x20, Operand(x21, ROR, 31)); | 4215 __ Cmp(x20, Operand(x21, ROR, 31)); |
4216 __ Mrs(x7, NZCV); | 4216 __ Mrs(x7, NZCV); |
4217 END(); | 4217 END(); |
4218 | 4218 |
4219 RUN(); | 4219 RUN(); |
4220 | 4220 |
4221 ASSERT_EQUAL_32(ZCFlag, w0); | 4221 CHECK_EQUAL_32(ZCFlag, w0); |
4222 ASSERT_EQUAL_32(ZCFlag, w1); | 4222 CHECK_EQUAL_32(ZCFlag, w1); |
4223 ASSERT_EQUAL_32(ZCFlag, w2); | 4223 CHECK_EQUAL_32(ZCFlag, w2); |
4224 ASSERT_EQUAL_32(ZCFlag, w3); | 4224 CHECK_EQUAL_32(ZCFlag, w3); |
4225 ASSERT_EQUAL_32(ZCFlag, w4); | 4225 CHECK_EQUAL_32(ZCFlag, w4); |
4226 ASSERT_EQUAL_32(ZCFlag, w5); | 4226 CHECK_EQUAL_32(ZCFlag, w5); |
4227 ASSERT_EQUAL_32(ZCFlag, w6); | 4227 CHECK_EQUAL_32(ZCFlag, w6); |
4228 ASSERT_EQUAL_32(ZCFlag, w7); | 4228 CHECK_EQUAL_32(ZCFlag, w7); |
4229 | 4229 |
4230 TEARDOWN(); | 4230 TEARDOWN(); |
4231 } | 4231 } |
4232 | 4232 |
4233 | 4233 |
4234 TEST(cmp_extend) { | 4234 TEST(cmp_extend) { |
4235 INIT_V8(); | 4235 INIT_V8(); |
4236 SETUP(); | 4236 SETUP(); |
4237 | 4237 |
4238 START(); | 4238 START(); |
(...skipping 25 matching lines...) |
4264 | 4264 |
4265 __ Cmp(x22, Operand(x26, UXTW)); | 4265 __ Cmp(x22, Operand(x26, UXTW)); |
4266 __ Mrs(x6, NZCV); | 4266 __ Mrs(x6, NZCV); |
4267 | 4267 |
4268 __ Cmp(x24, Operand(x26, SXTW, 1)); | 4268 __ Cmp(x24, Operand(x26, SXTW, 1)); |
4269 __ Mrs(x7, NZCV); | 4269 __ Mrs(x7, NZCV); |
4270 END(); | 4270 END(); |
4271 | 4271 |
4272 RUN(); | 4272 RUN(); |
4273 | 4273 |
4274 ASSERT_EQUAL_32(ZCFlag, w0); | 4274 CHECK_EQUAL_32(ZCFlag, w0); |
4275 ASSERT_EQUAL_32(ZCFlag, w1); | 4275 CHECK_EQUAL_32(ZCFlag, w1); |
4276 ASSERT_EQUAL_32(ZCFlag, w2); | 4276 CHECK_EQUAL_32(ZCFlag, w2); |
4277 ASSERT_EQUAL_32(NCFlag, w3); | 4277 CHECK_EQUAL_32(NCFlag, w3); |
4278 ASSERT_EQUAL_32(NCFlag, w4); | 4278 CHECK_EQUAL_32(NCFlag, w4); |
4279 ASSERT_EQUAL_32(ZCFlag, w5); | 4279 CHECK_EQUAL_32(ZCFlag, w5); |
4280 ASSERT_EQUAL_32(NCFlag, w6); | 4280 CHECK_EQUAL_32(NCFlag, w6); |
4281 ASSERT_EQUAL_32(ZCFlag, w7); | 4281 CHECK_EQUAL_32(ZCFlag, w7); |
4282 | 4282 |
4283 TEARDOWN(); | 4283 TEARDOWN(); |
4284 } | 4284 } |
4285 | 4285 |
4286 | 4286 |
4287 TEST(ccmp) { | 4287 TEST(ccmp) { |
4288 INIT_V8(); | 4288 INIT_V8(); |
4289 SETUP(); | 4289 SETUP(); |
4290 | 4290 |
4291 START(); | 4291 START(); |
(...skipping 18 matching lines...) |
4310 __ ccmp(x16, x16, NZCVFlag, al); | 4310 __ ccmp(x16, x16, NZCVFlag, al); |
4311 __ Mrs(x4, NZCV); | 4311 __ Mrs(x4, NZCV); |
4312 | 4312 |
4313 __ ccmp(x16, x16, NZCVFlag, nv); | 4313 __ ccmp(x16, x16, NZCVFlag, nv); |
4314 __ Mrs(x5, NZCV); | 4314 __ Mrs(x5, NZCV); |
4315 | 4315 |
4316 END(); | 4316 END(); |
4317 | 4317 |
4318 RUN(); | 4318 RUN(); |
4319 | 4319 |
4320 ASSERT_EQUAL_32(NFlag, w0); | 4320 CHECK_EQUAL_32(NFlag, w0); |
4321 ASSERT_EQUAL_32(NCFlag, w1); | 4321 CHECK_EQUAL_32(NCFlag, w1); |
4322 ASSERT_EQUAL_32(NoFlag, w2); | 4322 CHECK_EQUAL_32(NoFlag, w2); |
4323 ASSERT_EQUAL_32(NZCVFlag, w3); | 4323 CHECK_EQUAL_32(NZCVFlag, w3); |
4324 ASSERT_EQUAL_32(ZCFlag, w4); | 4324 CHECK_EQUAL_32(ZCFlag, w4); |
4325 ASSERT_EQUAL_32(ZCFlag, w5); | 4325 CHECK_EQUAL_32(ZCFlag, w5); |
4326 | 4326 |
4327 TEARDOWN(); | 4327 TEARDOWN(); |
4328 } | 4328 } |
4329 | 4329 |
4330 | 4330 |
4331 TEST(ccmp_wide_imm) { | 4331 TEST(ccmp_wide_imm) { |
4332 INIT_V8(); | 4332 INIT_V8(); |
4333 SETUP(); | 4333 SETUP(); |
4334 | 4334 |
4335 START(); | 4335 START(); |
4336 __ Mov(w20, 0); | 4336 __ Mov(w20, 0); |
4337 | 4337 |
4338 __ Cmp(w20, Operand(w20)); | 4338 __ Cmp(w20, Operand(w20)); |
4339 __ Ccmp(w20, Operand(0x12345678), NZCVFlag, eq); | 4339 __ Ccmp(w20, Operand(0x12345678), NZCVFlag, eq); |
4340 __ Mrs(x0, NZCV); | 4340 __ Mrs(x0, NZCV); |
4341 | 4341 |
4342 __ Cmp(w20, Operand(w20)); | 4342 __ Cmp(w20, Operand(w20)); |
4343 __ Ccmp(x20, Operand(0xffffffffffffffffUL), NZCVFlag, eq); | 4343 __ Ccmp(x20, Operand(0xffffffffffffffffUL), NZCVFlag, eq); |
4344 __ Mrs(x1, NZCV); | 4344 __ Mrs(x1, NZCV); |
4345 END(); | 4345 END(); |
4346 | 4346 |
4347 RUN(); | 4347 RUN(); |
4348 | 4348 |
4349 ASSERT_EQUAL_32(NFlag, w0); | 4349 CHECK_EQUAL_32(NFlag, w0); |
4350 ASSERT_EQUAL_32(NoFlag, w1); | 4350 CHECK_EQUAL_32(NoFlag, w1); |
4351 | 4351 |
4352 TEARDOWN(); | 4352 TEARDOWN(); |
4353 } | 4353 } |
4354 | 4354 |
4355 | 4355 |
4356 TEST(ccmp_shift_extend) { | 4356 TEST(ccmp_shift_extend) { |
4357 INIT_V8(); | 4357 INIT_V8(); |
4358 SETUP(); | 4358 SETUP(); |
4359 | 4359 |
4360 START(); | 4360 START(); |
(...skipping 19 matching lines...) |
4380 __ Ccmp(x24, Operand(x23, UXTB, 1), NZCVFlag, eq); | 4380 __ Ccmp(x24, Operand(x23, UXTB, 1), NZCVFlag, eq); |
4381 __ Mrs(x3, NZCV); | 4381 __ Mrs(x3, NZCV); |
4382 | 4382 |
4383 __ Cmp(w20, Operand(w20)); | 4383 __ Cmp(w20, Operand(w20)); |
4384 __ Ccmp(x24, Operand(x23, UXTB, 1), NZCVFlag, ne); | 4384 __ Ccmp(x24, Operand(x23, UXTB, 1), NZCVFlag, ne); |
4385 __ Mrs(x4, NZCV); | 4385 __ Mrs(x4, NZCV); |
4386 END(); | 4386 END(); |
4387 | 4387 |
4388 RUN(); | 4388 RUN(); |
4389 | 4389 |
4390 ASSERT_EQUAL_32(ZCFlag, w0); | 4390 CHECK_EQUAL_32(ZCFlag, w0); |
4391 ASSERT_EQUAL_32(ZCFlag, w1); | 4391 CHECK_EQUAL_32(ZCFlag, w1); |
4392 ASSERT_EQUAL_32(ZCFlag, w2); | 4392 CHECK_EQUAL_32(ZCFlag, w2); |
4393 ASSERT_EQUAL_32(NCFlag, w3); | 4393 CHECK_EQUAL_32(NCFlag, w3); |
4394 ASSERT_EQUAL_32(NZCVFlag, w4); | 4394 CHECK_EQUAL_32(NZCVFlag, w4); |
4395 | 4395 |
4396 TEARDOWN(); | 4396 TEARDOWN(); |
4397 } | 4397 } |
4398 | 4398 |
4399 | 4399 |
4400 TEST(csel) { | 4400 TEST(csel) { |
4401 INIT_V8(); | 4401 INIT_V8(); |
4402 SETUP(); | 4402 SETUP(); |
4403 | 4403 |
4404 START(); | 4404 START(); |
(...skipping 29 matching lines...) |
4434 | 4434 |
4435 __ CzeroX(x24, ne); | 4435 __ CzeroX(x24, ne); |
4436 __ CzeroX(x25, eq); | 4436 __ CzeroX(x25, eq); |
4437 | 4437 |
4438 __ CmovX(x26, x25, ne); | 4438 __ CmovX(x26, x25, ne); |
4439 __ CmovX(x27, x25, eq); | 4439 __ CmovX(x27, x25, eq); |
4440 END(); | 4440 END(); |
4441 | 4441 |
4442 RUN(); | 4442 RUN(); |
4443 | 4443 |
4444 ASSERT_EQUAL_64(0x0000000f, x0); | 4444 CHECK_EQUAL_64(0x0000000f, x0); |
4445 ASSERT_EQUAL_64(0x0000001f, x1); | 4445 CHECK_EQUAL_64(0x0000001f, x1); |
4446 ASSERT_EQUAL_64(0x00000020, x2); | 4446 CHECK_EQUAL_64(0x00000020, x2); |
4447 ASSERT_EQUAL_64(0x0000000f, x3); | 4447 CHECK_EQUAL_64(0x0000000f, x3); |
4448 ASSERT_EQUAL_64(0xffffffe0ffffffe0UL, x4); | 4448 CHECK_EQUAL_64(0xffffffe0ffffffe0UL, x4); |
4449 ASSERT_EQUAL_64(0x0000000f0000000fUL, x5); | 4449 CHECK_EQUAL_64(0x0000000f0000000fUL, x5); |
4450 ASSERT_EQUAL_64(0xffffffe0ffffffe1UL, x6); | 4450 CHECK_EQUAL_64(0xffffffe0ffffffe1UL, x6); |
4451 ASSERT_EQUAL_64(0x0000000f0000000fUL, x7); | 4451 CHECK_EQUAL_64(0x0000000f0000000fUL, x7); |
4452 ASSERT_EQUAL_64(0x00000001, x8); | 4452 CHECK_EQUAL_64(0x00000001, x8); |
4453 ASSERT_EQUAL_64(0xffffffff, x9); | 4453 CHECK_EQUAL_64(0xffffffff, x9); |
4454 ASSERT_EQUAL_64(0x0000001f00000020UL, x10); | 4454 CHECK_EQUAL_64(0x0000001f00000020UL, x10); |
4455 ASSERT_EQUAL_64(0xfffffff0fffffff0UL, x11); | 4455 CHECK_EQUAL_64(0xfffffff0fffffff0UL, x11); |
4456 ASSERT_EQUAL_64(0xfffffff0fffffff1UL, x12); | 4456 CHECK_EQUAL_64(0xfffffff0fffffff1UL, x12); |
4457 ASSERT_EQUAL_64(0x0000000f, x13); | 4457 CHECK_EQUAL_64(0x0000000f, x13); |
4458 ASSERT_EQUAL_64(0x0000000f0000000fUL, x14); | 4458 CHECK_EQUAL_64(0x0000000f0000000fUL, x14); |
4459 ASSERT_EQUAL_64(0x0000000f, x15); | 4459 CHECK_EQUAL_64(0x0000000f, x15); |
4460 ASSERT_EQUAL_64(0x0000000f0000000fUL, x18); | 4460 CHECK_EQUAL_64(0x0000000f0000000fUL, x18); |
4461 ASSERT_EQUAL_64(0, x24); | 4461 CHECK_EQUAL_64(0, x24); |
4462 ASSERT_EQUAL_64(0x0000001f0000001fUL, x25); | 4462 CHECK_EQUAL_64(0x0000001f0000001fUL, x25); |
4463 ASSERT_EQUAL_64(0x0000001f0000001fUL, x26); | 4463 CHECK_EQUAL_64(0x0000001f0000001fUL, x26); |
4464 ASSERT_EQUAL_64(0, x27); | 4464 CHECK_EQUAL_64(0, x27); |
4465 | 4465 |
4466 TEARDOWN(); | 4466 TEARDOWN(); |
4467 } | 4467 } |
4468 | 4468 |
4469 | 4469 |
4470 TEST(csel_imm) { | 4470 TEST(csel_imm) { |
4471 INIT_V8(); | 4471 INIT_V8(); |
4472 SETUP(); | 4472 SETUP(); |
4473 | 4473 |
4474 START(); | 4474 START(); |
(...skipping 17 matching lines...) |
4492 __ Csel(x11, x20, 1, ne); | 4492 __ Csel(x11, x20, 1, ne); |
4493 __ Csel(x12, x20, 2, ne); | 4493 __ Csel(x12, x20, 2, ne); |
4494 __ Csel(x13, x20, Operand(x20, ASR, 63), ne); | 4494 __ Csel(x13, x20, Operand(x20, ASR, 63), ne); |
4495 __ Csel(x14, x20, Operand(x20, ROR, 1), ne); | 4495 __ Csel(x14, x20, Operand(x20, ROR, 1), ne); |
4496 __ Csel(x15, x20, 3, eq); | 4496 __ Csel(x15, x20, 3, eq); |
4497 | 4497 |
4498 END(); | 4498 END(); |
4499 | 4499 |
4500 RUN(); | 4500 RUN(); |
4501 | 4501 |
4502 ASSERT_EQUAL_32(-2, w0); | 4502 CHECK_EQUAL_32(-2, w0); |
4503 ASSERT_EQUAL_32(-1, w1); | 4503 CHECK_EQUAL_32(-1, w1); |
4504 ASSERT_EQUAL_32(0, w2); | 4504 CHECK_EQUAL_32(0, w2); |
4505 ASSERT_EQUAL_32(1, w3); | 4505 CHECK_EQUAL_32(1, w3); |
4506 ASSERT_EQUAL_32(2, w4); | 4506 CHECK_EQUAL_32(2, w4); |
4507 ASSERT_EQUAL_32(-1, w5); | 4507 CHECK_EQUAL_32(-1, w5); |
4508 ASSERT_EQUAL_32(0x40000000, w6); | 4508 CHECK_EQUAL_32(0x40000000, w6); |
4509 ASSERT_EQUAL_32(0x80000000, w7); | 4509 CHECK_EQUAL_32(0x80000000, w7); |
4510 | 4510 |
4511 ASSERT_EQUAL_64(-2, x8); | 4511 CHECK_EQUAL_64(-2, x8); |
4512 ASSERT_EQUAL_64(-1, x9); | 4512 CHECK_EQUAL_64(-1, x9); |
4513 ASSERT_EQUAL_64(0, x10); | 4513 CHECK_EQUAL_64(0, x10); |
4514 ASSERT_EQUAL_64(1, x11); | 4514 CHECK_EQUAL_64(1, x11); |
4515 ASSERT_EQUAL_64(2, x12); | 4515 CHECK_EQUAL_64(2, x12); |
4516 ASSERT_EQUAL_64(-1, x13); | 4516 CHECK_EQUAL_64(-1, x13); |
4517 ASSERT_EQUAL_64(0x4000000000000000UL, x14); | 4517 CHECK_EQUAL_64(0x4000000000000000UL, x14); |
4518 ASSERT_EQUAL_64(0x8000000000000000UL, x15); | 4518 CHECK_EQUAL_64(0x8000000000000000UL, x15); |
4519 | 4519 |
4520 TEARDOWN(); | 4520 TEARDOWN(); |
4521 } | 4521 } |
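For readers following the expected values: CSEL writes the first source register when the condition holds and the operand otherwise. A minimal host-side model (the helper name is invented for illustration, not part of the test harness):

#include <cstdint>

// rd = cond_holds ? rn : operand
uint64_t CselModel(bool cond_holds, uint64_t rn, uint64_t operand) {
  return cond_holds ? rn : operand;
}

The visible checks are consistent with the flags encoding "equal": the ne selections fall through to their operands (x11 == 1, x12 == 2), while the eq selection at x15 returns the register value 0x8000000000000000.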
4522 | 4522 |
4523 | 4523 |
4524 TEST(lslv) { | 4524 TEST(lslv) { |
4525 INIT_V8(); | 4525 INIT_V8(); |
4526 SETUP(); | 4526 SETUP(); |
4527 | 4527 |
4528 uint64_t value = 0x0123456789abcdefUL; | 4528 uint64_t value = 0x0123456789abcdefUL; |
(...skipping 20 matching lines...)
4549 __ Lsl(w22, w0, w1); | 4549 __ Lsl(w22, w0, w1); |
4550 __ Lsl(w23, w0, w2); | 4550 __ Lsl(w23, w0, w2); |
4551 __ Lsl(w24, w0, w3); | 4551 __ Lsl(w24, w0, w3); |
4552 __ Lsl(w25, w0, w4); | 4552 __ Lsl(w25, w0, w4); |
4553 __ Lsl(w26, w0, w5); | 4553 __ Lsl(w26, w0, w5); |
4554 __ Lsl(w27, w0, w6); | 4554 __ Lsl(w27, w0, w6); |
4555 END(); | 4555 END(); |
4556 | 4556 |
4557 RUN(); | 4557 RUN(); |
4558 | 4558 |
4559 ASSERT_EQUAL_64(value, x0); | 4559 CHECK_EQUAL_64(value, x0); |
4560 ASSERT_EQUAL_64(value << (shift[0] & 63), x16); | 4560 CHECK_EQUAL_64(value << (shift[0] & 63), x16); |
4561 ASSERT_EQUAL_64(value << (shift[1] & 63), x17); | 4561 CHECK_EQUAL_64(value << (shift[1] & 63), x17); |
4562 ASSERT_EQUAL_64(value << (shift[2] & 63), x18); | 4562 CHECK_EQUAL_64(value << (shift[2] & 63), x18); |
4563 ASSERT_EQUAL_64(value << (shift[3] & 63), x19); | 4563 CHECK_EQUAL_64(value << (shift[3] & 63), x19); |
4564 ASSERT_EQUAL_64(value << (shift[4] & 63), x20); | 4564 CHECK_EQUAL_64(value << (shift[4] & 63), x20); |
4565 ASSERT_EQUAL_64(value << (shift[5] & 63), x21); | 4565 CHECK_EQUAL_64(value << (shift[5] & 63), x21); |
4566 ASSERT_EQUAL_32(value << (shift[0] & 31), w22); | 4566 CHECK_EQUAL_32(value << (shift[0] & 31), w22); |
4567 ASSERT_EQUAL_32(value << (shift[1] & 31), w23); | 4567 CHECK_EQUAL_32(value << (shift[1] & 31), w23); |
4568 ASSERT_EQUAL_32(value << (shift[2] & 31), w24); | 4568 CHECK_EQUAL_32(value << (shift[2] & 31), w24); |
4569 ASSERT_EQUAL_32(value << (shift[3] & 31), w25); | 4569 CHECK_EQUAL_32(value << (shift[3] & 31), w25); |
4570 ASSERT_EQUAL_32(value << (shift[4] & 31), w26); | 4570 CHECK_EQUAL_32(value << (shift[4] & 31), w26); |
4571 ASSERT_EQUAL_32(value << (shift[5] & 31), w27); | 4571 CHECK_EQUAL_32(value << (shift[5] & 31), w27); |
4572 | 4572 |
4573 TEARDOWN(); | 4573 TEARDOWN(); |
4574 } | 4574 } |
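The reference expressions in these checks, value << (shift[i] & 63) for X registers and value << (shift[i] & 31) for W registers, encode the rule that the variable shift instructions use the shift register modulo the destination width. A stand-alone sketch of the same rule (hypothetical helper names; the lsrv and asrv tests below apply the identical masking with >>):

#include <cstdint>

// Variable shifts only consume the low bits of the shift amount.
uint64_t LslX(uint64_t value, uint64_t amount) { return value << (amount & 63); }
uint32_t LslW(uint32_t value, uint32_t amount) { return value << (amount & 31); }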
4575 | 4575 |
4576 | 4576 |
4577 TEST(lsrv) { | 4577 TEST(lsrv) { |
4578 INIT_V8(); | 4578 INIT_V8(); |
4579 SETUP(); | 4579 SETUP(); |
4580 | 4580 |
4581 uint64_t value = 0x0123456789abcdefUL; | 4581 uint64_t value = 0x0123456789abcdefUL; |
(...skipping 20 matching lines...)
4602 __ Lsr(w22, w0, w1); | 4602 __ Lsr(w22, w0, w1); |
4603 __ Lsr(w23, w0, w2); | 4603 __ Lsr(w23, w0, w2); |
4604 __ Lsr(w24, w0, w3); | 4604 __ Lsr(w24, w0, w3); |
4605 __ Lsr(w25, w0, w4); | 4605 __ Lsr(w25, w0, w4); |
4606 __ Lsr(w26, w0, w5); | 4606 __ Lsr(w26, w0, w5); |
4607 __ Lsr(w27, w0, w6); | 4607 __ Lsr(w27, w0, w6); |
4608 END(); | 4608 END(); |
4609 | 4609 |
4610 RUN(); | 4610 RUN(); |
4611 | 4611 |
4612 ASSERT_EQUAL_64(value, x0); | 4612 CHECK_EQUAL_64(value, x0); |
4613 ASSERT_EQUAL_64(value >> (shift[0] & 63), x16); | 4613 CHECK_EQUAL_64(value >> (shift[0] & 63), x16); |
4614 ASSERT_EQUAL_64(value >> (shift[1] & 63), x17); | 4614 CHECK_EQUAL_64(value >> (shift[1] & 63), x17); |
4615 ASSERT_EQUAL_64(value >> (shift[2] & 63), x18); | 4615 CHECK_EQUAL_64(value >> (shift[2] & 63), x18); |
4616 ASSERT_EQUAL_64(value >> (shift[3] & 63), x19); | 4616 CHECK_EQUAL_64(value >> (shift[3] & 63), x19); |
4617 ASSERT_EQUAL_64(value >> (shift[4] & 63), x20); | 4617 CHECK_EQUAL_64(value >> (shift[4] & 63), x20); |
4618 ASSERT_EQUAL_64(value >> (shift[5] & 63), x21); | 4618 CHECK_EQUAL_64(value >> (shift[5] & 63), x21); |
4619 | 4619 |
4620 value &= 0xffffffffUL; | 4620 value &= 0xffffffffUL; |
4621 ASSERT_EQUAL_32(value >> (shift[0] & 31), w22); | 4621 CHECK_EQUAL_32(value >> (shift[0] & 31), w22); |
4622 ASSERT_EQUAL_32(value >> (shift[1] & 31), w23); | 4622 CHECK_EQUAL_32(value >> (shift[1] & 31), w23); |
4623 ASSERT_EQUAL_32(value >> (shift[2] & 31), w24); | 4623 CHECK_EQUAL_32(value >> (shift[2] & 31), w24); |
4624 ASSERT_EQUAL_32(value >> (shift[3] & 31), w25); | 4624 CHECK_EQUAL_32(value >> (shift[3] & 31), w25); |
4625 ASSERT_EQUAL_32(value >> (shift[4] & 31), w26); | 4625 CHECK_EQUAL_32(value >> (shift[4] & 31), w26); |
4626 ASSERT_EQUAL_32(value >> (shift[5] & 31), w27); | 4626 CHECK_EQUAL_32(value >> (shift[5] & 31), w27); |
4627 | 4627 |
4628 TEARDOWN(); | 4628 TEARDOWN(); |
4629 } | 4629 } |
4630 | 4630 |
4631 | 4631 |
4632 TEST(asrv) { | 4632 TEST(asrv) { |
4633 INIT_V8(); | 4633 INIT_V8(); |
4634 SETUP(); | 4634 SETUP(); |
4635 | 4635 |
4636 int64_t value = 0xfedcba98fedcba98UL; | 4636 int64_t value = 0xfedcba98fedcba98UL; |
(...skipping 20 matching lines...)
4657 __ Asr(w22, w0, w1); | 4657 __ Asr(w22, w0, w1); |
4658 __ Asr(w23, w0, w2); | 4658 __ Asr(w23, w0, w2); |
4659 __ Asr(w24, w0, w3); | 4659 __ Asr(w24, w0, w3); |
4660 __ Asr(w25, w0, w4); | 4660 __ Asr(w25, w0, w4); |
4661 __ Asr(w26, w0, w5); | 4661 __ Asr(w26, w0, w5); |
4662 __ Asr(w27, w0, w6); | 4662 __ Asr(w27, w0, w6); |
4663 END(); | 4663 END(); |
4664 | 4664 |
4665 RUN(); | 4665 RUN(); |
4666 | 4666 |
4667 ASSERT_EQUAL_64(value, x0); | 4667 CHECK_EQUAL_64(value, x0); |
4668 ASSERT_EQUAL_64(value >> (shift[0] & 63), x16); | 4668 CHECK_EQUAL_64(value >> (shift[0] & 63), x16); |
4669 ASSERT_EQUAL_64(value >> (shift[1] & 63), x17); | 4669 CHECK_EQUAL_64(value >> (shift[1] & 63), x17); |
4670 ASSERT_EQUAL_64(value >> (shift[2] & 63), x18); | 4670 CHECK_EQUAL_64(value >> (shift[2] & 63), x18); |
4671 ASSERT_EQUAL_64(value >> (shift[3] & 63), x19); | 4671 CHECK_EQUAL_64(value >> (shift[3] & 63), x19); |
4672 ASSERT_EQUAL_64(value >> (shift[4] & 63), x20); | 4672 CHECK_EQUAL_64(value >> (shift[4] & 63), x20); |
4673 ASSERT_EQUAL_64(value >> (shift[5] & 63), x21); | 4673 CHECK_EQUAL_64(value >> (shift[5] & 63), x21); |
4674 | 4674 |
4675 int32_t value32 = static_cast<int32_t>(value & 0xffffffffUL); | 4675 int32_t value32 = static_cast<int32_t>(value & 0xffffffffUL); |
4676 ASSERT_EQUAL_32(value32 >> (shift[0] & 31), w22); | 4676 CHECK_EQUAL_32(value32 >> (shift[0] & 31), w22); |
4677 ASSERT_EQUAL_32(value32 >> (shift[1] & 31), w23); | 4677 CHECK_EQUAL_32(value32 >> (shift[1] & 31), w23); |
4678 ASSERT_EQUAL_32(value32 >> (shift[2] & 31), w24); | 4678 CHECK_EQUAL_32(value32 >> (shift[2] & 31), w24); |
4679 ASSERT_EQUAL_32(value32 >> (shift[3] & 31), w25); | 4679 CHECK_EQUAL_32(value32 >> (shift[3] & 31), w25); |
4680 ASSERT_EQUAL_32(value32 >> (shift[4] & 31), w26); | 4680 CHECK_EQUAL_32(value32 >> (shift[4] & 31), w26); |
4681 ASSERT_EQUAL_32(value32 >> (shift[5] & 31), w27); | 4681 CHECK_EQUAL_32(value32 >> (shift[5] & 31), w27); |
4682 | 4682 |
4683 TEARDOWN(); | 4683 TEARDOWN(); |
4684 } | 4684 } |
4685 | 4685 |
4686 | 4686 |
4687 TEST(rorv) { | 4687 TEST(rorv) { |
4688 INIT_V8(); | 4688 INIT_V8(); |
4689 SETUP(); | 4689 SETUP(); |
4690 | 4690 |
4691 uint64_t value = 0x0123456789abcdefUL; | 4691 uint64_t value = 0x0123456789abcdefUL; |
(...skipping 20 matching lines...)
4712 __ Ror(w22, w0, w1); | 4712 __ Ror(w22, w0, w1); |
4713 __ Ror(w23, w0, w2); | 4713 __ Ror(w23, w0, w2); |
4714 __ Ror(w24, w0, w3); | 4714 __ Ror(w24, w0, w3); |
4715 __ Ror(w25, w0, w4); | 4715 __ Ror(w25, w0, w4); |
4716 __ Ror(w26, w0, w5); | 4716 __ Ror(w26, w0, w5); |
4717 __ Ror(w27, w0, w6); | 4717 __ Ror(w27, w0, w6); |
4718 END(); | 4718 END(); |
4719 | 4719 |
4720 RUN(); | 4720 RUN(); |
4721 | 4721 |
4722 ASSERT_EQUAL_64(value, x0); | 4722 CHECK_EQUAL_64(value, x0); |
4723 ASSERT_EQUAL_64(0xf0123456789abcdeUL, x16); | 4723 CHECK_EQUAL_64(0xf0123456789abcdeUL, x16); |
4724 ASSERT_EQUAL_64(0xef0123456789abcdUL, x17); | 4724 CHECK_EQUAL_64(0xef0123456789abcdUL, x17); |
4725 ASSERT_EQUAL_64(0xdef0123456789abcUL, x18); | 4725 CHECK_EQUAL_64(0xdef0123456789abcUL, x18); |
4726 ASSERT_EQUAL_64(0xcdef0123456789abUL, x19); | 4726 CHECK_EQUAL_64(0xcdef0123456789abUL, x19); |
4727 ASSERT_EQUAL_64(0xabcdef0123456789UL, x20); | 4727 CHECK_EQUAL_64(0xabcdef0123456789UL, x20); |
4728 ASSERT_EQUAL_64(0x789abcdef0123456UL, x21); | 4728 CHECK_EQUAL_64(0x789abcdef0123456UL, x21); |
4729 ASSERT_EQUAL_32(0xf89abcde, w22); | 4729 CHECK_EQUAL_32(0xf89abcde, w22); |
4730 ASSERT_EQUAL_32(0xef89abcd, w23); | 4730 CHECK_EQUAL_32(0xef89abcd, w23); |
4731 ASSERT_EQUAL_32(0xdef89abc, w24); | 4731 CHECK_EQUAL_32(0xdef89abc, w24); |
4732 ASSERT_EQUAL_32(0xcdef89ab, w25); | 4732 CHECK_EQUAL_32(0xcdef89ab, w25); |
4733 ASSERT_EQUAL_32(0xabcdef89, w26); | 4733 CHECK_EQUAL_32(0xabcdef89, w26); |
4734 ASSERT_EQUAL_32(0xf89abcde, w27); | 4734 CHECK_EQUAL_32(0xf89abcde, w27); |
4735 | 4735 |
4736 TEARDOWN(); | 4736 TEARDOWN(); |
4737 } | 4737 } |
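Unlike the shift tests, the rotate expectations are spelled out as constants. They follow from the usual rotate-right definition, sketched here with a hypothetical helper (the amount is again taken modulo the register width):

#include <cstdint>

uint64_t RorX(uint64_t value, unsigned amount) {
  amount &= 63;
  if (amount == 0) return value;  // avoid the undefined shift by 64
  return (value >> amount) | (value << (64 - amount));
}

// Rotating 0x0123456789abcdefUL right by 4 gives 0xf0123456789abcdeUL, the
// value checked for x16 above.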
4738 | 4738 |
4739 | 4739 |
4740 TEST(bfm) { | 4740 TEST(bfm) { |
4741 INIT_V8(); | 4741 INIT_V8(); |
4742 SETUP(); | 4742 SETUP(); |
4743 | 4743 |
4744 START(); | 4744 START(); |
(...skipping 13 matching lines...)
4758 __ bfm(w21, w1, 24, 15); | 4758 __ bfm(w21, w1, 24, 15); |
4759 | 4759 |
4760 // Aliases. | 4760 // Aliases. |
4761 __ Bfi(x12, x1, 16, 8); | 4761 __ Bfi(x12, x1, 16, 8); |
4762 __ Bfxil(x13, x1, 16, 8); | 4762 __ Bfxil(x13, x1, 16, 8); |
4763 END(); | 4763 END(); |
4764 | 4764 |
4765 RUN(); | 4765 RUN(); |
4766 | 4766 |
4767 | 4767 |
4768 ASSERT_EQUAL_64(0x88888888888889abL, x10); | 4768 CHECK_EQUAL_64(0x88888888888889abL, x10); |
4769 ASSERT_EQUAL_64(0x8888cdef88888888L, x11); | 4769 CHECK_EQUAL_64(0x8888cdef88888888L, x11); |
4770 | 4770 |
4771 ASSERT_EQUAL_32(0x888888ab, w20); | 4771 CHECK_EQUAL_32(0x888888ab, w20); |
4772 ASSERT_EQUAL_32(0x88cdef88, w21); | 4772 CHECK_EQUAL_32(0x88cdef88, w21); |
4773 | 4773 |
4774 ASSERT_EQUAL_64(0x8888888888ef8888L, x12); | 4774 CHECK_EQUAL_64(0x8888888888ef8888L, x12); |
4775 ASSERT_EQUAL_64(0x88888888888888abL, x13); | 4775 CHECK_EQUAL_64(0x88888888888888abL, x13); |
4776 | 4776 |
4777 TEARDOWN(); | 4777 TEARDOWN(); |
4778 } | 4778 } |
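The two alias checks are easier to read with the bitfield semantics written out. A host-side sketch with invented helper names, using dst == 0x8888888888888888 and src == 0x0123456789abcdef (the patterns suggested by the expected values; the set-up Movs are in the elided lines):

#include <cstdint>

// BFI: insert the low 'width' bits of src at bit position 'lsb' of dst.
uint64_t BfiModel(uint64_t dst, uint64_t src, unsigned lsb, unsigned width) {
  uint64_t mask = ((uint64_t{1} << width) - 1) << lsb;
  return (dst & ~mask) | ((src << lsb) & mask);
}

// BFXIL: extract 'width' bits of src at 'lsb', place them in the low bits of dst.
uint64_t BfxilModel(uint64_t dst, uint64_t src, unsigned lsb, unsigned width) {
  uint64_t mask = (uint64_t{1} << width) - 1;
  return (dst & ~mask) | ((src >> lsb) & mask);
}

// BfiModel(dst, src, 16, 8)   == 0x8888888888ef8888  (x12 above)
// BfxilModel(dst, src, 16, 8) == 0x88888888888888ab  (x13 above)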
4779 | 4779 |
4780 | 4780 |
4781 TEST(sbfm) { | 4781 TEST(sbfm) { |
4782 INIT_V8(); | 4782 INIT_V8(); |
4783 SETUP(); | 4783 SETUP(); |
4784 | 4784 |
4785 START(); | 4785 START(); |
(...skipping 21 matching lines...)
4807 __ Sxtb(x25, x2); | 4807 __ Sxtb(x25, x2); |
4808 __ Sxth(x26, w1); | 4808 __ Sxth(x26, w1); |
4809 __ Sxth(x27, x2); | 4809 __ Sxth(x27, x2); |
4810 __ Sxtw(x28, w1); | 4810 __ Sxtw(x28, w1); |
4811 __ Sxtw(x29, x2); | 4811 __ Sxtw(x29, x2); |
4812 END(); | 4812 END(); |
4813 | 4813 |
4814 RUN(); | 4814 RUN(); |
4815 | 4815 |
4816 | 4816 |
4817 ASSERT_EQUAL_64(0xffffffffffff89abL, x10); | 4817 CHECK_EQUAL_64(0xffffffffffff89abL, x10); |
4818 ASSERT_EQUAL_64(0xffffcdef00000000L, x11); | 4818 CHECK_EQUAL_64(0xffffcdef00000000L, x11); |
4819 ASSERT_EQUAL_64(0x4567L, x12); | 4819 CHECK_EQUAL_64(0x4567L, x12); |
4820 ASSERT_EQUAL_64(0x789abcdef0000L, x13); | 4820 CHECK_EQUAL_64(0x789abcdef0000L, x13); |
4821 | 4821 |
4822 ASSERT_EQUAL_32(0xffffffab, w14); | 4822 CHECK_EQUAL_32(0xffffffab, w14); |
4823 ASSERT_EQUAL_32(0xffcdef00, w15); | 4823 CHECK_EQUAL_32(0xffcdef00, w15); |
4824 ASSERT_EQUAL_32(0x54, w16); | 4824 CHECK_EQUAL_32(0x54, w16); |
4825 ASSERT_EQUAL_32(0x00321000, w17); | 4825 CHECK_EQUAL_32(0x00321000, w17); |
4826 | 4826 |
4827 ASSERT_EQUAL_64(0x01234567L, x18); | 4827 CHECK_EQUAL_64(0x01234567L, x18); |
4828 ASSERT_EQUAL_64(0xfffffffffedcba98L, x19); | 4828 CHECK_EQUAL_64(0xfffffffffedcba98L, x19); |
4829 ASSERT_EQUAL_64(0xffffffffffcdef00L, x20); | 4829 CHECK_EQUAL_64(0xffffffffffcdef00L, x20); |
4830 ASSERT_EQUAL_64(0x321000L, x21); | 4830 CHECK_EQUAL_64(0x321000L, x21); |
4831 ASSERT_EQUAL_64(0xffffffffffffabcdL, x22); | 4831 CHECK_EQUAL_64(0xffffffffffffabcdL, x22); |
4832 ASSERT_EQUAL_64(0x5432L, x23); | 4832 CHECK_EQUAL_64(0x5432L, x23); |
4833 ASSERT_EQUAL_64(0xffffffffffffffefL, x24); | 4833 CHECK_EQUAL_64(0xffffffffffffffefL, x24); |
4834 ASSERT_EQUAL_64(0x10, x25); | 4834 CHECK_EQUAL_64(0x10, x25); |
4835 ASSERT_EQUAL_64(0xffffffffffffcdefL, x26); | 4835 CHECK_EQUAL_64(0xffffffffffffcdefL, x26); |
4836 ASSERT_EQUAL_64(0x3210, x27); | 4836 CHECK_EQUAL_64(0x3210, x27); |
4837 ASSERT_EQUAL_64(0xffffffff89abcdefL, x28); | 4837 CHECK_EQUAL_64(0xffffffff89abcdefL, x28); |
4838 ASSERT_EQUAL_64(0x76543210, x29); | 4838 CHECK_EQUAL_64(0x76543210, x29); |
4839 | 4839 |
4840 TEARDOWN(); | 4840 TEARDOWN(); |
4841 } | 4841 } |
4842 | 4842 |
4843 | 4843 |
4844 TEST(ubfm) { | 4844 TEST(ubfm) { |
4845 INIT_V8(); | 4845 INIT_V8(); |
4846 SETUP(); | 4846 SETUP(); |
4847 | 4847 |
4848 START(); | 4848 START(); |
(...skipping 19 matching lines...)
4868 __ Lsr(x17, x1, 32); | 4868 __ Lsr(x17, x1, 32); |
4869 __ Ubfiz(x18, x1, 8, 16); | 4869 __ Ubfiz(x18, x1, 8, 16); |
4870 __ Ubfx(x19, x1, 8, 16); | 4870 __ Ubfx(x19, x1, 8, 16); |
4871 __ Uxtb(x20, x1); | 4871 __ Uxtb(x20, x1); |
4872 __ Uxth(x21, x1); | 4872 __ Uxth(x21, x1); |
4873 __ Uxtw(x22, x1); | 4873 __ Uxtw(x22, x1); |
4874 END(); | 4874 END(); |
4875 | 4875 |
4876 RUN(); | 4876 RUN(); |
4877 | 4877 |
4878 ASSERT_EQUAL_64(0x00000000000089abL, x10); | 4878 CHECK_EQUAL_64(0x00000000000089abL, x10); |
4879 ASSERT_EQUAL_64(0x0000cdef00000000L, x11); | 4879 CHECK_EQUAL_64(0x0000cdef00000000L, x11); |
4880 ASSERT_EQUAL_64(0x4567L, x12); | 4880 CHECK_EQUAL_64(0x4567L, x12); |
4881 ASSERT_EQUAL_64(0x789abcdef0000L, x13); | 4881 CHECK_EQUAL_64(0x789abcdef0000L, x13); |
4882 | 4882 |
4883 ASSERT_EQUAL_32(0x000000ab, w25); | 4883 CHECK_EQUAL_32(0x000000ab, w25); |
4884 ASSERT_EQUAL_32(0x00cdef00, w26); | 4884 CHECK_EQUAL_32(0x00cdef00, w26); |
4885 ASSERT_EQUAL_32(0x54, w27); | 4885 CHECK_EQUAL_32(0x54, w27); |
4886 ASSERT_EQUAL_32(0x00321000, w28); | 4886 CHECK_EQUAL_32(0x00321000, w28); |
4887 | 4887 |
4888 ASSERT_EQUAL_64(0x8000000000000000L, x15); | 4888 CHECK_EQUAL_64(0x8000000000000000L, x15); |
4889 ASSERT_EQUAL_64(0x0123456789abcdefL, x16); | 4889 CHECK_EQUAL_64(0x0123456789abcdefL, x16); |
4890 ASSERT_EQUAL_64(0x01234567L, x17); | 4890 CHECK_EQUAL_64(0x01234567L, x17); |
4891 ASSERT_EQUAL_64(0xcdef00L, x18); | 4891 CHECK_EQUAL_64(0xcdef00L, x18); |
4892 ASSERT_EQUAL_64(0xabcdL, x19); | 4892 CHECK_EQUAL_64(0xabcdL, x19); |
4893 ASSERT_EQUAL_64(0xefL, x20); | 4893 CHECK_EQUAL_64(0xefL, x20); |
4894 ASSERT_EQUAL_64(0xcdefL, x21); | 4894 CHECK_EQUAL_64(0xcdefL, x21); |
4895 ASSERT_EQUAL_64(0x89abcdefL, x22); | 4895 CHECK_EQUAL_64(0x89abcdefL, x22); |
4896 | 4896 |
4897 TEARDOWN(); | 4897 TEARDOWN(); |
4898 } | 4898 } |
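The same style of host-side model covers the unsigned aliases checked above (invented names; src is the value 0x0123456789abcdef implied by the x17 Lsr-by-32 and x22 Uxtw checks):

#include <cstdint>

// UBFX: unsigned bitfield extract.
uint64_t UbfxModel(uint64_t src, unsigned lsb, unsigned width) {
  return (src >> lsb) & ((uint64_t{1} << width) - 1);
}

// UBFIZ: unsigned bitfield insert into a zeroed destination.
uint64_t UbfizModel(uint64_t src, unsigned lsb, unsigned width) {
  return (src & ((uint64_t{1} << width) - 1)) << lsb;
}

// UbfxModel(src, 8, 16)  == 0xabcd    (x19 above)
// UbfizModel(src, 8, 16) == 0xcdef00  (x18 above)
// Uxtb, Uxth and Uxtw behave like UbfxModel(src, 0, 8/16/32) (x20-x22).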
4899 | 4899 |
4900 | 4900 |
4901 TEST(extr) { | 4901 TEST(extr) { |
4902 INIT_V8(); | 4902 INIT_V8(); |
4903 SETUP(); | 4903 SETUP(); |
4904 | 4904 |
4905 START(); | 4905 START(); |
4906 __ Mov(x1, 0x0123456789abcdefL); | 4906 __ Mov(x1, 0x0123456789abcdefL); |
4907 __ Mov(x2, 0xfedcba9876543210L); | 4907 __ Mov(x2, 0xfedcba9876543210L); |
4908 | 4908 |
4909 __ Extr(w10, w1, w2, 0); | 4909 __ Extr(w10, w1, w2, 0); |
4910 __ Extr(x11, x1, x2, 0); | 4910 __ Extr(x11, x1, x2, 0); |
4911 __ Extr(w12, w1, w2, 1); | 4911 __ Extr(w12, w1, w2, 1); |
4912 __ Extr(x13, x2, x1, 2); | 4912 __ Extr(x13, x2, x1, 2); |
4913 | 4913 |
4914 __ Ror(w20, w1, 0); | 4914 __ Ror(w20, w1, 0); |
4915 __ Ror(x21, x1, 0); | 4915 __ Ror(x21, x1, 0); |
4916 __ Ror(w22, w2, 17); | 4916 __ Ror(w22, w2, 17); |
4917 __ Ror(w23, w1, 31); | 4917 __ Ror(w23, w1, 31); |
4918 __ Ror(x24, x2, 1); | 4918 __ Ror(x24, x2, 1); |
4919 __ Ror(x25, x1, 63); | 4919 __ Ror(x25, x1, 63); |
4920 END(); | 4920 END(); |
4921 | 4921 |
4922 RUN(); | 4922 RUN(); |
4923 | 4923 |
4924 ASSERT_EQUAL_64(0x76543210, x10); | 4924 CHECK_EQUAL_64(0x76543210, x10); |
4925 ASSERT_EQUAL_64(0xfedcba9876543210L, x11); | 4925 CHECK_EQUAL_64(0xfedcba9876543210L, x11); |
4926 ASSERT_EQUAL_64(0xbb2a1908, x12); | 4926 CHECK_EQUAL_64(0xbb2a1908, x12); |
4927 ASSERT_EQUAL_64(0x0048d159e26af37bUL, x13); | 4927 CHECK_EQUAL_64(0x0048d159e26af37bUL, x13); |
4928 ASSERT_EQUAL_64(0x89abcdef, x20); | 4928 CHECK_EQUAL_64(0x89abcdef, x20); |
4929 ASSERT_EQUAL_64(0x0123456789abcdefL, x21); | 4929 CHECK_EQUAL_64(0x0123456789abcdefL, x21); |
4930 ASSERT_EQUAL_64(0x19083b2a, x22); | 4930 CHECK_EQUAL_64(0x19083b2a, x22); |
4931 ASSERT_EQUAL_64(0x13579bdf, x23); | 4931 CHECK_EQUAL_64(0x13579bdf, x23); |
4932 ASSERT_EQUAL_64(0x7f6e5d4c3b2a1908UL, x24); | 4932 CHECK_EQUAL_64(0x7f6e5d4c3b2a1908UL, x24); |
4933 ASSERT_EQUAL_64(0x02468acf13579bdeUL, x25); | 4933 CHECK_EQUAL_64(0x02468acf13579bdeUL, x25); |
4934 | 4934 |
4935 TEARDOWN(); | 4935 TEARDOWN(); |
4936 } | 4936 } |
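EXTR takes a register-width window from the concatenation of its two sources, and ROR by an immediate is EXTR with both sources equal, which is how the rotate expectations above arise. A host-side sketch with an invented helper name:

#include <cstdint>

// Result is bits [lsb + 63 : lsb] of the 128-bit value hi:lo.
uint64_t ExtrModel(uint64_t hi, uint64_t lo, unsigned lsb) {
  if (lsb == 0) return lo;
  return (lo >> lsb) | (hi << (64 - lsb));
}

// ExtrModel(0xfedcba9876543210UL, 0x0123456789abcdefUL, 2)
//     == 0x0048d159e26af37bUL, the value checked for x13 above.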
4937 | 4937 |
4938 | 4938 |
4939 TEST(fmov_imm) { | 4939 TEST(fmov_imm) { |
4940 INIT_V8(); | 4940 INIT_V8(); |
4941 SETUP(); | 4941 SETUP(); |
4942 | 4942 |
4943 START(); | 4943 START(); |
4944 __ Fmov(s11, 1.0); | 4944 __ Fmov(s11, 1.0); |
4945 __ Fmov(d22, -13.0); | 4945 __ Fmov(d22, -13.0); |
4946 __ Fmov(s1, 255.0); | 4946 __ Fmov(s1, 255.0); |
4947 __ Fmov(d2, 12.34567); | 4947 __ Fmov(d2, 12.34567); |
4948 __ Fmov(s3, 0.0); | 4948 __ Fmov(s3, 0.0); |
4949 __ Fmov(d4, 0.0); | 4949 __ Fmov(d4, 0.0); |
4950 __ Fmov(s5, kFP32PositiveInfinity); | 4950 __ Fmov(s5, kFP32PositiveInfinity); |
4951 __ Fmov(d6, kFP64NegativeInfinity); | 4951 __ Fmov(d6, kFP64NegativeInfinity); |
4952 END(); | 4952 END(); |
4953 | 4953 |
4954 RUN(); | 4954 RUN(); |
4955 | 4955 |
4956 ASSERT_EQUAL_FP32(1.0, s11); | 4956 CHECK_EQUAL_FP32(1.0, s11); |
4957 ASSERT_EQUAL_FP64(-13.0, d22); | 4957 CHECK_EQUAL_FP64(-13.0, d22); |
4958 ASSERT_EQUAL_FP32(255.0, s1); | 4958 CHECK_EQUAL_FP32(255.0, s1); |
4959 ASSERT_EQUAL_FP64(12.34567, d2); | 4959 CHECK_EQUAL_FP64(12.34567, d2); |
4960 ASSERT_EQUAL_FP32(0.0, s3); | 4960 CHECK_EQUAL_FP32(0.0, s3); |
4961 ASSERT_EQUAL_FP64(0.0, d4); | 4961 CHECK_EQUAL_FP64(0.0, d4); |
4962 ASSERT_EQUAL_FP32(kFP32PositiveInfinity, s5); | 4962 CHECK_EQUAL_FP32(kFP32PositiveInfinity, s5); |
4963 ASSERT_EQUAL_FP64(kFP64NegativeInfinity, d6); | 4963 CHECK_EQUAL_FP64(kFP64NegativeInfinity, d6); |
4964 | 4964 |
4965 TEARDOWN(); | 4965 TEARDOWN(); |
4966 } | 4966 } |
4967 | 4967 |
4968 | 4968 |
4969 TEST(fmov_reg) { | 4969 TEST(fmov_reg) { |
4970 INIT_V8(); | 4970 INIT_V8(); |
4971 SETUP(); | 4971 SETUP(); |
4972 | 4972 |
4973 START(); | 4973 START(); |
4974 __ Fmov(s20, 1.0); | 4974 __ Fmov(s20, 1.0); |
4975 __ Fmov(w10, s20); | 4975 __ Fmov(w10, s20); |
4976 __ Fmov(s30, w10); | 4976 __ Fmov(s30, w10); |
4977 __ Fmov(s5, s20); | 4977 __ Fmov(s5, s20); |
4978 __ Fmov(d1, -13.0); | 4978 __ Fmov(d1, -13.0); |
4979 __ Fmov(x1, d1); | 4979 __ Fmov(x1, d1); |
4980 __ Fmov(d2, x1); | 4980 __ Fmov(d2, x1); |
4981 __ Fmov(d4, d1); | 4981 __ Fmov(d4, d1); |
4982 __ Fmov(d6, rawbits_to_double(0x0123456789abcdefL)); | 4982 __ Fmov(d6, rawbits_to_double(0x0123456789abcdefL)); |
4983 __ Fmov(s6, s6); | 4983 __ Fmov(s6, s6); |
4984 END(); | 4984 END(); |
4985 | 4985 |
4986 RUN(); | 4986 RUN(); |
4987 | 4987 |
4988 ASSERT_EQUAL_32(float_to_rawbits(1.0), w10); | 4988 CHECK_EQUAL_32(float_to_rawbits(1.0), w10); |
4989 ASSERT_EQUAL_FP32(1.0, s30); | 4989 CHECK_EQUAL_FP32(1.0, s30); |
4990 ASSERT_EQUAL_FP32(1.0, s5); | 4990 CHECK_EQUAL_FP32(1.0, s5); |
4991 ASSERT_EQUAL_64(double_to_rawbits(-13.0), x1); | 4991 CHECK_EQUAL_64(double_to_rawbits(-13.0), x1); |
4992 ASSERT_EQUAL_FP64(-13.0, d2); | 4992 CHECK_EQUAL_FP64(-13.0, d2); |
4993 ASSERT_EQUAL_FP64(-13.0, d4); | 4993 CHECK_EQUAL_FP64(-13.0, d4); |
4994 ASSERT_EQUAL_FP32(rawbits_to_float(0x89abcdef), s6); | 4994 CHECK_EQUAL_FP32(rawbits_to_float(0x89abcdef), s6); |
4995 | 4995 |
4996 TEARDOWN(); | 4996 TEARDOWN(); |
4997 } | 4997 } |
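The w10 and x1 checks rely on Fmov between core and FP registers being a raw bit move; float_to_rawbits and double_to_rawbits perform the equivalent reinterpretation on the host. A self-contained sketch of that reinterpretation (hypothetical helper, shown only to make the checks concrete):

#include <cstdint>
#include <cstring>

uint64_t DoubleToRawbits(double value) {
  uint64_t bits;
  std::memcpy(&bits, &value, sizeof(bits));  // bit-for-bit copy, no conversion
  return bits;
}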
4998 | 4998 |
4999 | 4999 |
5000 TEST(fadd) { | 5000 TEST(fadd) { |
5001 INIT_V8(); | 5001 INIT_V8(); |
5002 SETUP(); | 5002 SETUP(); |
5003 | 5003 |
5004 START(); | 5004 START(); |
(...skipping 23 matching lines...)
5028 __ Fadd(d8, d29, d31); | 5028 __ Fadd(d8, d29, d31); |
5029 __ Fadd(d9, d26, d31); | 5029 __ Fadd(d9, d26, d31); |
5030 __ Fadd(d10, d27, d31); | 5030 __ Fadd(d10, d27, d31); |
5031 __ Fadd(d11, d28, d31); | 5031 __ Fadd(d11, d28, d31); |
5032 __ Fadd(d12, d27, d28); | 5032 __ Fadd(d12, d27, d28); |
5033 __ Fadd(d13, d28, d27); | 5033 __ Fadd(d13, d28, d27); |
5034 END(); | 5034 END(); |
5035 | 5035 |
5036 RUN(); | 5036 RUN(); |
5037 | 5037 |
5038 ASSERT_EQUAL_FP32(4.25, s0); | 5038 CHECK_EQUAL_FP32(4.25, s0); |
5039 ASSERT_EQUAL_FP32(1.0, s1); | 5039 CHECK_EQUAL_FP32(1.0, s1); |
5040 ASSERT_EQUAL_FP32(1.0, s2); | 5040 CHECK_EQUAL_FP32(1.0, s2); |
5041 ASSERT_EQUAL_FP32(kFP32PositiveInfinity, s3); | 5041 CHECK_EQUAL_FP32(kFP32PositiveInfinity, s3); |
5042 ASSERT_EQUAL_FP32(kFP32NegativeInfinity, s4); | 5042 CHECK_EQUAL_FP32(kFP32NegativeInfinity, s4); |
5043 ASSERT_EQUAL_FP32(kFP32DefaultNaN, s5); | 5043 CHECK_EQUAL_FP32(kFP32DefaultNaN, s5); |
5044 ASSERT_EQUAL_FP32(kFP32DefaultNaN, s6); | 5044 CHECK_EQUAL_FP32(kFP32DefaultNaN, s6); |
5045 ASSERT_EQUAL_FP64(0.25, d7); | 5045 CHECK_EQUAL_FP64(0.25, d7); |
5046 ASSERT_EQUAL_FP64(2.25, d8); | 5046 CHECK_EQUAL_FP64(2.25, d8); |
5047 ASSERT_EQUAL_FP64(2.25, d9); | 5047 CHECK_EQUAL_FP64(2.25, d9); |
5048 ASSERT_EQUAL_FP64(kFP64PositiveInfinity, d10); | 5048 CHECK_EQUAL_FP64(kFP64PositiveInfinity, d10); |
5049 ASSERT_EQUAL_FP64(kFP64NegativeInfinity, d11); | 5049 CHECK_EQUAL_FP64(kFP64NegativeInfinity, d11); |
5050 ASSERT_EQUAL_FP64(kFP64DefaultNaN, d12); | 5050 CHECK_EQUAL_FP64(kFP64DefaultNaN, d12); |
5051 ASSERT_EQUAL_FP64(kFP64DefaultNaN, d13); | 5051 CHECK_EQUAL_FP64(kFP64DefaultNaN, d13); |
5052 | 5052 |
5053 TEARDOWN(); | 5053 TEARDOWN(); |
5054 } | 5054 } |
5055 | 5055 |
5056 | 5056 |
5057 TEST(fsub) { | 5057 TEST(fsub) { |
5058 INIT_V8(); | 5058 INIT_V8(); |
5059 SETUP(); | 5059 SETUP(); |
5060 | 5060 |
5061 START(); | 5061 START(); |
(...skipping 23 matching lines...)
5085 __ Fsub(d8, d29, d31); | 5085 __ Fsub(d8, d29, d31); |
5086 __ Fsub(d9, d26, d31); | 5086 __ Fsub(d9, d26, d31); |
5087 __ Fsub(d10, d31, d27); | 5087 __ Fsub(d10, d31, d27); |
5088 __ Fsub(d11, d31, d28); | 5088 __ Fsub(d11, d31, d28); |
5089 __ Fsub(d12, d27, d27); | 5089 __ Fsub(d12, d27, d27); |
5090 __ Fsub(d13, d28, d28); | 5090 __ Fsub(d13, d28, d28); |
5091 END(); | 5091 END(); |
5092 | 5092 |
5093 RUN(); | 5093 RUN(); |
5094 | 5094 |
5095 ASSERT_EQUAL_FP32(2.25, s0); | 5095 CHECK_EQUAL_FP32(2.25, s0); |
5096 ASSERT_EQUAL_FP32(1.0, s1); | 5096 CHECK_EQUAL_FP32(1.0, s1); |
5097 ASSERT_EQUAL_FP32(-1.0, s2); | 5097 CHECK_EQUAL_FP32(-1.0, s2); |
5098 ASSERT_EQUAL_FP32(kFP32NegativeInfinity, s3); | 5098 CHECK_EQUAL_FP32(kFP32NegativeInfinity, s3); |
5099 ASSERT_EQUAL_FP32(kFP32PositiveInfinity, s4); | 5099 CHECK_EQUAL_FP32(kFP32PositiveInfinity, s4); |
5100 ASSERT_EQUAL_FP32(kFP32DefaultNaN, s5); | 5100 CHECK_EQUAL_FP32(kFP32DefaultNaN, s5); |
5101 ASSERT_EQUAL_FP32(kFP32DefaultNaN, s6); | 5101 CHECK_EQUAL_FP32(kFP32DefaultNaN, s6); |
5102 ASSERT_EQUAL_FP64(-4.25, d7); | 5102 CHECK_EQUAL_FP64(-4.25, d7); |
5103 ASSERT_EQUAL_FP64(-2.25, d8); | 5103 CHECK_EQUAL_FP64(-2.25, d8); |
5104 ASSERT_EQUAL_FP64(-2.25, d9); | 5104 CHECK_EQUAL_FP64(-2.25, d9); |
5105 ASSERT_EQUAL_FP64(kFP64NegativeInfinity, d10); | 5105 CHECK_EQUAL_FP64(kFP64NegativeInfinity, d10); |
5106 ASSERT_EQUAL_FP64(kFP64PositiveInfinity, d11); | 5106 CHECK_EQUAL_FP64(kFP64PositiveInfinity, d11); |
5107 ASSERT_EQUAL_FP64(kFP64DefaultNaN, d12); | 5107 CHECK_EQUAL_FP64(kFP64DefaultNaN, d12); |
5108 ASSERT_EQUAL_FP64(kFP64DefaultNaN, d13); | 5108 CHECK_EQUAL_FP64(kFP64DefaultNaN, d13); |
5109 | 5109 |
5110 TEARDOWN(); | 5110 TEARDOWN(); |
5111 } | 5111 } |
5112 | 5112 |
5113 | 5113 |
5114 TEST(fmul) { | 5114 TEST(fmul) { |
5115 INIT_V8(); | 5115 INIT_V8(); |
5116 SETUP(); | 5116 SETUP(); |
5117 | 5117 |
5118 START(); | 5118 START(); |
(...skipping 24 matching lines...)
5143 __ Fmul(d8, d29, d31); | 5143 __ Fmul(d8, d29, d31); |
5144 __ Fmul(d9, d26, d26); | 5144 __ Fmul(d9, d26, d26); |
5145 __ Fmul(d10, d27, d30); | 5145 __ Fmul(d10, d27, d30); |
5146 __ Fmul(d11, d28, d30); | 5146 __ Fmul(d11, d28, d30); |
5147 __ Fmul(d12, d27, d29); | 5147 __ Fmul(d12, d27, d29); |
5148 __ Fmul(d13, d29, d28); | 5148 __ Fmul(d13, d29, d28); |
5149 END(); | 5149 END(); |
5150 | 5150 |
5151 RUN(); | 5151 RUN(); |
5152 | 5152 |
5153 ASSERT_EQUAL_FP32(6.5, s0); | 5153 CHECK_EQUAL_FP32(6.5, s0); |
5154 ASSERT_EQUAL_FP32(0.0, s1); | 5154 CHECK_EQUAL_FP32(0.0, s1); |
5155 ASSERT_EQUAL_FP32(0.0, s2); | 5155 CHECK_EQUAL_FP32(0.0, s2); |
5156 ASSERT_EQUAL_FP32(kFP32NegativeInfinity, s3); | 5156 CHECK_EQUAL_FP32(kFP32NegativeInfinity, s3); |
5157 ASSERT_EQUAL_FP32(kFP32PositiveInfinity, s4); | 5157 CHECK_EQUAL_FP32(kFP32PositiveInfinity, s4); |
5158 ASSERT_EQUAL_FP32(kFP32DefaultNaN, s5); | 5158 CHECK_EQUAL_FP32(kFP32DefaultNaN, s5); |
5159 ASSERT_EQUAL_FP32(kFP32DefaultNaN, s6); | 5159 CHECK_EQUAL_FP32(kFP32DefaultNaN, s6); |
5160 ASSERT_EQUAL_FP64(-4.5, d7); | 5160 CHECK_EQUAL_FP64(-4.5, d7); |
5161 ASSERT_EQUAL_FP64(0.0, d8); | 5161 CHECK_EQUAL_FP64(0.0, d8); |
5162 ASSERT_EQUAL_FP64(0.0, d9); | 5162 CHECK_EQUAL_FP64(0.0, d9); |
5163 ASSERT_EQUAL_FP64(kFP64NegativeInfinity, d10); | 5163 CHECK_EQUAL_FP64(kFP64NegativeInfinity, d10); |
5164 ASSERT_EQUAL_FP64(kFP64PositiveInfinity, d11); | 5164 CHECK_EQUAL_FP64(kFP64PositiveInfinity, d11); |
5165 ASSERT_EQUAL_FP64(kFP64DefaultNaN, d12); | 5165 CHECK_EQUAL_FP64(kFP64DefaultNaN, d12); |
5166 ASSERT_EQUAL_FP64(kFP64DefaultNaN, d13); | 5166 CHECK_EQUAL_FP64(kFP64DefaultNaN, d13); |
5167 | 5167 |
5168 TEARDOWN(); | 5168 TEARDOWN(); |
5169 } | 5169 } |
5170 | 5170 |
5171 | 5171 |
5172 static void FmaddFmsubHelper(double n, double m, double a, | 5172 static void FmaddFmsubHelper(double n, double m, double a, |
5173 double fmadd, double fmsub, | 5173 double fmadd, double fmsub, |
5174 double fnmadd, double fnmsub) { | 5174 double fnmadd, double fnmsub) { |
5175 SETUP(); | 5175 SETUP(); |
5176 START(); | 5176 START(); |
5177 | 5177 |
5178 __ Fmov(d0, n); | 5178 __ Fmov(d0, n); |
5179 __ Fmov(d1, m); | 5179 __ Fmov(d1, m); |
5180 __ Fmov(d2, a); | 5180 __ Fmov(d2, a); |
5181 __ Fmadd(d28, d0, d1, d2); | 5181 __ Fmadd(d28, d0, d1, d2); |
5182 __ Fmsub(d29, d0, d1, d2); | 5182 __ Fmsub(d29, d0, d1, d2); |
5183 __ Fnmadd(d30, d0, d1, d2); | 5183 __ Fnmadd(d30, d0, d1, d2); |
5184 __ Fnmsub(d31, d0, d1, d2); | 5184 __ Fnmsub(d31, d0, d1, d2); |
5185 | 5185 |
5186 END(); | 5186 END(); |
5187 RUN(); | 5187 RUN(); |
5188 | 5188 |
5189 ASSERT_EQUAL_FP64(fmadd, d28); | 5189 CHECK_EQUAL_FP64(fmadd, d28); |
5190 ASSERT_EQUAL_FP64(fmsub, d29); | 5190 CHECK_EQUAL_FP64(fmsub, d29); |
5191 ASSERT_EQUAL_FP64(fnmadd, d30); | 5191 CHECK_EQUAL_FP64(fnmadd, d30); |
5192 ASSERT_EQUAL_FP64(fnmsub, d31); | 5192 CHECK_EQUAL_FP64(fnmsub, d31); |
5193 | 5193 |
5194 TEARDOWN(); | 5194 TEARDOWN(); |
5195 } | 5195 } |
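One way to produce the four expectations passed to this helper is the host's fused multiply-add; the comment in the next test notes that the simulator computes the results with fma as well. The sign placement below is a sketch of the usual AArch64 definitions (FMADD = a + n*m, FMSUB = a - n*m, FNMADD = -a - n*m, FNMSUB = -a + n*m), not a quote of the actual call sites, which are in the elided lines:

#include <cmath>

struct FmaExpectations {
  double fmadd, fmsub, fnmadd, fnmsub;
};

FmaExpectations MakeExpectations(double n, double m, double a) {
  return { std::fma(n, m, a),     // fmadd
           std::fma(-n, m, a),    // fmsub
           std::fma(-n, m, -a),   // fnmadd
           std::fma(n, m, -a) };  // fnmsub
}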
5196 | 5196 |
5197 | 5197 |
5198 TEST(fmadd_fmsub_double) { | 5198 TEST(fmadd_fmsub_double) { |
5199 INIT_V8(); | 5199 INIT_V8(); |
5200 | 5200 |
5201 // It's hard to check the result of fused operations because the only way to | 5201 // It's hard to check the result of fused operations because the only way to |
5202 // calculate the result is using fma, which is what the simulator uses anyway. | 5202 // calculate the result is using fma, which is what the simulator uses anyway. |
(...skipping 44 matching lines...)
5247 __ Fmov(s1, m); | 5247 __ Fmov(s1, m); |
5248 __ Fmov(s2, a); | 5248 __ Fmov(s2, a); |
5249 __ Fmadd(s28, s0, s1, s2); | 5249 __ Fmadd(s28, s0, s1, s2); |
5250 __ Fmsub(s29, s0, s1, s2); | 5250 __ Fmsub(s29, s0, s1, s2); |
5251 __ Fnmadd(s30, s0, s1, s2); | 5251 __ Fnmadd(s30, s0, s1, s2); |
5252 __ Fnmsub(s31, s0, s1, s2); | 5252 __ Fnmsub(s31, s0, s1, s2); |
5253 | 5253 |
5254 END(); | 5254 END(); |
5255 RUN(); | 5255 RUN(); |
5256 | 5256 |
5257 ASSERT_EQUAL_FP32(fmadd, s28); | 5257 CHECK_EQUAL_FP32(fmadd, s28); |
5258 ASSERT_EQUAL_FP32(fmsub, s29); | 5258 CHECK_EQUAL_FP32(fmsub, s29); |
5259 ASSERT_EQUAL_FP32(fnmadd, s30); | 5259 CHECK_EQUAL_FP32(fnmadd, s30); |
5260 ASSERT_EQUAL_FP32(fnmsub, s31); | 5260 CHECK_EQUAL_FP32(fnmsub, s31); |
5261 | 5261 |
5262 TEARDOWN(); | 5262 TEARDOWN(); |
5263 } | 5263 } |
5264 | 5264 |
5265 | 5265 |
5266 TEST(fmadd_fmsub_float) { | 5266 TEST(fmadd_fmsub_float) { |
5267 INIT_V8(); | 5267 INIT_V8(); |
5268 // It's hard to check the result of fused operations because the only way to | 5268 // It's hard to check the result of fused operations because the only way to |
5269 // calculate the result is using fma, which is what the simulator uses anyway. | 5269 // calculate the result is using fma, which is what the simulator uses anyway. |
5270 // TODO(jbramley): Add tests to check behaviour against a hardware trace. | 5270 // TODO(jbramley): Add tests to check behaviour against a hardware trace. |
(...skipping 35 matching lines...)
5306 | 5306 |
5307 TEST(fmadd_fmsub_double_nans) { | 5307 TEST(fmadd_fmsub_double_nans) { |
5308 INIT_V8(); | 5308 INIT_V8(); |
5309 // Make sure that NaN propagation works correctly. | 5309 // Make sure that NaN propagation works correctly. |
5310 double s1 = rawbits_to_double(0x7ff5555511111111); | 5310 double s1 = rawbits_to_double(0x7ff5555511111111); |
5311 double s2 = rawbits_to_double(0x7ff5555522222222); | 5311 double s2 = rawbits_to_double(0x7ff5555522222222); |
5312 double sa = rawbits_to_double(0x7ff55555aaaaaaaa); | 5312 double sa = rawbits_to_double(0x7ff55555aaaaaaaa); |
5313 double q1 = rawbits_to_double(0x7ffaaaaa11111111); | 5313 double q1 = rawbits_to_double(0x7ffaaaaa11111111); |
5314 double q2 = rawbits_to_double(0x7ffaaaaa22222222); | 5314 double q2 = rawbits_to_double(0x7ffaaaaa22222222); |
5315 double qa = rawbits_to_double(0x7ffaaaaaaaaaaaaa); | 5315 double qa = rawbits_to_double(0x7ffaaaaaaaaaaaaa); |
5316 ASSERT(IsSignallingNaN(s1)); | 5316 DCHECK(IsSignallingNaN(s1)); |
5317 ASSERT(IsSignallingNaN(s2)); | 5317 DCHECK(IsSignallingNaN(s2)); |
5318 ASSERT(IsSignallingNaN(sa)); | 5318 DCHECK(IsSignallingNaN(sa)); |
5319 ASSERT(IsQuietNaN(q1)); | 5319 DCHECK(IsQuietNaN(q1)); |
5320 ASSERT(IsQuietNaN(q2)); | 5320 DCHECK(IsQuietNaN(q2)); |
5321 ASSERT(IsQuietNaN(qa)); | 5321 DCHECK(IsQuietNaN(qa)); |
5322 | 5322 |
5323 // The input NaNs after passing through ProcessNaN. | 5323 // The input NaNs after passing through ProcessNaN. |
5324 double s1_proc = rawbits_to_double(0x7ffd555511111111); | 5324 double s1_proc = rawbits_to_double(0x7ffd555511111111); |
5325 double s2_proc = rawbits_to_double(0x7ffd555522222222); | 5325 double s2_proc = rawbits_to_double(0x7ffd555522222222); |
5326 double sa_proc = rawbits_to_double(0x7ffd5555aaaaaaaa); | 5326 double sa_proc = rawbits_to_double(0x7ffd5555aaaaaaaa); |
5327 double q1_proc = q1; | 5327 double q1_proc = q1; |
5328 double q2_proc = q2; | 5328 double q2_proc = q2; |
5329 double qa_proc = qa; | 5329 double qa_proc = qa; |
5330 ASSERT(IsQuietNaN(s1_proc)); | 5330 DCHECK(IsQuietNaN(s1_proc)); |
5331 ASSERT(IsQuietNaN(s2_proc)); | 5331 DCHECK(IsQuietNaN(s2_proc)); |
5332 ASSERT(IsQuietNaN(sa_proc)); | 5332 DCHECK(IsQuietNaN(sa_proc)); |
5333 ASSERT(IsQuietNaN(q1_proc)); | 5333 DCHECK(IsQuietNaN(q1_proc)); |
5334 ASSERT(IsQuietNaN(q2_proc)); | 5334 DCHECK(IsQuietNaN(q2_proc)); |
5335 ASSERT(IsQuietNaN(qa_proc)); | 5335 DCHECK(IsQuietNaN(qa_proc)); |
5336 | 5336 |
5337 // Negated NaNs as it would be done on ARMv8 hardware. | 5337 // Negated NaNs as it would be done on ARMv8 hardware. |
5338 double s1_proc_neg = rawbits_to_double(0xfffd555511111111); | 5338 double s1_proc_neg = rawbits_to_double(0xfffd555511111111); |
5339 double sa_proc_neg = rawbits_to_double(0xfffd5555aaaaaaaa); | 5339 double sa_proc_neg = rawbits_to_double(0xfffd5555aaaaaaaa); |
5340 double q1_proc_neg = rawbits_to_double(0xfffaaaaa11111111); | 5340 double q1_proc_neg = rawbits_to_double(0xfffaaaaa11111111); |
5341 double qa_proc_neg = rawbits_to_double(0xfffaaaaaaaaaaaaa); | 5341 double qa_proc_neg = rawbits_to_double(0xfffaaaaaaaaaaaaa); |
5342 ASSERT(IsQuietNaN(s1_proc_neg)); | 5342 DCHECK(IsQuietNaN(s1_proc_neg)); |
5343 ASSERT(IsQuietNaN(sa_proc_neg)); | 5343 DCHECK(IsQuietNaN(sa_proc_neg)); |
5344 ASSERT(IsQuietNaN(q1_proc_neg)); | 5344 DCHECK(IsQuietNaN(q1_proc_neg)); |
5345 ASSERT(IsQuietNaN(qa_proc_neg)); | 5345 DCHECK(IsQuietNaN(qa_proc_neg)); |
5346 | 5346 |
5347 // Quiet NaNs are propagated. | 5347 // Quiet NaNs are propagated. |
5348 FmaddFmsubHelper(q1, 0, 0, q1_proc, q1_proc_neg, q1_proc_neg, q1_proc); | 5348 FmaddFmsubHelper(q1, 0, 0, q1_proc, q1_proc_neg, q1_proc_neg, q1_proc); |
5349 FmaddFmsubHelper(0, q2, 0, q2_proc, q2_proc, q2_proc, q2_proc); | 5349 FmaddFmsubHelper(0, q2, 0, q2_proc, q2_proc, q2_proc, q2_proc); |
5350 FmaddFmsubHelper(0, 0, qa, qa_proc, qa_proc, qa_proc_neg, qa_proc_neg); | 5350 FmaddFmsubHelper(0, 0, qa, qa_proc, qa_proc, qa_proc_neg, qa_proc_neg); |
5351 FmaddFmsubHelper(q1, q2, 0, q1_proc, q1_proc_neg, q1_proc_neg, q1_proc); | 5351 FmaddFmsubHelper(q1, q2, 0, q1_proc, q1_proc_neg, q1_proc_neg, q1_proc); |
5352 FmaddFmsubHelper(0, q2, qa, qa_proc, qa_proc, qa_proc_neg, qa_proc_neg); | 5352 FmaddFmsubHelper(0, q2, qa, qa_proc, qa_proc, qa_proc_neg, qa_proc_neg); |
5353 FmaddFmsubHelper(q1, 0, qa, qa_proc, qa_proc, qa_proc_neg, qa_proc_neg); | 5353 FmaddFmsubHelper(q1, 0, qa, qa_proc, qa_proc, qa_proc_neg, qa_proc_neg); |
5354 FmaddFmsubHelper(q1, q2, qa, qa_proc, qa_proc, qa_proc_neg, qa_proc_neg); | 5354 FmaddFmsubHelper(q1, q2, qa, qa_proc, qa_proc, qa_proc_neg, qa_proc_neg); |
5355 | 5355 |
(...skipping 33 matching lines...)
5389 | 5389 |
5390 TEST(fmadd_fmsub_float_nans) { | 5390 TEST(fmadd_fmsub_float_nans) { |
5391 INIT_V8(); | 5391 INIT_V8(); |
5392 // Make sure that NaN propagation works correctly. | 5392 // Make sure that NaN propagation works correctly. |
5393 float s1 = rawbits_to_float(0x7f951111); | 5393 float s1 = rawbits_to_float(0x7f951111); |
5394 float s2 = rawbits_to_float(0x7f952222); | 5394 float s2 = rawbits_to_float(0x7f952222); |
5395 float sa = rawbits_to_float(0x7f95aaaa); | 5395 float sa = rawbits_to_float(0x7f95aaaa); |
5396 float q1 = rawbits_to_float(0x7fea1111); | 5396 float q1 = rawbits_to_float(0x7fea1111); |
5397 float q2 = rawbits_to_float(0x7fea2222); | 5397 float q2 = rawbits_to_float(0x7fea2222); |
5398 float qa = rawbits_to_float(0x7feaaaaa); | 5398 float qa = rawbits_to_float(0x7feaaaaa); |
5399 ASSERT(IsSignallingNaN(s1)); | 5399 DCHECK(IsSignallingNaN(s1)); |
5400 ASSERT(IsSignallingNaN(s2)); | 5400 DCHECK(IsSignallingNaN(s2)); |
5401 ASSERT(IsSignallingNaN(sa)); | 5401 DCHECK(IsSignallingNaN(sa)); |
5402 ASSERT(IsQuietNaN(q1)); | 5402 DCHECK(IsQuietNaN(q1)); |
5403 ASSERT(IsQuietNaN(q2)); | 5403 DCHECK(IsQuietNaN(q2)); |
5404 ASSERT(IsQuietNaN(qa)); | 5404 DCHECK(IsQuietNaN(qa)); |
5405 | 5405 |
5406 // The input NaNs after passing through ProcessNaN. | 5406 // The input NaNs after passing through ProcessNaN. |
5407 float s1_proc = rawbits_to_float(0x7fd51111); | 5407 float s1_proc = rawbits_to_float(0x7fd51111); |
5408 float s2_proc = rawbits_to_float(0x7fd52222); | 5408 float s2_proc = rawbits_to_float(0x7fd52222); |
5409 float sa_proc = rawbits_to_float(0x7fd5aaaa); | 5409 float sa_proc = rawbits_to_float(0x7fd5aaaa); |
5410 float q1_proc = q1; | 5410 float q1_proc = q1; |
5411 float q2_proc = q2; | 5411 float q2_proc = q2; |
5412 float qa_proc = qa; | 5412 float qa_proc = qa; |
5413 ASSERT(IsQuietNaN(s1_proc)); | 5413 DCHECK(IsQuietNaN(s1_proc)); |
5414 ASSERT(IsQuietNaN(s2_proc)); | 5414 DCHECK(IsQuietNaN(s2_proc)); |
5415 ASSERT(IsQuietNaN(sa_proc)); | 5415 DCHECK(IsQuietNaN(sa_proc)); |
5416 ASSERT(IsQuietNaN(q1_proc)); | 5416 DCHECK(IsQuietNaN(q1_proc)); |
5417 ASSERT(IsQuietNaN(q2_proc)); | 5417 DCHECK(IsQuietNaN(q2_proc)); |
5418 ASSERT(IsQuietNaN(qa_proc)); | 5418 DCHECK(IsQuietNaN(qa_proc)); |
5419 | 5419 |
5420 // Negated NaNs as it would be done on ARMv8 hardware. | 5420 // Negated NaNs as it would be done on ARMv8 hardware. |
5421 float s1_proc_neg = rawbits_to_float(0xffd51111); | 5421 float s1_proc_neg = rawbits_to_float(0xffd51111); |
5422 float sa_proc_neg = rawbits_to_float(0xffd5aaaa); | 5422 float sa_proc_neg = rawbits_to_float(0xffd5aaaa); |
5423 float q1_proc_neg = rawbits_to_float(0xffea1111); | 5423 float q1_proc_neg = rawbits_to_float(0xffea1111); |
5424 float qa_proc_neg = rawbits_to_float(0xffeaaaaa); | 5424 float qa_proc_neg = rawbits_to_float(0xffeaaaaa); |
5425 ASSERT(IsQuietNaN(s1_proc_neg)); | 5425 DCHECK(IsQuietNaN(s1_proc_neg)); |
5426 ASSERT(IsQuietNaN(sa_proc_neg)); | 5426 DCHECK(IsQuietNaN(sa_proc_neg)); |
5427 ASSERT(IsQuietNaN(q1_proc_neg)); | 5427 DCHECK(IsQuietNaN(q1_proc_neg)); |
5428 ASSERT(IsQuietNaN(qa_proc_neg)); | 5428 DCHECK(IsQuietNaN(qa_proc_neg)); |
5429 | 5429 |
5430 // Quiet NaNs are propagated. | 5430 // Quiet NaNs are propagated. |
5431 FmaddFmsubHelper(q1, 0, 0, q1_proc, q1_proc_neg, q1_proc_neg, q1_proc); | 5431 FmaddFmsubHelper(q1, 0, 0, q1_proc, q1_proc_neg, q1_proc_neg, q1_proc); |
5432 FmaddFmsubHelper(0, q2, 0, q2_proc, q2_proc, q2_proc, q2_proc); | 5432 FmaddFmsubHelper(0, q2, 0, q2_proc, q2_proc, q2_proc, q2_proc); |
5433 FmaddFmsubHelper(0, 0, qa, qa_proc, qa_proc, qa_proc_neg, qa_proc_neg); | 5433 FmaddFmsubHelper(0, 0, qa, qa_proc, qa_proc, qa_proc_neg, qa_proc_neg); |
5434 FmaddFmsubHelper(q1, q2, 0, q1_proc, q1_proc_neg, q1_proc_neg, q1_proc); | 5434 FmaddFmsubHelper(q1, q2, 0, q1_proc, q1_proc_neg, q1_proc_neg, q1_proc); |
5435 FmaddFmsubHelper(0, q2, qa, qa_proc, qa_proc, qa_proc_neg, qa_proc_neg); | 5435 FmaddFmsubHelper(0, q2, qa, qa_proc, qa_proc, qa_proc_neg, qa_proc_neg); |
5436 FmaddFmsubHelper(q1, 0, qa, qa_proc, qa_proc, qa_proc_neg, qa_proc_neg); | 5436 FmaddFmsubHelper(q1, 0, qa, qa_proc, qa_proc, qa_proc_neg, qa_proc_neg); |
5437 FmaddFmsubHelper(q1, q2, qa, qa_proc, qa_proc, qa_proc_neg, qa_proc_neg); | 5437 FmaddFmsubHelper(q1, q2, qa, qa_proc, qa_proc, qa_proc_neg, qa_proc_neg); |
5438 | 5438 |
(...skipping 63 matching lines...)
5502 __ Fdiv(d8, d29, d31); | 5502 __ Fdiv(d8, d29, d31); |
5503 __ Fdiv(d9, d26, d31); | 5503 __ Fdiv(d9, d26, d31); |
5504 __ Fdiv(d10, d31, d27); | 5504 __ Fdiv(d10, d31, d27); |
5505 __ Fdiv(d11, d31, d28); | 5505 __ Fdiv(d11, d31, d28); |
5506 __ Fdiv(d12, d28, d27); | 5506 __ Fdiv(d12, d28, d27); |
5507 __ Fdiv(d13, d29, d29); | 5507 __ Fdiv(d13, d29, d29); |
5508 END(); | 5508 END(); |
5509 | 5509 |
5510 RUN(); | 5510 RUN(); |
5511 | 5511 |
5512 ASSERT_EQUAL_FP32(1.625f, s0); | 5512 CHECK_EQUAL_FP32(1.625f, s0); |
5513 ASSERT_EQUAL_FP32(1.0f, s1); | 5513 CHECK_EQUAL_FP32(1.0f, s1); |
5514 ASSERT_EQUAL_FP32(-0.0f, s2); | 5514 CHECK_EQUAL_FP32(-0.0f, s2); |
5515 ASSERT_EQUAL_FP32(0.0f, s3); | 5515 CHECK_EQUAL_FP32(0.0f, s3); |
5516 ASSERT_EQUAL_FP32(-0.0f, s4); | 5516 CHECK_EQUAL_FP32(-0.0f, s4); |
5517 ASSERT_EQUAL_FP32(kFP32DefaultNaN, s5); | 5517 CHECK_EQUAL_FP32(kFP32DefaultNaN, s5); |
5518 ASSERT_EQUAL_FP32(kFP32DefaultNaN, s6); | 5518 CHECK_EQUAL_FP32(kFP32DefaultNaN, s6); |
5519 ASSERT_EQUAL_FP64(-1.125, d7); | 5519 CHECK_EQUAL_FP64(-1.125, d7); |
5520 ASSERT_EQUAL_FP64(0.0, d8); | 5520 CHECK_EQUAL_FP64(0.0, d8); |
5521 ASSERT_EQUAL_FP64(-0.0, d9); | 5521 CHECK_EQUAL_FP64(-0.0, d9); |
5522 ASSERT_EQUAL_FP64(0.0, d10); | 5522 CHECK_EQUAL_FP64(0.0, d10); |
5523 ASSERT_EQUAL_FP64(-0.0, d11); | 5523 CHECK_EQUAL_FP64(-0.0, d11); |
5524 ASSERT_EQUAL_FP64(kFP64DefaultNaN, d12); | 5524 CHECK_EQUAL_FP64(kFP64DefaultNaN, d12); |
5525 ASSERT_EQUAL_FP64(kFP64DefaultNaN, d13); | 5525 CHECK_EQUAL_FP64(kFP64DefaultNaN, d13); |
5526 | 5526 |
5527 TEARDOWN(); | 5527 TEARDOWN(); |
5528 } | 5528 } |
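The default-NaN expectations in the fadd, fsub, fmul and fdiv tests all correspond to IEEE-754 invalid operations; the hardware and simulator then substitute the default NaN. A host-side illustration of the invalid cases themselves (the host produces some NaN, not necessarily the default one):

#include <cassert>
#include <cmath>
#include <limits>

int main() {
  const double inf = std::numeric_limits<double>::infinity();
  const double zero = 0.0;
  assert(std::isnan(inf + (-inf)));  // fadd: opposite-signed infinities
  assert(std::isnan(inf - inf));     // fsub: same-signed infinities
  assert(std::isnan(zero * inf));    // fmul: zero times infinity
  assert(std::isnan(zero / zero));   // fdiv: zero over zero
  assert(std::isnan(inf / inf));     // fdiv: infinity over infinity
  return 0;
}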
5529 | 5529 |
5530 | 5530 |
5531 static float MinMaxHelper(float n, | 5531 static float MinMaxHelper(float n, |
5532 float m, | 5532 float m, |
5533 bool min, | 5533 bool min, |
5534 float quiet_nan_substitute = 0.0) { | 5534 float quiet_nan_substitute = 0.0) { |
5535 uint32_t raw_n = float_to_rawbits(n); | 5535 uint32_t raw_n = float_to_rawbits(n); |
(...skipping 82 matching lines...)
5618 __ Fmov(d0, n); | 5618 __ Fmov(d0, n); |
5619 __ Fmov(d1, m); | 5619 __ Fmov(d1, m); |
5620 __ Fmin(d28, d0, d1); | 5620 __ Fmin(d28, d0, d1); |
5621 __ Fmax(d29, d0, d1); | 5621 __ Fmax(d29, d0, d1); |
5622 __ Fminnm(d30, d0, d1); | 5622 __ Fminnm(d30, d0, d1); |
5623 __ Fmaxnm(d31, d0, d1); | 5623 __ Fmaxnm(d31, d0, d1); |
5624 END(); | 5624 END(); |
5625 | 5625 |
5626 RUN(); | 5626 RUN(); |
5627 | 5627 |
5628 ASSERT_EQUAL_FP64(min, d28); | 5628 CHECK_EQUAL_FP64(min, d28); |
5629 ASSERT_EQUAL_FP64(max, d29); | 5629 CHECK_EQUAL_FP64(max, d29); |
5630 ASSERT_EQUAL_FP64(minnm, d30); | 5630 CHECK_EQUAL_FP64(minnm, d30); |
5631 ASSERT_EQUAL_FP64(maxnm, d31); | 5631 CHECK_EQUAL_FP64(maxnm, d31); |
5632 | 5632 |
5633 TEARDOWN(); | 5633 TEARDOWN(); |
5634 } | 5634 } |
5635 | 5635 |
5636 | 5636 |
5637 TEST(fmax_fmin_d) { | 5637 TEST(fmax_fmin_d) { |
5638 INIT_V8(); | 5638 INIT_V8(); |
5639 // Use non-standard NaNs to check that the payload bits are preserved. | 5639 // Use non-standard NaNs to check that the payload bits are preserved. |
5640 double snan = rawbits_to_double(0x7ff5555512345678); | 5640 double snan = rawbits_to_double(0x7ff5555512345678); |
5641 double qnan = rawbits_to_double(0x7ffaaaaa87654321); | 5641 double qnan = rawbits_to_double(0x7ffaaaaa87654321); |
5642 | 5642 |
5643 double snan_processed = rawbits_to_double(0x7ffd555512345678); | 5643 double snan_processed = rawbits_to_double(0x7ffd555512345678); |
5644 double qnan_processed = qnan; | 5644 double qnan_processed = qnan; |
5645 | 5645 |
5646 ASSERT(IsSignallingNaN(snan)); | 5646 DCHECK(IsSignallingNaN(snan)); |
5647 ASSERT(IsQuietNaN(qnan)); | 5647 DCHECK(IsQuietNaN(qnan)); |
5648 ASSERT(IsQuietNaN(snan_processed)); | 5648 DCHECK(IsQuietNaN(snan_processed)); |
5649 ASSERT(IsQuietNaN(qnan_processed)); | 5649 DCHECK(IsQuietNaN(qnan_processed)); |
5650 | 5650 |
5651 // Bootstrap tests. | 5651 // Bootstrap tests. |
5652 FminFmaxDoubleHelper(0, 0, 0, 0, 0, 0); | 5652 FminFmaxDoubleHelper(0, 0, 0, 0, 0, 0); |
5653 FminFmaxDoubleHelper(0, 1, 0, 1, 0, 1); | 5653 FminFmaxDoubleHelper(0, 1, 0, 1, 0, 1); |
5654 FminFmaxDoubleHelper(kFP64PositiveInfinity, kFP64NegativeInfinity, | 5654 FminFmaxDoubleHelper(kFP64PositiveInfinity, kFP64NegativeInfinity, |
5655 kFP64NegativeInfinity, kFP64PositiveInfinity, | 5655 kFP64NegativeInfinity, kFP64PositiveInfinity, |
5656 kFP64NegativeInfinity, kFP64PositiveInfinity); | 5656 kFP64NegativeInfinity, kFP64PositiveInfinity); |
5657 FminFmaxDoubleHelper(snan, 0, | 5657 FminFmaxDoubleHelper(snan, 0, |
5658 snan_processed, snan_processed, | 5658 snan_processed, snan_processed, |
5659 snan_processed, snan_processed); | 5659 snan_processed, snan_processed); |
(...skipping 43 matching lines...)
5703 __ Fmov(s0, n); | 5703 __ Fmov(s0, n); |
5704 __ Fmov(s1, m); | 5704 __ Fmov(s1, m); |
5705 __ Fmin(s28, s0, s1); | 5705 __ Fmin(s28, s0, s1); |
5706 __ Fmax(s29, s0, s1); | 5706 __ Fmax(s29, s0, s1); |
5707 __ Fminnm(s30, s0, s1); | 5707 __ Fminnm(s30, s0, s1); |
5708 __ Fmaxnm(s31, s0, s1); | 5708 __ Fmaxnm(s31, s0, s1); |
5709 END(); | 5709 END(); |
5710 | 5710 |
5711 RUN(); | 5711 RUN(); |
5712 | 5712 |
5713 ASSERT_EQUAL_FP32(min, s28); | 5713 CHECK_EQUAL_FP32(min, s28); |
5714 ASSERT_EQUAL_FP32(max, s29); | 5714 CHECK_EQUAL_FP32(max, s29); |
5715 ASSERT_EQUAL_FP32(minnm, s30); | 5715 CHECK_EQUAL_FP32(minnm, s30); |
5716 ASSERT_EQUAL_FP32(maxnm, s31); | 5716 CHECK_EQUAL_FP32(maxnm, s31); |
5717 | 5717 |
5718 TEARDOWN(); | 5718 TEARDOWN(); |
5719 } | 5719 } |
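The snan / snan_processed pairs used in these NaN tests (0x7ff5555512345678 → 0x7ffd555512345678 for doubles, 0x7f951111 → 0x7fd51111 for floats) differ only in the most significant fraction bit: quieting a signalling NaN sets that bit and keeps the rest of the payload. A sketch of that rule with invented helper names:

#include <cstdint>

uint64_t QuietNaN64(uint64_t bits) { return bits | (uint64_t{1} << 51); }
uint32_t QuietNaN32(uint32_t bits) { return bits | (uint32_t{1} << 22); }

// QuietNaN64(0x7ff5555512345678) == 0x7ffd555512345678
// QuietNaN32(0x7f951111)         == 0x7fd51111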
5720 | 5720 |
5721 | 5721 |
5722 TEST(fmax_fmin_s) { | 5722 TEST(fmax_fmin_s) { |
5723 INIT_V8(); | 5723 INIT_V8(); |
5724 // Use non-standard NaNs to check that the payload bits are preserved. | 5724 // Use non-standard NaNs to check that the payload bits are preserved. |
5725 float snan = rawbits_to_float(0x7f951234); | 5725 float snan = rawbits_to_float(0x7f951234); |
5726 float qnan = rawbits_to_float(0x7fea8765); | 5726 float qnan = rawbits_to_float(0x7fea8765); |
5727 | 5727 |
5728 float snan_processed = rawbits_to_float(0x7fd51234); | 5728 float snan_processed = rawbits_to_float(0x7fd51234); |
5729 float qnan_processed = qnan; | 5729 float qnan_processed = qnan; |
5730 | 5730 |
5731 ASSERT(IsSignallingNaN(snan)); | 5731 DCHECK(IsSignallingNaN(snan)); |
5732 ASSERT(IsQuietNaN(qnan)); | 5732 DCHECK(IsQuietNaN(qnan)); |
5733 ASSERT(IsQuietNaN(snan_processed)); | 5733 DCHECK(IsQuietNaN(snan_processed)); |
5734 ASSERT(IsQuietNaN(qnan_processed)); | 5734 DCHECK(IsQuietNaN(qnan_processed)); |
5735 | 5735 |
5736 // Bootstrap tests. | 5736 // Bootstrap tests. |
5737 FminFmaxFloatHelper(0, 0, 0, 0, 0, 0); | 5737 FminFmaxFloatHelper(0, 0, 0, 0, 0, 0); |
5738 FminFmaxFloatHelper(0, 1, 0, 1, 0, 1); | 5738 FminFmaxFloatHelper(0, 1, 0, 1, 0, 1); |
5739 FminFmaxFloatHelper(kFP32PositiveInfinity, kFP32NegativeInfinity, | 5739 FminFmaxFloatHelper(kFP32PositiveInfinity, kFP32NegativeInfinity, |
5740 kFP32NegativeInfinity, kFP32PositiveInfinity, | 5740 kFP32NegativeInfinity, kFP32PositiveInfinity, |
5741 kFP32NegativeInfinity, kFP32PositiveInfinity); | 5741 kFP32NegativeInfinity, kFP32PositiveInfinity); |
5742 FminFmaxFloatHelper(snan, 0, | 5742 FminFmaxFloatHelper(snan, 0, |
5743 snan_processed, snan_processed, | 5743 snan_processed, snan_processed, |
5744 snan_processed, snan_processed); | 5744 snan_processed, snan_processed); |
(...skipping 81 matching lines...)
5826 __ fccmp(s16, s16, NFlag, al); | 5826 __ fccmp(s16, s16, NFlag, al); |
5827 __ Mrs(x8, NZCV); | 5827 __ Mrs(x8, NZCV); |
5828 | 5828 |
5829 __ fccmp(d18, d18, NFlag, nv); | 5829 __ fccmp(d18, d18, NFlag, nv); |
5830 __ Mrs(x9, NZCV); | 5830 __ Mrs(x9, NZCV); |
5831 | 5831 |
5832 END(); | 5832 END(); |
5833 | 5833 |
5834 RUN(); | 5834 RUN(); |
5835 | 5835 |
5836 ASSERT_EQUAL_32(ZCFlag, w0); | 5836 CHECK_EQUAL_32(ZCFlag, w0); |
5837 ASSERT_EQUAL_32(VFlag, w1); | 5837 CHECK_EQUAL_32(VFlag, w1); |
5838 ASSERT_EQUAL_32(NFlag, w2); | 5838 CHECK_EQUAL_32(NFlag, w2); |
5839 ASSERT_EQUAL_32(CVFlag, w3); | 5839 CHECK_EQUAL_32(CVFlag, w3); |
5840 ASSERT_EQUAL_32(ZCFlag, w4); | 5840 CHECK_EQUAL_32(ZCFlag, w4); |
5841 ASSERT_EQUAL_32(ZVFlag, w5); | 5841 CHECK_EQUAL_32(ZVFlag, w5); |
5842 ASSERT_EQUAL_32(CFlag, w6); | 5842 CHECK_EQUAL_32(CFlag, w6); |
5843 ASSERT_EQUAL_32(NFlag, w7); | 5843 CHECK_EQUAL_32(NFlag, w7); |
5844 ASSERT_EQUAL_32(ZCFlag, w8); | 5844 CHECK_EQUAL_32(ZCFlag, w8); |
5845 ASSERT_EQUAL_32(ZCFlag, w9); | 5845 CHECK_EQUAL_32(ZCFlag, w9); |
5846 | 5846 |
5847 TEARDOWN(); | 5847 TEARDOWN(); |
5848 } | 5848 } |
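FCCMP either performs the floating-point compare (when the condition holds) or simply loads the immediate NZCV value. The two visible calls, with al and nv conditions, both produce ZCFlag, i.e. the compare result for equal operands rather than the NFlag immediate. A host-side model with invented names and flag encodings (ZC for equal, N for less, C for greater, CV for unordered, matching the patterns the checks expect):

#include <cstdint>

enum NzcvBits : uint32_t { kN = 8, kZ = 4, kC = 2, kV = 1 };

uint32_t FpCompareNzcv(double n, double m) {
  if (n != n || m != m) return kC | kV;  // unordered: at least one NaN operand
  if (n == m) return kZ | kC;
  if (n < m) return kN;
  return kC;                             // greater than
}

// flags = cond_holds ? compare(n, m) : immediate nzcv
uint32_t FccmpModel(bool cond_holds, double n, double m, uint32_t nzcv) {
  return cond_holds ? FpCompareNzcv(n, m) : nzcv;
}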
5849 | 5849 |
5850 | 5850 |
5851 TEST(fcmp) { | 5851 TEST(fcmp) { |
5852 INIT_V8(); | 5852 INIT_V8(); |
5853 SETUP(); | 5853 SETUP(); |
5854 | 5854 |
5855 START(); | 5855 START(); |
(...skipping 49 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
5905 masm.FPTmpList()->set_list(d0.Bit()); | 5905 masm.FPTmpList()->set_list(d0.Bit()); |
5906 __ Fcmp(d19, 12.3456); | 5906 __ Fcmp(d19, 12.3456); |
5907 masm.FPTmpList()->set_list(0); | 5907 masm.FPTmpList()->set_list(0); |
5908 __ Mrs(x16, NZCV); | 5908 __ Mrs(x16, NZCV); |
5909 } | 5909 } |
5910 | 5910 |
5911 END(); | 5911 END(); |
5912 | 5912 |
5913 RUN(); | 5913 RUN(); |
5914 | 5914 |
5915 ASSERT_EQUAL_32(ZCFlag, w0); | 5915 CHECK_EQUAL_32(ZCFlag, w0); |
5916 ASSERT_EQUAL_32(NFlag, w1); | 5916 CHECK_EQUAL_32(NFlag, w1); |
5917 ASSERT_EQUAL_32(CFlag, w2); | 5917 CHECK_EQUAL_32(CFlag, w2); |
5918 ASSERT_EQUAL_32(CVFlag, w3); | 5918 CHECK_EQUAL_32(CVFlag, w3); |
5919 ASSERT_EQUAL_32(CVFlag, w4); | 5919 CHECK_EQUAL_32(CVFlag, w4); |
5920 ASSERT_EQUAL_32(ZCFlag, w5); | 5920 CHECK_EQUAL_32(ZCFlag, w5); |
5921 ASSERT_EQUAL_32(NFlag, w6); | 5921 CHECK_EQUAL_32(NFlag, w6); |
5922 ASSERT_EQUAL_32(ZCFlag, w10); | 5922 CHECK_EQUAL_32(ZCFlag, w10); |
5923 ASSERT_EQUAL_32(NFlag, w11); | 5923 CHECK_EQUAL_32(NFlag, w11); |
5924 ASSERT_EQUAL_32(CFlag, w12); | 5924 CHECK_EQUAL_32(CFlag, w12); |
5925 ASSERT_EQUAL_32(CVFlag, w13); | 5925 CHECK_EQUAL_32(CVFlag, w13); |
5926 ASSERT_EQUAL_32(CVFlag, w14); | 5926 CHECK_EQUAL_32(CVFlag, w14); |
5927 ASSERT_EQUAL_32(ZCFlag, w15); | 5927 CHECK_EQUAL_32(ZCFlag, w15); |
5928 ASSERT_EQUAL_32(NFlag, w16); | 5928 CHECK_EQUAL_32(NFlag, w16); |
5929 | 5929 |
5930 TEARDOWN(); | 5930 TEARDOWN(); |
5931 } | 5931 } |
5932 | 5932 |
5933 | 5933 |
5934 TEST(fcsel) { | 5934 TEST(fcsel) { |
5935 INIT_V8(); | 5935 INIT_V8(); |
5936 SETUP(); | 5936 SETUP(); |
5937 | 5937 |
5938 START(); | 5938 START(); |
5939 __ Mov(x16, 0); | 5939 __ Mov(x16, 0); |
5940 __ Fmov(s16, 1.0); | 5940 __ Fmov(s16, 1.0); |
5941 __ Fmov(s17, 2.0); | 5941 __ Fmov(s17, 2.0); |
5942 __ Fmov(d18, 3.0); | 5942 __ Fmov(d18, 3.0); |
5943 __ Fmov(d19, 4.0); | 5943 __ Fmov(d19, 4.0); |
5944 | 5944 |
5945 __ Cmp(x16, 0); | 5945 __ Cmp(x16, 0); |
5946 __ Fcsel(s0, s16, s17, eq); | 5946 __ Fcsel(s0, s16, s17, eq); |
5947 __ Fcsel(s1, s16, s17, ne); | 5947 __ Fcsel(s1, s16, s17, ne); |
5948 __ Fcsel(d2, d18, d19, eq); | 5948 __ Fcsel(d2, d18, d19, eq); |
5949 __ Fcsel(d3, d18, d19, ne); | 5949 __ Fcsel(d3, d18, d19, ne); |
5950 __ fcsel(s4, s16, s17, al); | 5950 __ fcsel(s4, s16, s17, al); |
5951 __ fcsel(d5, d18, d19, nv); | 5951 __ fcsel(d5, d18, d19, nv); |
5952 END(); | 5952 END(); |
5953 | 5953 |
5954 RUN(); | 5954 RUN(); |
5955 | 5955 |
5956 ASSERT_EQUAL_FP32(1.0, s0); | 5956 CHECK_EQUAL_FP32(1.0, s0); |
5957 ASSERT_EQUAL_FP32(2.0, s1); | 5957 CHECK_EQUAL_FP32(2.0, s1); |
5958 ASSERT_EQUAL_FP64(3.0, d2); | 5958 CHECK_EQUAL_FP64(3.0, d2); |
5959 ASSERT_EQUAL_FP64(4.0, d3); | 5959 CHECK_EQUAL_FP64(4.0, d3); |
5960 ASSERT_EQUAL_FP32(1.0, s4); | 5960 CHECK_EQUAL_FP32(1.0, s4); |
5961 ASSERT_EQUAL_FP64(3.0, d5); | 5961 CHECK_EQUAL_FP64(3.0, d5); |
5962 | 5962 |
5963 TEARDOWN(); | 5963 TEARDOWN(); |
5964 } | 5964 } |
5965 | 5965 |
5966 | 5966 |
5967 TEST(fneg) { | 5967 TEST(fneg) { |
5968 INIT_V8(); | 5968 INIT_V8(); |
5969 SETUP(); | 5969 SETUP(); |
5970 | 5970 |
5971 START(); | 5971 START(); |
(...skipping 13 matching lines...) Expand all Loading... |
5985 __ Fneg(d6, d19); | 5985 __ Fneg(d6, d19); |
5986 __ Fneg(d7, d6); | 5986 __ Fneg(d7, d6); |
5987 __ Fneg(d8, d20); | 5987 __ Fneg(d8, d20); |
5988 __ Fneg(d9, d8); | 5988 __ Fneg(d9, d8); |
5989 __ Fneg(d10, d21); | 5989 __ Fneg(d10, d21); |
5990 __ Fneg(d11, d10); | 5990 __ Fneg(d11, d10); |
5991 END(); | 5991 END(); |
5992 | 5992 |
5993 RUN(); | 5993 RUN(); |
5994 | 5994 |
5995 ASSERT_EQUAL_FP32(-1.0, s0); | 5995 CHECK_EQUAL_FP32(-1.0, s0); |
5996 ASSERT_EQUAL_FP32(1.0, s1); | 5996 CHECK_EQUAL_FP32(1.0, s1); |
5997 ASSERT_EQUAL_FP32(-0.0, s2); | 5997 CHECK_EQUAL_FP32(-0.0, s2); |
5998 ASSERT_EQUAL_FP32(0.0, s3); | 5998 CHECK_EQUAL_FP32(0.0, s3); |
5999 ASSERT_EQUAL_FP32(kFP32NegativeInfinity, s4); | 5999 CHECK_EQUAL_FP32(kFP32NegativeInfinity, s4); |
6000 ASSERT_EQUAL_FP32(kFP32PositiveInfinity, s5); | 6000 CHECK_EQUAL_FP32(kFP32PositiveInfinity, s5); |
6001 ASSERT_EQUAL_FP64(-1.0, d6); | 6001 CHECK_EQUAL_FP64(-1.0, d6); |
6002 ASSERT_EQUAL_FP64(1.0, d7); | 6002 CHECK_EQUAL_FP64(1.0, d7); |
6003 ASSERT_EQUAL_FP64(-0.0, d8); | 6003 CHECK_EQUAL_FP64(-0.0, d8); |
6004 ASSERT_EQUAL_FP64(0.0, d9); | 6004 CHECK_EQUAL_FP64(0.0, d9); |
6005 ASSERT_EQUAL_FP64(kFP64NegativeInfinity, d10); | 6005 CHECK_EQUAL_FP64(kFP64NegativeInfinity, d10); |
6006 ASSERT_EQUAL_FP64(kFP64PositiveInfinity, d11); | 6006 CHECK_EQUAL_FP64(kFP64PositiveInfinity, d11); |
6007 | 6007 |
6008 TEARDOWN(); | 6008 TEARDOWN(); |
6009 } | 6009 } |
6010 | 6010 |
6011 | 6011 |
6012 TEST(fabs) { | 6012 TEST(fabs) { |
6013 INIT_V8(); | 6013 INIT_V8(); |
6014 SETUP(); | 6014 SETUP(); |
6015 | 6015 |
6016 START(); | 6016 START(); |
6017 __ Fmov(s16, -1.0); | 6017 __ Fmov(s16, -1.0); |
6018 __ Fmov(s17, -0.0); | 6018 __ Fmov(s17, -0.0); |
6019 __ Fmov(s18, kFP32NegativeInfinity); | 6019 __ Fmov(s18, kFP32NegativeInfinity); |
6020 __ Fmov(d19, -1.0); | 6020 __ Fmov(d19, -1.0); |
6021 __ Fmov(d20, -0.0); | 6021 __ Fmov(d20, -0.0); |
6022 __ Fmov(d21, kFP64NegativeInfinity); | 6022 __ Fmov(d21, kFP64NegativeInfinity); |
6023 | 6023 |
6024 __ Fabs(s0, s16); | 6024 __ Fabs(s0, s16); |
6025 __ Fabs(s1, s0); | 6025 __ Fabs(s1, s0); |
6026 __ Fabs(s2, s17); | 6026 __ Fabs(s2, s17); |
6027 __ Fabs(s3, s18); | 6027 __ Fabs(s3, s18); |
6028 __ Fabs(d4, d19); | 6028 __ Fabs(d4, d19); |
6029 __ Fabs(d5, d4); | 6029 __ Fabs(d5, d4); |
6030 __ Fabs(d6, d20); | 6030 __ Fabs(d6, d20); |
6031 __ Fabs(d7, d21); | 6031 __ Fabs(d7, d21); |
6032 END(); | 6032 END(); |
6033 | 6033 |
6034 RUN(); | 6034 RUN(); |
6035 | 6035 |
6036 ASSERT_EQUAL_FP32(1.0, s0); | 6036 CHECK_EQUAL_FP32(1.0, s0); |
6037 ASSERT_EQUAL_FP32(1.0, s1); | 6037 CHECK_EQUAL_FP32(1.0, s1); |
6038 ASSERT_EQUAL_FP32(0.0, s2); | 6038 CHECK_EQUAL_FP32(0.0, s2); |
6039 ASSERT_EQUAL_FP32(kFP32PositiveInfinity, s3); | 6039 CHECK_EQUAL_FP32(kFP32PositiveInfinity, s3); |
6040 ASSERT_EQUAL_FP64(1.0, d4); | 6040 CHECK_EQUAL_FP64(1.0, d4); |
6041 ASSERT_EQUAL_FP64(1.0, d5); | 6041 CHECK_EQUAL_FP64(1.0, d5); |
6042 ASSERT_EQUAL_FP64(0.0, d6); | 6042 CHECK_EQUAL_FP64(0.0, d6); |
6043 ASSERT_EQUAL_FP64(kFP64PositiveInfinity, d7); | 6043 CHECK_EQUAL_FP64(kFP64PositiveInfinity, d7); |
6044 | 6044 |
6045 TEARDOWN(); | 6045 TEARDOWN(); |
6046 } | 6046 } |
6047 | 6047 |
6048 | 6048 |
6049 TEST(fsqrt) { | 6049 TEST(fsqrt) { |
6050 INIT_V8(); | 6050 INIT_V8(); |
6051 SETUP(); | 6051 SETUP(); |
6052 | 6052 |
6053 START(); | 6053 START(); |
(...skipping 23 matching lines...)
6077 __ Fsqrt(d8, d24); | 6077 __ Fsqrt(d8, d24); |
6078 __ Fsqrt(d9, d25); | 6078 __ Fsqrt(d9, d25); |
6079 __ Fsqrt(d10, d26); | 6079 __ Fsqrt(d10, d26); |
6080 __ Fsqrt(d11, d27); | 6080 __ Fsqrt(d11, d27); |
6081 __ Fsqrt(d12, d28); | 6081 __ Fsqrt(d12, d28); |
6082 __ Fsqrt(d13, d29); | 6082 __ Fsqrt(d13, d29); |
6083 END(); | 6083 END(); |
6084 | 6084 |
6085 RUN(); | 6085 RUN(); |
6086 | 6086 |
6087 ASSERT_EQUAL_FP32(0.0, s0); | 6087 CHECK_EQUAL_FP32(0.0, s0); |
6088 ASSERT_EQUAL_FP32(1.0, s1); | 6088 CHECK_EQUAL_FP32(1.0, s1); |
6089 ASSERT_EQUAL_FP32(0.5, s2); | 6089 CHECK_EQUAL_FP32(0.5, s2); |
6090 ASSERT_EQUAL_FP32(256.0, s3); | 6090 CHECK_EQUAL_FP32(256.0, s3); |
6091 ASSERT_EQUAL_FP32(-0.0, s4); | 6091 CHECK_EQUAL_FP32(-0.0, s4); |
6092 ASSERT_EQUAL_FP32(kFP32PositiveInfinity, s5); | 6092 CHECK_EQUAL_FP32(kFP32PositiveInfinity, s5); |
6093 ASSERT_EQUAL_FP32(kFP32DefaultNaN, s6); | 6093 CHECK_EQUAL_FP32(kFP32DefaultNaN, s6); |
6094 ASSERT_EQUAL_FP64(0.0, d7); | 6094 CHECK_EQUAL_FP64(0.0, d7); |
6095 ASSERT_EQUAL_FP64(1.0, d8); | 6095 CHECK_EQUAL_FP64(1.0, d8); |
6096 ASSERT_EQUAL_FP64(0.5, d9); | 6096 CHECK_EQUAL_FP64(0.5, d9); |
6097 ASSERT_EQUAL_FP64(65536.0, d10); | 6097 CHECK_EQUAL_FP64(65536.0, d10); |
6098 ASSERT_EQUAL_FP64(-0.0, d11); | 6098 CHECK_EQUAL_FP64(-0.0, d11); |
6099 ASSERT_EQUAL_FP64(kFP32PositiveInfinity, d12); | 6099 CHECK_EQUAL_FP64(kFP32PositiveInfinity, d12); |
6100 ASSERT_EQUAL_FP64(kFP64DefaultNaN, d13); | 6100 CHECK_EQUAL_FP64(kFP64DefaultNaN, d13); |
6101 | 6101 |
6102 TEARDOWN(); | 6102 TEARDOWN(); |
6103 } | 6103 } |
6104 | 6104 |
6105 | 6105 |
6106 TEST(frinta) { | 6106 TEST(frinta) { |
6107 INIT_V8(); | 6107 INIT_V8(); |
6108 SETUP(); | 6108 SETUP(); |
6109 | 6109 |
6110 START(); | 6110 START(); |
(...skipping 45 matching lines...)
6156 __ Frinta(d18, d22); | 6156 __ Frinta(d18, d22); |
6157 __ Frinta(d19, d23); | 6157 __ Frinta(d19, d23); |
6158 __ Frinta(d20, d24); | 6158 __ Frinta(d20, d24); |
6159 __ Frinta(d21, d25); | 6159 __ Frinta(d21, d25); |
6160 __ Frinta(d22, d26); | 6160 __ Frinta(d22, d26); |
6161 __ Frinta(d23, d27); | 6161 __ Frinta(d23, d27); |
6162 END(); | 6162 END(); |
6163 | 6163 |
6164 RUN(); | 6164 RUN(); |
6165 | 6165 |
6166 ASSERT_EQUAL_FP32(1.0, s0); | 6166 CHECK_EQUAL_FP32(1.0, s0); |
6167 ASSERT_EQUAL_FP32(1.0, s1); | 6167 CHECK_EQUAL_FP32(1.0, s1); |
6168 ASSERT_EQUAL_FP32(2.0, s2); | 6168 CHECK_EQUAL_FP32(2.0, s2); |
6169 ASSERT_EQUAL_FP32(2.0, s3); | 6169 CHECK_EQUAL_FP32(2.0, s3); |
6170 ASSERT_EQUAL_FP32(3.0, s4); | 6170 CHECK_EQUAL_FP32(3.0, s4); |
6171 ASSERT_EQUAL_FP32(-2.0, s5); | 6171 CHECK_EQUAL_FP32(-2.0, s5); |
6172 ASSERT_EQUAL_FP32(-3.0, s6); | 6172 CHECK_EQUAL_FP32(-3.0, s6); |
6173 ASSERT_EQUAL_FP32(kFP32PositiveInfinity, s7); | 6173 CHECK_EQUAL_FP32(kFP32PositiveInfinity, s7); |
6174 ASSERT_EQUAL_FP32(kFP32NegativeInfinity, s8); | 6174 CHECK_EQUAL_FP32(kFP32NegativeInfinity, s8); |
6175 ASSERT_EQUAL_FP32(0.0, s9); | 6175 CHECK_EQUAL_FP32(0.0, s9); |
6176 ASSERT_EQUAL_FP32(-0.0, s10); | 6176 CHECK_EQUAL_FP32(-0.0, s10); |
6177 ASSERT_EQUAL_FP32(-0.0, s11); | 6177 CHECK_EQUAL_FP32(-0.0, s11); |
6178 ASSERT_EQUAL_FP64(1.0, d12); | 6178 CHECK_EQUAL_FP64(1.0, d12); |
6179 ASSERT_EQUAL_FP64(1.0, d13); | 6179 CHECK_EQUAL_FP64(1.0, d13); |
6180 ASSERT_EQUAL_FP64(2.0, d14); | 6180 CHECK_EQUAL_FP64(2.0, d14); |
6181 ASSERT_EQUAL_FP64(2.0, d15); | 6181 CHECK_EQUAL_FP64(2.0, d15); |
6182 ASSERT_EQUAL_FP64(3.0, d16); | 6182 CHECK_EQUAL_FP64(3.0, d16); |
6183 ASSERT_EQUAL_FP64(-2.0, d17); | 6183 CHECK_EQUAL_FP64(-2.0, d17); |
6184 ASSERT_EQUAL_FP64(-3.0, d18); | 6184 CHECK_EQUAL_FP64(-3.0, d18); |
6185 ASSERT_EQUAL_FP64(kFP64PositiveInfinity, d19); | 6185 CHECK_EQUAL_FP64(kFP64PositiveInfinity, d19); |
6186 ASSERT_EQUAL_FP64(kFP64NegativeInfinity, d20); | 6186 CHECK_EQUAL_FP64(kFP64NegativeInfinity, d20); |
6187 ASSERT_EQUAL_FP64(0.0, d21); | 6187 CHECK_EQUAL_FP64(0.0, d21); |
6188 ASSERT_EQUAL_FP64(-0.0, d22); | 6188 CHECK_EQUAL_FP64(-0.0, d22); |
6189 ASSERT_EQUAL_FP64(-0.0, d23); | 6189 CHECK_EQUAL_FP64(-0.0, d23); |
6190 | 6190 |
6191 TEARDOWN(); | 6191 TEARDOWN(); |
6192 } | 6192 } |
6193 | 6193 |
6194 | 6194 |
6195 TEST(frintm) { | 6195 TEST(frintm) { |
6196 INIT_V8(); | 6196 INIT_V8(); |
6197 SETUP(); | 6197 SETUP(); |
6198 | 6198 |
6199 START(); | 6199 START(); |
(...skipping 45 matching lines...)
6245 __ Frintm(d18, d22); | 6245 __ Frintm(d18, d22); |
6246 __ Frintm(d19, d23); | 6246 __ Frintm(d19, d23); |
6247 __ Frintm(d20, d24); | 6247 __ Frintm(d20, d24); |
6248 __ Frintm(d21, d25); | 6248 __ Frintm(d21, d25); |
6249 __ Frintm(d22, d26); | 6249 __ Frintm(d22, d26); |
6250 __ Frintm(d23, d27); | 6250 __ Frintm(d23, d27); |
6251 END(); | 6251 END(); |
6252 | 6252 |
6253 RUN(); | 6253 RUN(); |
6254 | 6254 |
6255 ASSERT_EQUAL_FP32(1.0, s0); | 6255 CHECK_EQUAL_FP32(1.0, s0); |
6256 ASSERT_EQUAL_FP32(1.0, s1); | 6256 CHECK_EQUAL_FP32(1.0, s1); |
6257 ASSERT_EQUAL_FP32(1.0, s2); | 6257 CHECK_EQUAL_FP32(1.0, s2); |
6258 ASSERT_EQUAL_FP32(1.0, s3); | 6258 CHECK_EQUAL_FP32(1.0, s3); |
6259 ASSERT_EQUAL_FP32(2.0, s4); | 6259 CHECK_EQUAL_FP32(2.0, s4); |
6260 ASSERT_EQUAL_FP32(-2.0, s5); | 6260 CHECK_EQUAL_FP32(-2.0, s5); |
6261 ASSERT_EQUAL_FP32(-3.0, s6); | 6261 CHECK_EQUAL_FP32(-3.0, s6); |
6262 ASSERT_EQUAL_FP32(kFP32PositiveInfinity, s7); | 6262 CHECK_EQUAL_FP32(kFP32PositiveInfinity, s7); |
6263 ASSERT_EQUAL_FP32(kFP32NegativeInfinity, s8); | 6263 CHECK_EQUAL_FP32(kFP32NegativeInfinity, s8); |
6264 ASSERT_EQUAL_FP32(0.0, s9); | 6264 CHECK_EQUAL_FP32(0.0, s9); |
6265 ASSERT_EQUAL_FP32(-0.0, s10); | 6265 CHECK_EQUAL_FP32(-0.0, s10); |
6266 ASSERT_EQUAL_FP32(-1.0, s11); | 6266 CHECK_EQUAL_FP32(-1.0, s11); |
6267 ASSERT_EQUAL_FP64(1.0, d12); | 6267 CHECK_EQUAL_FP64(1.0, d12); |
6268 ASSERT_EQUAL_FP64(1.0, d13); | 6268 CHECK_EQUAL_FP64(1.0, d13); |
6269 ASSERT_EQUAL_FP64(1.0, d14); | 6269 CHECK_EQUAL_FP64(1.0, d14); |
6270 ASSERT_EQUAL_FP64(1.0, d15); | 6270 CHECK_EQUAL_FP64(1.0, d15); |
6271 ASSERT_EQUAL_FP64(2.0, d16); | 6271 CHECK_EQUAL_FP64(2.0, d16); |
6272 ASSERT_EQUAL_FP64(-2.0, d17); | 6272 CHECK_EQUAL_FP64(-2.0, d17); |
6273 ASSERT_EQUAL_FP64(-3.0, d18); | 6273 CHECK_EQUAL_FP64(-3.0, d18); |
6274 ASSERT_EQUAL_FP64(kFP64PositiveInfinity, d19); | 6274 CHECK_EQUAL_FP64(kFP64PositiveInfinity, d19); |
6275 ASSERT_EQUAL_FP64(kFP64NegativeInfinity, d20); | 6275 CHECK_EQUAL_FP64(kFP64NegativeInfinity, d20); |
6276 ASSERT_EQUAL_FP64(0.0, d21); | 6276 CHECK_EQUAL_FP64(0.0, d21); |
6277 ASSERT_EQUAL_FP64(-0.0, d22); | 6277 CHECK_EQUAL_FP64(-0.0, d22); |
6278 ASSERT_EQUAL_FP64(-1.0, d23); | 6278 CHECK_EQUAL_FP64(-1.0, d23); |
6279 | 6279 |
6280 TEARDOWN(); | 6280 TEARDOWN(); |
6281 } | 6281 } |
6282 | 6282 |
6283 | 6283 |
6284 TEST(frintn) { | 6284 TEST(frintn) { |
6285 INIT_V8(); | 6285 INIT_V8(); |
6286 SETUP(); | 6286 SETUP(); |
6287 | 6287 |
6288 START(); | 6288 START(); |
(...skipping 45 matching lines...)
6334 __ Frintn(d18, d22); | 6334 __ Frintn(d18, d22); |
6335 __ Frintn(d19, d23); | 6335 __ Frintn(d19, d23); |
6336 __ Frintn(d20, d24); | 6336 __ Frintn(d20, d24); |
6337 __ Frintn(d21, d25); | 6337 __ Frintn(d21, d25); |
6338 __ Frintn(d22, d26); | 6338 __ Frintn(d22, d26); |
6339 __ Frintn(d23, d27); | 6339 __ Frintn(d23, d27); |
6340 END(); | 6340 END(); |
6341 | 6341 |
6342 RUN(); | 6342 RUN(); |
6343 | 6343 |
6344 ASSERT_EQUAL_FP32(1.0, s0); | 6344 CHECK_EQUAL_FP32(1.0, s0); |
6345 ASSERT_EQUAL_FP32(1.0, s1); | 6345 CHECK_EQUAL_FP32(1.0, s1); |
6346 ASSERT_EQUAL_FP32(2.0, s2); | 6346 CHECK_EQUAL_FP32(2.0, s2); |
6347 ASSERT_EQUAL_FP32(2.0, s3); | 6347 CHECK_EQUAL_FP32(2.0, s3); |
6348 ASSERT_EQUAL_FP32(2.0, s4); | 6348 CHECK_EQUAL_FP32(2.0, s4); |
6349 ASSERT_EQUAL_FP32(-2.0, s5); | 6349 CHECK_EQUAL_FP32(-2.0, s5); |
6350 ASSERT_EQUAL_FP32(-2.0, s6); | 6350 CHECK_EQUAL_FP32(-2.0, s6); |
6351 ASSERT_EQUAL_FP32(kFP32PositiveInfinity, s7); | 6351 CHECK_EQUAL_FP32(kFP32PositiveInfinity, s7); |
6352 ASSERT_EQUAL_FP32(kFP32NegativeInfinity, s8); | 6352 CHECK_EQUAL_FP32(kFP32NegativeInfinity, s8); |
6353 ASSERT_EQUAL_FP32(0.0, s9); | 6353 CHECK_EQUAL_FP32(0.0, s9); |
6354 ASSERT_EQUAL_FP32(-0.0, s10); | 6354 CHECK_EQUAL_FP32(-0.0, s10); |
6355 ASSERT_EQUAL_FP32(-0.0, s11); | 6355 CHECK_EQUAL_FP32(-0.0, s11); |
6356 ASSERT_EQUAL_FP64(1.0, d12); | 6356 CHECK_EQUAL_FP64(1.0, d12); |
6357 ASSERT_EQUAL_FP64(1.0, d13); | 6357 CHECK_EQUAL_FP64(1.0, d13); |
6358 ASSERT_EQUAL_FP64(2.0, d14); | 6358 CHECK_EQUAL_FP64(2.0, d14); |
6359 ASSERT_EQUAL_FP64(2.0, d15); | 6359 CHECK_EQUAL_FP64(2.0, d15); |
6360 ASSERT_EQUAL_FP64(2.0, d16); | 6360 CHECK_EQUAL_FP64(2.0, d16); |
6361 ASSERT_EQUAL_FP64(-2.0, d17); | 6361 CHECK_EQUAL_FP64(-2.0, d17); |
6362 ASSERT_EQUAL_FP64(-2.0, d18); | 6362 CHECK_EQUAL_FP64(-2.0, d18); |
6363 ASSERT_EQUAL_FP64(kFP64PositiveInfinity, d19); | 6363 CHECK_EQUAL_FP64(kFP64PositiveInfinity, d19); |
6364 ASSERT_EQUAL_FP64(kFP64NegativeInfinity, d20); | 6364 CHECK_EQUAL_FP64(kFP64NegativeInfinity, d20); |
6365 ASSERT_EQUAL_FP64(0.0, d21); | 6365 CHECK_EQUAL_FP64(0.0, d21); |
6366 ASSERT_EQUAL_FP64(-0.0, d22); | 6366 CHECK_EQUAL_FP64(-0.0, d22); |
6367 ASSERT_EQUAL_FP64(-0.0, d23); | 6367 CHECK_EQUAL_FP64(-0.0, d23); |
6368 | 6368 |
6369 TEARDOWN(); | 6369 TEARDOWN(); |
6370 } | 6370 } |
6371 | 6371 |
6372 | 6372 |
6373 TEST(frintz) { | 6373 TEST(frintz) { |
6374 INIT_V8(); | 6374 INIT_V8(); |
6375 SETUP(); | 6375 SETUP(); |
6376 | 6376 |
6377 START(); | 6377 START(); |
(...skipping 41 matching lines...)
6419 __ Frintz(d16, d21); | 6419 __ Frintz(d16, d21); |
6420 __ Frintz(d17, d22); | 6420 __ Frintz(d17, d22); |
6421 __ Frintz(d18, d23); | 6421 __ Frintz(d18, d23); |
6422 __ Frintz(d19, d24); | 6422 __ Frintz(d19, d24); |
6423 __ Frintz(d20, d25); | 6423 __ Frintz(d20, d25); |
6424 __ Frintz(d21, d26); | 6424 __ Frintz(d21, d26); |
6425 END(); | 6425 END(); |
6426 | 6426 |
6427 RUN(); | 6427 RUN(); |
6428 | 6428 |
6429 ASSERT_EQUAL_FP32(1.0, s0); | 6429 CHECK_EQUAL_FP32(1.0, s0); |
6430 ASSERT_EQUAL_FP32(1.0, s1); | 6430 CHECK_EQUAL_FP32(1.0, s1); |
6431 ASSERT_EQUAL_FP32(1.0, s2); | 6431 CHECK_EQUAL_FP32(1.0, s2); |
6432 ASSERT_EQUAL_FP32(1.0, s3); | 6432 CHECK_EQUAL_FP32(1.0, s3); |
6433 ASSERT_EQUAL_FP32(2.0, s4); | 6433 CHECK_EQUAL_FP32(2.0, s4); |
6434 ASSERT_EQUAL_FP32(-1.0, s5); | 6434 CHECK_EQUAL_FP32(-1.0, s5); |
6435 ASSERT_EQUAL_FP32(-2.0, s6); | 6435 CHECK_EQUAL_FP32(-2.0, s6); |
6436 ASSERT_EQUAL_FP32(kFP32PositiveInfinity, s7); | 6436 CHECK_EQUAL_FP32(kFP32PositiveInfinity, s7); |
6437 ASSERT_EQUAL_FP32(kFP32NegativeInfinity, s8); | 6437 CHECK_EQUAL_FP32(kFP32NegativeInfinity, s8); |
6438 ASSERT_EQUAL_FP32(0.0, s9); | 6438 CHECK_EQUAL_FP32(0.0, s9); |
6439 ASSERT_EQUAL_FP32(-0.0, s10); | 6439 CHECK_EQUAL_FP32(-0.0, s10); |
6440 ASSERT_EQUAL_FP64(1.0, d11); | 6440 CHECK_EQUAL_FP64(1.0, d11); |
6441 ASSERT_EQUAL_FP64(1.0, d12); | 6441 CHECK_EQUAL_FP64(1.0, d12); |
6442 ASSERT_EQUAL_FP64(1.0, d13); | 6442 CHECK_EQUAL_FP64(1.0, d13); |
6443 ASSERT_EQUAL_FP64(1.0, d14); | 6443 CHECK_EQUAL_FP64(1.0, d14); |
6444 ASSERT_EQUAL_FP64(2.0, d15); | 6444 CHECK_EQUAL_FP64(2.0, d15); |
6445 ASSERT_EQUAL_FP64(-1.0, d16); | 6445 CHECK_EQUAL_FP64(-1.0, d16); |
6446 ASSERT_EQUAL_FP64(-2.0, d17); | 6446 CHECK_EQUAL_FP64(-2.0, d17); |
6447 ASSERT_EQUAL_FP64(kFP64PositiveInfinity, d18); | 6447 CHECK_EQUAL_FP64(kFP64PositiveInfinity, d18); |
6448 ASSERT_EQUAL_FP64(kFP64NegativeInfinity, d19); | 6448 CHECK_EQUAL_FP64(kFP64NegativeInfinity, d19); |
6449 ASSERT_EQUAL_FP64(0.0, d20); | 6449 CHECK_EQUAL_FP64(0.0, d20); |
6450 ASSERT_EQUAL_FP64(-0.0, d21); | 6450 CHECK_EQUAL_FP64(-0.0, d21); |
6451 | 6451 |
6452 TEARDOWN(); | 6452 TEARDOWN(); |
6453 } | 6453 } |
6454 | 6454 |
6455 | 6455 |
6456 TEST(fcvt_ds) { | 6456 TEST(fcvt_ds) { |
6457 INIT_V8(); | 6457 INIT_V8(); |
6458 SETUP(); | 6458 SETUP(); |
6459 | 6459 |
6460 START(); | 6460 START(); |
(...skipping 25 matching lines...)
6486 __ Fcvt(d9, s25); | 6486 __ Fcvt(d9, s25); |
6487 __ Fcvt(d10, s26); | 6487 __ Fcvt(d10, s26); |
6488 __ Fcvt(d11, s27); | 6488 __ Fcvt(d11, s27); |
6489 __ Fcvt(d12, s28); | 6489 __ Fcvt(d12, s28); |
6490 __ Fcvt(d13, s29); | 6490 __ Fcvt(d13, s29); |
6491 __ Fcvt(d14, s30); | 6491 __ Fcvt(d14, s30); |
6492 END(); | 6492 END(); |
6493 | 6493 |
6494 RUN(); | 6494 RUN(); |
6495 | 6495 |
6496 ASSERT_EQUAL_FP64(1.0f, d0); | 6496 CHECK_EQUAL_FP64(1.0f, d0); |
6497 ASSERT_EQUAL_FP64(1.1f, d1); | 6497 CHECK_EQUAL_FP64(1.1f, d1); |
6498 ASSERT_EQUAL_FP64(1.5f, d2); | 6498 CHECK_EQUAL_FP64(1.5f, d2); |
6499 ASSERT_EQUAL_FP64(1.9f, d3); | 6499 CHECK_EQUAL_FP64(1.9f, d3); |
6500 ASSERT_EQUAL_FP64(2.5f, d4); | 6500 CHECK_EQUAL_FP64(2.5f, d4); |
6501 ASSERT_EQUAL_FP64(-1.5f, d5); | 6501 CHECK_EQUAL_FP64(-1.5f, d5); |
6502 ASSERT_EQUAL_FP64(-2.5f, d6); | 6502 CHECK_EQUAL_FP64(-2.5f, d6); |
6503 ASSERT_EQUAL_FP64(kFP64PositiveInfinity, d7); | 6503 CHECK_EQUAL_FP64(kFP64PositiveInfinity, d7); |
6504 ASSERT_EQUAL_FP64(kFP64NegativeInfinity, d8); | 6504 CHECK_EQUAL_FP64(kFP64NegativeInfinity, d8); |
6505 ASSERT_EQUAL_FP64(0.0f, d9); | 6505 CHECK_EQUAL_FP64(0.0f, d9); |
6506 ASSERT_EQUAL_FP64(-0.0f, d10); | 6506 CHECK_EQUAL_FP64(-0.0f, d10); |
6507 ASSERT_EQUAL_FP64(FLT_MAX, d11); | 6507 CHECK_EQUAL_FP64(FLT_MAX, d11); |
6508 ASSERT_EQUAL_FP64(FLT_MIN, d12); | 6508 CHECK_EQUAL_FP64(FLT_MIN, d12); |
6509 | 6509 |
6510 // Check that the NaN payload is preserved according to ARM64 conversion | 6510 // Check that the NaN payload is preserved according to ARM64 conversion |
6511 // rules: | 6511 // rules: |
6512 // - The sign bit is preserved. | 6512 // - The sign bit is preserved. |
6513 // - The top bit of the mantissa is forced to 1 (making it a quiet NaN). | 6513 // - The top bit of the mantissa is forced to 1 (making it a quiet NaN). |
6514 // - The remaining mantissa bits are copied until they run out. | 6514 // - The remaining mantissa bits are copied until they run out. |
6515 // - The low-order bits that haven't already been assigned are set to 0. | 6515 // - The low-order bits that haven't already been assigned are set to 0. |
6516 ASSERT_EQUAL_FP64(rawbits_to_double(0x7ff82468a0000000), d13); | 6516 CHECK_EQUAL_FP64(rawbits_to_double(0x7ff82468a0000000), d13); |
6517 ASSERT_EQUAL_FP64(rawbits_to_double(0x7ff82468a0000000), d14); | 6517 CHECK_EQUAL_FP64(rawbits_to_double(0x7ff82468a0000000), d14); |
6518 | 6518 |
6519 TEARDOWN(); | 6519 TEARDOWN(); |
6520 } | 6520 } |
6521 | 6521 |
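The NaN-widening rule described in the fcvt_ds comment above can be reproduced on the host with plain bit manipulation. The sketch below is illustrative only: it is not part of this CL, the helper name WidenNaN is made up, and the two example encodings are simply payloads that map to the value checked in d13/d14 (the actual test inputs are in the elided lines).

#include <cstdint>
#include <cstdio>

// Widen a single-precision NaN to double precision using the rules listed in
// the comment: keep the sign, force the quiet bit, copy the payload into the
// top of the wider mantissa, and zero-fill the remaining low-order bits.
uint64_t WidenNaN(uint32_t float_bits) {
  uint64_t sign = static_cast<uint64_t>(float_bits >> 31) << 63;
  uint64_t payload = float_bits & 0x003fffff;  // low 22 mantissa bits
  return sign | 0x7ff8000000000000ULL | (payload << 29);
}

int main() {
  // A quiet NaN (0x7fc12345) and a signalling NaN (0x7f812345) carrying the
  // same payload both widen to 0x7ff82468a0000000.
  std::printf("%016llx\n", (unsigned long long)WidenNaN(0x7fc12345));
  std::printf("%016llx\n", (unsigned long long)WidenNaN(0x7f812345));
  return 0;
}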
6522 | 6522 |
6523 TEST(fcvt_sd) { | 6523 TEST(fcvt_sd) { |
6524 INIT_V8(); | 6524 INIT_V8(); |
6525 // There are a huge number of corner-cases to check, so this test iterates | 6525 // There are a huge number of corner-cases to check, so this test iterates |
6526 // through a list. The list is then negated and checked again (since the sign | 6526 // through a list. The list is then negated and checked again (since the sign |
6527 // is irrelevant in ties-to-even rounding), so the list shouldn't include any | 6527 // is irrelevant in ties-to-even rounding), so the list shouldn't include any |
(...skipping 79 matching lines...)
6607 {rawbits_to_double(0x7ff02468bfffffff), rawbits_to_float(0x7fc12345)}, | 6607 {rawbits_to_double(0x7ff02468bfffffff), rawbits_to_float(0x7fc12345)}, |
6608 {rawbits_to_double(0x7ff000001fffffff), rawbits_to_float(0x7fc00000)}, | 6608 {rawbits_to_double(0x7ff000001fffffff), rawbits_to_float(0x7fc00000)}, |
6609 }; | 6609 }; |
6610 int count = sizeof(test) / sizeof(test[0]); | 6610 int count = sizeof(test) / sizeof(test[0]); |
6611 | 6611 |
6612 for (int i = 0; i < count; i++) { | 6612 for (int i = 0; i < count; i++) { |
6613 double in = test[i].in; | 6613 double in = test[i].in; |
6614 float expected = test[i].expected; | 6614 float expected = test[i].expected; |
6615 | 6615 |
6616 // We only expect positive input. | 6616 // We only expect positive input. |
6617 ASSERT(std::signbit(in) == 0); | 6617 DCHECK(std::signbit(in) == 0); |
6618 ASSERT(std::signbit(expected) == 0); | 6618 DCHECK(std::signbit(expected) == 0); |
6619 | 6619 |
6620 SETUP(); | 6620 SETUP(); |
6621 START(); | 6621 START(); |
6622 | 6622 |
6623 __ Fmov(d10, in); | 6623 __ Fmov(d10, in); |
6624 __ Fcvt(s20, d10); | 6624 __ Fcvt(s20, d10); |
6625 | 6625 |
6626 __ Fmov(d11, -in); | 6626 __ Fmov(d11, -in); |
6627 __ Fcvt(s21, d11); | 6627 __ Fcvt(s21, d11); |
6628 | 6628 |
6629 END(); | 6629 END(); |
6630 RUN(); | 6630 RUN(); |
6631 ASSERT_EQUAL_FP32(expected, s20); | 6631 CHECK_EQUAL_FP32(expected, s20); |
6632 ASSERT_EQUAL_FP32(-expected, s21); | 6632 CHECK_EQUAL_FP32(-expected, s21); |
6633 TEARDOWN(); | 6633 TEARDOWN(); |
6634 } | 6634 } |
6635 } | 6635 } |
6636 | 6636 |
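The ties-to-even behaviour that fcvt_sd relies on can also be seen on the host, since the compiler's double-to-float conversion uses the same default IEEE-754 rounding. This is a minimal standalone illustration, not V8 code, and it assumes the host's default rounding mode has not been changed.

#include <cassert>
#include <cmath>

int main() {
  // 1 + 2^-24 lies exactly between 1.0f and the next float (1 + 2^-23).
  // The tie resolves to the neighbour with an even low mantissa bit: 1.0f.
  double tie_down = 1.0 + std::ldexp(1.0, -24);
  assert(static_cast<float>(tie_down) == 1.0f);

  // 1 + 3*2^-24 lies exactly between 1 + 2^-23 (odd low bit) and 1 + 2^-22
  // (even low bit), so this tie resolves upwards instead.
  double tie_up = 1.0 + 3.0 * std::ldexp(1.0, -24);
  assert(static_cast<float>(tie_up) == 1.0f + std::ldexp(1.0f, -22));

  // Negating the input only flips the sign of the result, which is why the
  // test can negate its whole list and reuse the same expectations.
  assert(static_cast<float>(-tie_down) == -1.0f);
  return 0;
}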
6637 | 6637 |
6638 TEST(fcvtas) { | 6638 TEST(fcvtas) { |
6639 INIT_V8(); | 6639 INIT_V8(); |
6640 SETUP(); | 6640 SETUP(); |
6641 | 6641 |
6642 START(); | 6642 START(); |
(...skipping 55 matching lines...)
6698 __ Fcvtas(x25, d25); | 6698 __ Fcvtas(x25, d25); |
6699 __ Fcvtas(x26, d26); | 6699 __ Fcvtas(x26, d26); |
6700 __ Fcvtas(x27, d27); | 6700 __ Fcvtas(x27, d27); |
6701 __ Fcvtas(x28, d28); | 6701 __ Fcvtas(x28, d28); |
6702 __ Fcvtas(x29, d29); | 6702 __ Fcvtas(x29, d29); |
6703 __ Fcvtas(x30, d30); | 6703 __ Fcvtas(x30, d30); |
6704 END(); | 6704 END(); |
6705 | 6705 |
6706 RUN(); | 6706 RUN(); |
6707 | 6707 |
6708 ASSERT_EQUAL_64(1, x0); | 6708 CHECK_EQUAL_64(1, x0); |
6709 ASSERT_EQUAL_64(1, x1); | 6709 CHECK_EQUAL_64(1, x1); |
6710 ASSERT_EQUAL_64(3, x2); | 6710 CHECK_EQUAL_64(3, x2); |
6711 ASSERT_EQUAL_64(0xfffffffd, x3); | 6711 CHECK_EQUAL_64(0xfffffffd, x3); |
6712 ASSERT_EQUAL_64(0x7fffffff, x4); | 6712 CHECK_EQUAL_64(0x7fffffff, x4); |
6713 ASSERT_EQUAL_64(0x80000000, x5); | 6713 CHECK_EQUAL_64(0x80000000, x5); |
6714 ASSERT_EQUAL_64(0x7fffff80, x6); | 6714 CHECK_EQUAL_64(0x7fffff80, x6); |
6715 ASSERT_EQUAL_64(0x80000080, x7); | 6715 CHECK_EQUAL_64(0x80000080, x7); |
6716 ASSERT_EQUAL_64(1, x8); | 6716 CHECK_EQUAL_64(1, x8); |
6717 ASSERT_EQUAL_64(1, x9); | 6717 CHECK_EQUAL_64(1, x9); |
6718 ASSERT_EQUAL_64(3, x10); | 6718 CHECK_EQUAL_64(3, x10); |
6719 ASSERT_EQUAL_64(0xfffffffd, x11); | 6719 CHECK_EQUAL_64(0xfffffffd, x11); |
6720 ASSERT_EQUAL_64(0x7fffffff, x12); | 6720 CHECK_EQUAL_64(0x7fffffff, x12); |
6721 ASSERT_EQUAL_64(0x80000000, x13); | 6721 CHECK_EQUAL_64(0x80000000, x13); |
6722 ASSERT_EQUAL_64(0x7ffffffe, x14); | 6722 CHECK_EQUAL_64(0x7ffffffe, x14); |
6723 ASSERT_EQUAL_64(0x80000001, x15); | 6723 CHECK_EQUAL_64(0x80000001, x15); |
6724 ASSERT_EQUAL_64(1, x17); | 6724 CHECK_EQUAL_64(1, x17); |
6725 ASSERT_EQUAL_64(3, x18); | 6725 CHECK_EQUAL_64(3, x18); |
6726 ASSERT_EQUAL_64(0xfffffffffffffffdUL, x19); | 6726 CHECK_EQUAL_64(0xfffffffffffffffdUL, x19); |
6727 ASSERT_EQUAL_64(0x7fffffffffffffffUL, x20); | 6727 CHECK_EQUAL_64(0x7fffffffffffffffUL, x20); |
6728 ASSERT_EQUAL_64(0x8000000000000000UL, x21); | 6728 CHECK_EQUAL_64(0x8000000000000000UL, x21); |
6729 ASSERT_EQUAL_64(0x7fffff8000000000UL, x22); | 6729 CHECK_EQUAL_64(0x7fffff8000000000UL, x22); |
6730 ASSERT_EQUAL_64(0x8000008000000000UL, x23); | 6730 CHECK_EQUAL_64(0x8000008000000000UL, x23); |
6731 ASSERT_EQUAL_64(1, x24); | 6731 CHECK_EQUAL_64(1, x24); |
6732 ASSERT_EQUAL_64(3, x25); | 6732 CHECK_EQUAL_64(3, x25); |
6733 ASSERT_EQUAL_64(0xfffffffffffffffdUL, x26); | 6733 CHECK_EQUAL_64(0xfffffffffffffffdUL, x26); |
6734 ASSERT_EQUAL_64(0x7fffffffffffffffUL, x27); | 6734 CHECK_EQUAL_64(0x7fffffffffffffffUL, x27); |
6735 ASSERT_EQUAL_64(0x8000000000000000UL, x28); | 6735 CHECK_EQUAL_64(0x8000000000000000UL, x28); |
6736 ASSERT_EQUAL_64(0x7ffffffffffffc00UL, x29); | 6736 CHECK_EQUAL_64(0x7ffffffffffffc00UL, x29); |
6737 ASSERT_EQUAL_64(0x8000000000000400UL, x30); | 6737 CHECK_EQUAL_64(0x8000000000000400UL, x30); |
6738 | 6738 |
6739 TEARDOWN(); | 6739 TEARDOWN(); |
6740 } | 6740 } |
6741 | 6741 |
6742 | 6742 |
6743 TEST(fcvtau) { | 6743 TEST(fcvtau) { |
6744 INIT_V8(); | 6744 INIT_V8(); |
6745 SETUP(); | 6745 SETUP(); |
6746 | 6746 |
6747 START(); | 6747 START(); |
(...skipping 52 matching lines...)
6800 __ Fcvtau(x25, d25); | 6800 __ Fcvtau(x25, d25); |
6801 __ Fcvtau(x26, d26); | 6801 __ Fcvtau(x26, d26); |
6802 __ Fcvtau(x27, d27); | 6802 __ Fcvtau(x27, d27); |
6803 __ Fcvtau(x28, d28); | 6803 __ Fcvtau(x28, d28); |
6804 __ Fcvtau(x29, d29); | 6804 __ Fcvtau(x29, d29); |
6805 __ Fcvtau(w30, s30); | 6805 __ Fcvtau(w30, s30); |
6806 END(); | 6806 END(); |
6807 | 6807 |
6808 RUN(); | 6808 RUN(); |
6809 | 6809 |
6810 ASSERT_EQUAL_64(1, x0); | 6810 CHECK_EQUAL_64(1, x0); |
6811 ASSERT_EQUAL_64(1, x1); | 6811 CHECK_EQUAL_64(1, x1); |
6812 ASSERT_EQUAL_64(3, x2); | 6812 CHECK_EQUAL_64(3, x2); |
6813 ASSERT_EQUAL_64(0, x3); | 6813 CHECK_EQUAL_64(0, x3); |
6814 ASSERT_EQUAL_64(0xffffffff, x4); | 6814 CHECK_EQUAL_64(0xffffffff, x4); |
6815 ASSERT_EQUAL_64(0, x5); | 6815 CHECK_EQUAL_64(0, x5); |
6816 ASSERT_EQUAL_64(0xffffff00, x6); | 6816 CHECK_EQUAL_64(0xffffff00, x6); |
6817 ASSERT_EQUAL_64(1, x8); | 6817 CHECK_EQUAL_64(1, x8); |
6818 ASSERT_EQUAL_64(1, x9); | 6818 CHECK_EQUAL_64(1, x9); |
6819 ASSERT_EQUAL_64(3, x10); | 6819 CHECK_EQUAL_64(3, x10); |
6820 ASSERT_EQUAL_64(0, x11); | 6820 CHECK_EQUAL_64(0, x11); |
6821 ASSERT_EQUAL_64(0xffffffff, x12); | 6821 CHECK_EQUAL_64(0xffffffff, x12); |
6822 ASSERT_EQUAL_64(0, x13); | 6822 CHECK_EQUAL_64(0, x13); |
6823 ASSERT_EQUAL_64(0xfffffffe, x14); | 6823 CHECK_EQUAL_64(0xfffffffe, x14); |
6824 ASSERT_EQUAL_64(1, x16); | 6824 CHECK_EQUAL_64(1, x16); |
6825 ASSERT_EQUAL_64(1, x17); | 6825 CHECK_EQUAL_64(1, x17); |
6826 ASSERT_EQUAL_64(3, x18); | 6826 CHECK_EQUAL_64(3, x18); |
6827 ASSERT_EQUAL_64(0, x19); | 6827 CHECK_EQUAL_64(0, x19); |
6828 ASSERT_EQUAL_64(0xffffffffffffffffUL, x20); | 6828 CHECK_EQUAL_64(0xffffffffffffffffUL, x20); |
6829 ASSERT_EQUAL_64(0, x21); | 6829 CHECK_EQUAL_64(0, x21); |
6830 ASSERT_EQUAL_64(0xffffff0000000000UL, x22); | 6830 CHECK_EQUAL_64(0xffffff0000000000UL, x22); |
6831 ASSERT_EQUAL_64(1, x24); | 6831 CHECK_EQUAL_64(1, x24); |
6832 ASSERT_EQUAL_64(3, x25); | 6832 CHECK_EQUAL_64(3, x25); |
6833 ASSERT_EQUAL_64(0, x26); | 6833 CHECK_EQUAL_64(0, x26); |
6834 ASSERT_EQUAL_64(0xffffffffffffffffUL, x27); | 6834 CHECK_EQUAL_64(0xffffffffffffffffUL, x27); |
6835 ASSERT_EQUAL_64(0, x28); | 6835 CHECK_EQUAL_64(0, x28); |
6836 ASSERT_EQUAL_64(0xfffffffffffff800UL, x29); | 6836 CHECK_EQUAL_64(0xfffffffffffff800UL, x29); |
6837 ASSERT_EQUAL_64(0xffffffff, x30); | 6837 CHECK_EQUAL_64(0xffffffff, x30); |
6838 | 6838 |
6839 TEARDOWN(); | 6839 TEARDOWN(); |
6840 } | 6840 } |
6841 | 6841 |
6842 | 6842 |
6843 TEST(fcvtms) { | 6843 TEST(fcvtms) { |
6844 INIT_V8(); | 6844 INIT_V8(); |
6845 SETUP(); | 6845 SETUP(); |
6846 | 6846 |
6847 START(); | 6847 START(); |
(...skipping 55 matching lines...)
6903 __ Fcvtms(x25, d25); | 6903 __ Fcvtms(x25, d25); |
6904 __ Fcvtms(x26, d26); | 6904 __ Fcvtms(x26, d26); |
6905 __ Fcvtms(x27, d27); | 6905 __ Fcvtms(x27, d27); |
6906 __ Fcvtms(x28, d28); | 6906 __ Fcvtms(x28, d28); |
6907 __ Fcvtms(x29, d29); | 6907 __ Fcvtms(x29, d29); |
6908 __ Fcvtms(x30, d30); | 6908 __ Fcvtms(x30, d30); |
6909 END(); | 6909 END(); |
6910 | 6910 |
6911 RUN(); | 6911 RUN(); |
6912 | 6912 |
6913 ASSERT_EQUAL_64(1, x0); | 6913 CHECK_EQUAL_64(1, x0); |
6914 ASSERT_EQUAL_64(1, x1); | 6914 CHECK_EQUAL_64(1, x1); |
6915 ASSERT_EQUAL_64(1, x2); | 6915 CHECK_EQUAL_64(1, x2); |
6916 ASSERT_EQUAL_64(0xfffffffe, x3); | 6916 CHECK_EQUAL_64(0xfffffffe, x3); |
6917 ASSERT_EQUAL_64(0x7fffffff, x4); | 6917 CHECK_EQUAL_64(0x7fffffff, x4); |
6918 ASSERT_EQUAL_64(0x80000000, x5); | 6918 CHECK_EQUAL_64(0x80000000, x5); |
6919 ASSERT_EQUAL_64(0x7fffff80, x6); | 6919 CHECK_EQUAL_64(0x7fffff80, x6); |
6920 ASSERT_EQUAL_64(0x80000080, x7); | 6920 CHECK_EQUAL_64(0x80000080, x7); |
6921 ASSERT_EQUAL_64(1, x8); | 6921 CHECK_EQUAL_64(1, x8); |
6922 ASSERT_EQUAL_64(1, x9); | 6922 CHECK_EQUAL_64(1, x9); |
6923 ASSERT_EQUAL_64(1, x10); | 6923 CHECK_EQUAL_64(1, x10); |
6924 ASSERT_EQUAL_64(0xfffffffe, x11); | 6924 CHECK_EQUAL_64(0xfffffffe, x11); |
6925 ASSERT_EQUAL_64(0x7fffffff, x12); | 6925 CHECK_EQUAL_64(0x7fffffff, x12); |
6926 ASSERT_EQUAL_64(0x80000000, x13); | 6926 CHECK_EQUAL_64(0x80000000, x13); |
6927 ASSERT_EQUAL_64(0x7ffffffe, x14); | 6927 CHECK_EQUAL_64(0x7ffffffe, x14); |
6928 ASSERT_EQUAL_64(0x80000001, x15); | 6928 CHECK_EQUAL_64(0x80000001, x15); |
6929 ASSERT_EQUAL_64(1, x17); | 6929 CHECK_EQUAL_64(1, x17); |
6930 ASSERT_EQUAL_64(1, x18); | 6930 CHECK_EQUAL_64(1, x18); |
6931 ASSERT_EQUAL_64(0xfffffffffffffffeUL, x19); | 6931 CHECK_EQUAL_64(0xfffffffffffffffeUL, x19); |
6932 ASSERT_EQUAL_64(0x7fffffffffffffffUL, x20); | 6932 CHECK_EQUAL_64(0x7fffffffffffffffUL, x20); |
6933 ASSERT_EQUAL_64(0x8000000000000000UL, x21); | 6933 CHECK_EQUAL_64(0x8000000000000000UL, x21); |
6934 ASSERT_EQUAL_64(0x7fffff8000000000UL, x22); | 6934 CHECK_EQUAL_64(0x7fffff8000000000UL, x22); |
6935 ASSERT_EQUAL_64(0x8000008000000000UL, x23); | 6935 CHECK_EQUAL_64(0x8000008000000000UL, x23); |
6936 ASSERT_EQUAL_64(1, x24); | 6936 CHECK_EQUAL_64(1, x24); |
6937 ASSERT_EQUAL_64(1, x25); | 6937 CHECK_EQUAL_64(1, x25); |
6938 ASSERT_EQUAL_64(0xfffffffffffffffeUL, x26); | 6938 CHECK_EQUAL_64(0xfffffffffffffffeUL, x26); |
6939 ASSERT_EQUAL_64(0x7fffffffffffffffUL, x27); | 6939 CHECK_EQUAL_64(0x7fffffffffffffffUL, x27); |
6940 ASSERT_EQUAL_64(0x8000000000000000UL, x28); | 6940 CHECK_EQUAL_64(0x8000000000000000UL, x28); |
6941 ASSERT_EQUAL_64(0x7ffffffffffffc00UL, x29); | 6941 CHECK_EQUAL_64(0x7ffffffffffffc00UL, x29); |
6942 ASSERT_EQUAL_64(0x8000000000000400UL, x30); | 6942 CHECK_EQUAL_64(0x8000000000000400UL, x30); |
6943 | 6943 |
6944 TEARDOWN(); | 6944 TEARDOWN(); |
6945 } | 6945 } |
6946 | 6946 |
6947 | 6947 |
6948 TEST(fcvtmu) { | 6948 TEST(fcvtmu) { |
6949 INIT_V8(); | 6949 INIT_V8(); |
6950 SETUP(); | 6950 SETUP(); |
6951 | 6951 |
6952 START(); | 6952 START(); |
(...skipping 54 matching lines...)
7007 __ Fcvtmu(x25, d25); | 7007 __ Fcvtmu(x25, d25); |
7008 __ Fcvtmu(x26, d26); | 7008 __ Fcvtmu(x26, d26); |
7009 __ Fcvtmu(x27, d27); | 7009 __ Fcvtmu(x27, d27); |
7010 __ Fcvtmu(x28, d28); | 7010 __ Fcvtmu(x28, d28); |
7011 __ Fcvtmu(x29, d29); | 7011 __ Fcvtmu(x29, d29); |
7012 __ Fcvtmu(x30, d30); | 7012 __ Fcvtmu(x30, d30); |
7013 END(); | 7013 END(); |
7014 | 7014 |
7015 RUN(); | 7015 RUN(); |
7016 | 7016 |
7017 ASSERT_EQUAL_64(1, x0); | 7017 CHECK_EQUAL_64(1, x0); |
7018 ASSERT_EQUAL_64(1, x1); | 7018 CHECK_EQUAL_64(1, x1); |
7019 ASSERT_EQUAL_64(1, x2); | 7019 CHECK_EQUAL_64(1, x2); |
7020 ASSERT_EQUAL_64(0, x3); | 7020 CHECK_EQUAL_64(0, x3); |
7021 ASSERT_EQUAL_64(0xffffffff, x4); | 7021 CHECK_EQUAL_64(0xffffffff, x4); |
7022 ASSERT_EQUAL_64(0, x5); | 7022 CHECK_EQUAL_64(0, x5); |
7023 ASSERT_EQUAL_64(0x7fffff80, x6); | 7023 CHECK_EQUAL_64(0x7fffff80, x6); |
7024 ASSERT_EQUAL_64(0, x7); | 7024 CHECK_EQUAL_64(0, x7); |
7025 ASSERT_EQUAL_64(1, x8); | 7025 CHECK_EQUAL_64(1, x8); |
7026 ASSERT_EQUAL_64(1, x9); | 7026 CHECK_EQUAL_64(1, x9); |
7027 ASSERT_EQUAL_64(1, x10); | 7027 CHECK_EQUAL_64(1, x10); |
7028 ASSERT_EQUAL_64(0, x11); | 7028 CHECK_EQUAL_64(0, x11); |
7029 ASSERT_EQUAL_64(0xffffffff, x12); | 7029 CHECK_EQUAL_64(0xffffffff, x12); |
7030 ASSERT_EQUAL_64(0, x13); | 7030 CHECK_EQUAL_64(0, x13); |
7031 ASSERT_EQUAL_64(0x7ffffffe, x14); | 7031 CHECK_EQUAL_64(0x7ffffffe, x14); |
7032 ASSERT_EQUAL_64(1, x17); | 7032 CHECK_EQUAL_64(1, x17); |
7033 ASSERT_EQUAL_64(1, x18); | 7033 CHECK_EQUAL_64(1, x18); |
7034 ASSERT_EQUAL_64(0x0UL, x19); | 7034 CHECK_EQUAL_64(0x0UL, x19); |
7035 ASSERT_EQUAL_64(0xffffffffffffffffUL, x20); | 7035 CHECK_EQUAL_64(0xffffffffffffffffUL, x20); |
7036 ASSERT_EQUAL_64(0x0UL, x21); | 7036 CHECK_EQUAL_64(0x0UL, x21); |
7037 ASSERT_EQUAL_64(0x7fffff8000000000UL, x22); | 7037 CHECK_EQUAL_64(0x7fffff8000000000UL, x22); |
7038 ASSERT_EQUAL_64(0x0UL, x23); | 7038 CHECK_EQUAL_64(0x0UL, x23); |
7039 ASSERT_EQUAL_64(1, x24); | 7039 CHECK_EQUAL_64(1, x24); |
7040 ASSERT_EQUAL_64(1, x25); | 7040 CHECK_EQUAL_64(1, x25); |
7041 ASSERT_EQUAL_64(0x0UL, x26); | 7041 CHECK_EQUAL_64(0x0UL, x26); |
7042 ASSERT_EQUAL_64(0xffffffffffffffffUL, x27); | 7042 CHECK_EQUAL_64(0xffffffffffffffffUL, x27); |
7043 ASSERT_EQUAL_64(0x0UL, x28); | 7043 CHECK_EQUAL_64(0x0UL, x28); |
7044 ASSERT_EQUAL_64(0x7ffffffffffffc00UL, x29); | 7044 CHECK_EQUAL_64(0x7ffffffffffffc00UL, x29); |
7045 ASSERT_EQUAL_64(0x0UL, x30); | 7045 CHECK_EQUAL_64(0x0UL, x30); |
7046 | 7046 |
7047 TEARDOWN(); | 7047 TEARDOWN(); |
7048 } | 7048 } |
7049 | 7049 |
7050 | 7050 |
7051 TEST(fcvtns) { | 7051 TEST(fcvtns) { |
7052 INIT_V8(); | 7052 INIT_V8(); |
7053 SETUP(); | 7053 SETUP(); |
7054 | 7054 |
7055 START(); | 7055 START(); |
(...skipping 55 matching lines...)
7111 __ Fcvtns(x25, d25); | 7111 __ Fcvtns(x25, d25); |
7112 __ Fcvtns(x26, d26); | 7112 __ Fcvtns(x26, d26); |
7113 __ Fcvtns(x27, d27); | 7113 __ Fcvtns(x27, d27); |
7114 // __ Fcvtns(x28, d28); | 7114 // __ Fcvtns(x28, d28); |
7115 __ Fcvtns(x29, d29); | 7115 __ Fcvtns(x29, d29); |
7116 __ Fcvtns(x30, d30); | 7116 __ Fcvtns(x30, d30); |
7117 END(); | 7117 END(); |
7118 | 7118 |
7119 RUN(); | 7119 RUN(); |
7120 | 7120 |
7121 ASSERT_EQUAL_64(1, x0); | 7121 CHECK_EQUAL_64(1, x0); |
7122 ASSERT_EQUAL_64(1, x1); | 7122 CHECK_EQUAL_64(1, x1); |
7123 ASSERT_EQUAL_64(2, x2); | 7123 CHECK_EQUAL_64(2, x2); |
7124 ASSERT_EQUAL_64(0xfffffffe, x3); | 7124 CHECK_EQUAL_64(0xfffffffe, x3); |
7125 ASSERT_EQUAL_64(0x7fffffff, x4); | 7125 CHECK_EQUAL_64(0x7fffffff, x4); |
7126 ASSERT_EQUAL_64(0x80000000, x5); | 7126 CHECK_EQUAL_64(0x80000000, x5); |
7127 ASSERT_EQUAL_64(0x7fffff80, x6); | 7127 CHECK_EQUAL_64(0x7fffff80, x6); |
7128 ASSERT_EQUAL_64(0x80000080, x7); | 7128 CHECK_EQUAL_64(0x80000080, x7); |
7129 ASSERT_EQUAL_64(1, x8); | 7129 CHECK_EQUAL_64(1, x8); |
7130 ASSERT_EQUAL_64(1, x9); | 7130 CHECK_EQUAL_64(1, x9); |
7131 ASSERT_EQUAL_64(2, x10); | 7131 CHECK_EQUAL_64(2, x10); |
7132 ASSERT_EQUAL_64(0xfffffffe, x11); | 7132 CHECK_EQUAL_64(0xfffffffe, x11); |
7133 ASSERT_EQUAL_64(0x7fffffff, x12); | 7133 CHECK_EQUAL_64(0x7fffffff, x12); |
7134 ASSERT_EQUAL_64(0x80000000, x13); | 7134 CHECK_EQUAL_64(0x80000000, x13); |
7135 ASSERT_EQUAL_64(0x7ffffffe, x14); | 7135 CHECK_EQUAL_64(0x7ffffffe, x14); |
7136 ASSERT_EQUAL_64(0x80000001, x15); | 7136 CHECK_EQUAL_64(0x80000001, x15); |
7137 ASSERT_EQUAL_64(1, x17); | 7137 CHECK_EQUAL_64(1, x17); |
7138 ASSERT_EQUAL_64(2, x18); | 7138 CHECK_EQUAL_64(2, x18); |
7139 ASSERT_EQUAL_64(0xfffffffffffffffeUL, x19); | 7139 CHECK_EQUAL_64(0xfffffffffffffffeUL, x19); |
7140 ASSERT_EQUAL_64(0x7fffffffffffffffUL, x20); | 7140 CHECK_EQUAL_64(0x7fffffffffffffffUL, x20); |
7141 ASSERT_EQUAL_64(0x8000000000000000UL, x21); | 7141 CHECK_EQUAL_64(0x8000000000000000UL, x21); |
7142 ASSERT_EQUAL_64(0x7fffff8000000000UL, x22); | 7142 CHECK_EQUAL_64(0x7fffff8000000000UL, x22); |
7143 ASSERT_EQUAL_64(0x8000008000000000UL, x23); | 7143 CHECK_EQUAL_64(0x8000008000000000UL, x23); |
7144 ASSERT_EQUAL_64(1, x24); | 7144 CHECK_EQUAL_64(1, x24); |
7145 ASSERT_EQUAL_64(2, x25); | 7145 CHECK_EQUAL_64(2, x25); |
7146 ASSERT_EQUAL_64(0xfffffffffffffffeUL, x26); | 7146 CHECK_EQUAL_64(0xfffffffffffffffeUL, x26); |
7147 ASSERT_EQUAL_64(0x7fffffffffffffffUL, x27); | 7147 CHECK_EQUAL_64(0x7fffffffffffffffUL, x27); |
7148 // ASSERT_EQUAL_64(0x8000000000000000UL, x28); | 7148 // CHECK_EQUAL_64(0x8000000000000000UL, x28); |
7149 ASSERT_EQUAL_64(0x7ffffffffffffc00UL, x29); | 7149 CHECK_EQUAL_64(0x7ffffffffffffc00UL, x29); |
7150 ASSERT_EQUAL_64(0x8000000000000400UL, x30); | 7150 CHECK_EQUAL_64(0x8000000000000400UL, x30); |
7151 | 7151 |
7152 TEARDOWN(); | 7152 TEARDOWN(); |
7153 } | 7153 } |
7154 | 7154 |
7155 | 7155 |
7156 TEST(fcvtnu) { | 7156 TEST(fcvtnu) { |
7157 INIT_V8(); | 7157 INIT_V8(); |
7158 SETUP(); | 7158 SETUP(); |
7159 | 7159 |
7160 START(); | 7160 START(); |
(...skipping 52 matching lines...)
7213 __ Fcvtnu(x25, d25); | 7213 __ Fcvtnu(x25, d25); |
7214 __ Fcvtnu(x26, d26); | 7214 __ Fcvtnu(x26, d26); |
7215 __ Fcvtnu(x27, d27); | 7215 __ Fcvtnu(x27, d27); |
7216 // __ Fcvtnu(x28, d28); | 7216 // __ Fcvtnu(x28, d28); |
7217 __ Fcvtnu(x29, d29); | 7217 __ Fcvtnu(x29, d29); |
7218 __ Fcvtnu(w30, s30); | 7218 __ Fcvtnu(w30, s30); |
7219 END(); | 7219 END(); |
7220 | 7220 |
7221 RUN(); | 7221 RUN(); |
7222 | 7222 |
7223 ASSERT_EQUAL_64(1, x0); | 7223 CHECK_EQUAL_64(1, x0); |
7224 ASSERT_EQUAL_64(1, x1); | 7224 CHECK_EQUAL_64(1, x1); |
7225 ASSERT_EQUAL_64(2, x2); | 7225 CHECK_EQUAL_64(2, x2); |
7226 ASSERT_EQUAL_64(0, x3); | 7226 CHECK_EQUAL_64(0, x3); |
7227 ASSERT_EQUAL_64(0xffffffff, x4); | 7227 CHECK_EQUAL_64(0xffffffff, x4); |
7228 ASSERT_EQUAL_64(0, x5); | 7228 CHECK_EQUAL_64(0, x5); |
7229 ASSERT_EQUAL_64(0xffffff00, x6); | 7229 CHECK_EQUAL_64(0xffffff00, x6); |
7230 ASSERT_EQUAL_64(1, x8); | 7230 CHECK_EQUAL_64(1, x8); |
7231 ASSERT_EQUAL_64(1, x9); | 7231 CHECK_EQUAL_64(1, x9); |
7232 ASSERT_EQUAL_64(2, x10); | 7232 CHECK_EQUAL_64(2, x10); |
7233 ASSERT_EQUAL_64(0, x11); | 7233 CHECK_EQUAL_64(0, x11); |
7234 ASSERT_EQUAL_64(0xffffffff, x12); | 7234 CHECK_EQUAL_64(0xffffffff, x12); |
7235 ASSERT_EQUAL_64(0, x13); | 7235 CHECK_EQUAL_64(0, x13); |
7236 ASSERT_EQUAL_64(0xfffffffe, x14); | 7236 CHECK_EQUAL_64(0xfffffffe, x14); |
7237 ASSERT_EQUAL_64(1, x16); | 7237 CHECK_EQUAL_64(1, x16); |
7238 ASSERT_EQUAL_64(1, x17); | 7238 CHECK_EQUAL_64(1, x17); |
7239 ASSERT_EQUAL_64(2, x18); | 7239 CHECK_EQUAL_64(2, x18); |
7240 ASSERT_EQUAL_64(0, x19); | 7240 CHECK_EQUAL_64(0, x19); |
7241 ASSERT_EQUAL_64(0xffffffffffffffffUL, x20); | 7241 CHECK_EQUAL_64(0xffffffffffffffffUL, x20); |
7242 ASSERT_EQUAL_64(0, x21); | 7242 CHECK_EQUAL_64(0, x21); |
7243 ASSERT_EQUAL_64(0xffffff0000000000UL, x22); | 7243 CHECK_EQUAL_64(0xffffff0000000000UL, x22); |
7244 ASSERT_EQUAL_64(1, x24); | 7244 CHECK_EQUAL_64(1, x24); |
7245 ASSERT_EQUAL_64(2, x25); | 7245 CHECK_EQUAL_64(2, x25); |
7246 ASSERT_EQUAL_64(0, x26); | 7246 CHECK_EQUAL_64(0, x26); |
7247 ASSERT_EQUAL_64(0xffffffffffffffffUL, x27); | 7247 CHECK_EQUAL_64(0xffffffffffffffffUL, x27); |
7248 // ASSERT_EQUAL_64(0, x28); | 7248 // CHECK_EQUAL_64(0, x28); |
7249 ASSERT_EQUAL_64(0xfffffffffffff800UL, x29); | 7249 CHECK_EQUAL_64(0xfffffffffffff800UL, x29); |
7250 ASSERT_EQUAL_64(0xffffffff, x30); | 7250 CHECK_EQUAL_64(0xffffffff, x30); |
7251 | 7251 |
7252 TEARDOWN(); | 7252 TEARDOWN(); |
7253 } | 7253 } |
7254 | 7254 |
7255 | 7255 |
7256 TEST(fcvtzs) { | 7256 TEST(fcvtzs) { |
7257 INIT_V8(); | 7257 INIT_V8(); |
7258 SETUP(); | 7258 SETUP(); |
7259 | 7259 |
7260 START(); | 7260 START(); |
(...skipping 55 matching lines...)
7316 __ Fcvtzs(x25, d25); | 7316 __ Fcvtzs(x25, d25); |
7317 __ Fcvtzs(x26, d26); | 7317 __ Fcvtzs(x26, d26); |
7318 __ Fcvtzs(x27, d27); | 7318 __ Fcvtzs(x27, d27); |
7319 __ Fcvtzs(x28, d28); | 7319 __ Fcvtzs(x28, d28); |
7320 __ Fcvtzs(x29, d29); | 7320 __ Fcvtzs(x29, d29); |
7321 __ Fcvtzs(x30, d30); | 7321 __ Fcvtzs(x30, d30); |
7322 END(); | 7322 END(); |
7323 | 7323 |
7324 RUN(); | 7324 RUN(); |
7325 | 7325 |
7326 ASSERT_EQUAL_64(1, x0); | 7326 CHECK_EQUAL_64(1, x0); |
7327 ASSERT_EQUAL_64(1, x1); | 7327 CHECK_EQUAL_64(1, x1); |
7328 ASSERT_EQUAL_64(1, x2); | 7328 CHECK_EQUAL_64(1, x2); |
7329 ASSERT_EQUAL_64(0xffffffff, x3); | 7329 CHECK_EQUAL_64(0xffffffff, x3); |
7330 ASSERT_EQUAL_64(0x7fffffff, x4); | 7330 CHECK_EQUAL_64(0x7fffffff, x4); |
7331 ASSERT_EQUAL_64(0x80000000, x5); | 7331 CHECK_EQUAL_64(0x80000000, x5); |
7332 ASSERT_EQUAL_64(0x7fffff80, x6); | 7332 CHECK_EQUAL_64(0x7fffff80, x6); |
7333 ASSERT_EQUAL_64(0x80000080, x7); | 7333 CHECK_EQUAL_64(0x80000080, x7); |
7334 ASSERT_EQUAL_64(1, x8); | 7334 CHECK_EQUAL_64(1, x8); |
7335 ASSERT_EQUAL_64(1, x9); | 7335 CHECK_EQUAL_64(1, x9); |
7336 ASSERT_EQUAL_64(1, x10); | 7336 CHECK_EQUAL_64(1, x10); |
7337 ASSERT_EQUAL_64(0xffffffff, x11); | 7337 CHECK_EQUAL_64(0xffffffff, x11); |
7338 ASSERT_EQUAL_64(0x7fffffff, x12); | 7338 CHECK_EQUAL_64(0x7fffffff, x12); |
7339 ASSERT_EQUAL_64(0x80000000, x13); | 7339 CHECK_EQUAL_64(0x80000000, x13); |
7340 ASSERT_EQUAL_64(0x7ffffffe, x14); | 7340 CHECK_EQUAL_64(0x7ffffffe, x14); |
7341 ASSERT_EQUAL_64(0x80000001, x15); | 7341 CHECK_EQUAL_64(0x80000001, x15); |
7342 ASSERT_EQUAL_64(1, x17); | 7342 CHECK_EQUAL_64(1, x17); |
7343 ASSERT_EQUAL_64(1, x18); | 7343 CHECK_EQUAL_64(1, x18); |
7344 ASSERT_EQUAL_64(0xffffffffffffffffUL, x19); | 7344 CHECK_EQUAL_64(0xffffffffffffffffUL, x19); |
7345 ASSERT_EQUAL_64(0x7fffffffffffffffUL, x20); | 7345 CHECK_EQUAL_64(0x7fffffffffffffffUL, x20); |
7346 ASSERT_EQUAL_64(0x8000000000000000UL, x21); | 7346 CHECK_EQUAL_64(0x8000000000000000UL, x21); |
7347 ASSERT_EQUAL_64(0x7fffff8000000000UL, x22); | 7347 CHECK_EQUAL_64(0x7fffff8000000000UL, x22); |
7348 ASSERT_EQUAL_64(0x8000008000000000UL, x23); | 7348 CHECK_EQUAL_64(0x8000008000000000UL, x23); |
7349 ASSERT_EQUAL_64(1, x24); | 7349 CHECK_EQUAL_64(1, x24); |
7350 ASSERT_EQUAL_64(1, x25); | 7350 CHECK_EQUAL_64(1, x25); |
7351 ASSERT_EQUAL_64(0xffffffffffffffffUL, x26); | 7351 CHECK_EQUAL_64(0xffffffffffffffffUL, x26); |
7352 ASSERT_EQUAL_64(0x7fffffffffffffffUL, x27); | 7352 CHECK_EQUAL_64(0x7fffffffffffffffUL, x27); |
7353 ASSERT_EQUAL_64(0x8000000000000000UL, x28); | 7353 CHECK_EQUAL_64(0x8000000000000000UL, x28); |
7354 ASSERT_EQUAL_64(0x7ffffffffffffc00UL, x29); | 7354 CHECK_EQUAL_64(0x7ffffffffffffc00UL, x29); |
7355 ASSERT_EQUAL_64(0x8000000000000400UL, x30); | 7355 CHECK_EQUAL_64(0x8000000000000400UL, x30); |
7356 | 7356 |
7357 TEARDOWN(); | 7357 TEARDOWN(); |
7358 } | 7358 } |
7359 | 7359 |
7360 | 7360 |
7361 TEST(fcvtzu) { | 7361 TEST(fcvtzu) { |
7362 INIT_V8(); | 7362 INIT_V8(); |
7363 SETUP(); | 7363 SETUP(); |
7364 | 7364 |
7365 START(); | 7365 START(); |
(...skipping 54 matching lines...)
7420 __ Fcvtzu(x25, d25); | 7420 __ Fcvtzu(x25, d25); |
7421 __ Fcvtzu(x26, d26); | 7421 __ Fcvtzu(x26, d26); |
7422 __ Fcvtzu(x27, d27); | 7422 __ Fcvtzu(x27, d27); |
7423 __ Fcvtzu(x28, d28); | 7423 __ Fcvtzu(x28, d28); |
7424 __ Fcvtzu(x29, d29); | 7424 __ Fcvtzu(x29, d29); |
7425 __ Fcvtzu(x30, d30); | 7425 __ Fcvtzu(x30, d30); |
7426 END(); | 7426 END(); |
7427 | 7427 |
7428 RUN(); | 7428 RUN(); |
7429 | 7429 |
7430 ASSERT_EQUAL_64(1, x0); | 7430 CHECK_EQUAL_64(1, x0); |
7431 ASSERT_EQUAL_64(1, x1); | 7431 CHECK_EQUAL_64(1, x1); |
7432 ASSERT_EQUAL_64(1, x2); | 7432 CHECK_EQUAL_64(1, x2); |
7433 ASSERT_EQUAL_64(0, x3); | 7433 CHECK_EQUAL_64(0, x3); |
7434 ASSERT_EQUAL_64(0xffffffff, x4); | 7434 CHECK_EQUAL_64(0xffffffff, x4); |
7435 ASSERT_EQUAL_64(0, x5); | 7435 CHECK_EQUAL_64(0, x5); |
7436 ASSERT_EQUAL_64(0x7fffff80, x6); | 7436 CHECK_EQUAL_64(0x7fffff80, x6); |
7437 ASSERT_EQUAL_64(0, x7); | 7437 CHECK_EQUAL_64(0, x7); |
7438 ASSERT_EQUAL_64(1, x8); | 7438 CHECK_EQUAL_64(1, x8); |
7439 ASSERT_EQUAL_64(1, x9); | 7439 CHECK_EQUAL_64(1, x9); |
7440 ASSERT_EQUAL_64(1, x10); | 7440 CHECK_EQUAL_64(1, x10); |
7441 ASSERT_EQUAL_64(0, x11); | 7441 CHECK_EQUAL_64(0, x11); |
7442 ASSERT_EQUAL_64(0xffffffff, x12); | 7442 CHECK_EQUAL_64(0xffffffff, x12); |
7443 ASSERT_EQUAL_64(0, x13); | 7443 CHECK_EQUAL_64(0, x13); |
7444 ASSERT_EQUAL_64(0x7ffffffe, x14); | 7444 CHECK_EQUAL_64(0x7ffffffe, x14); |
7445 ASSERT_EQUAL_64(1, x17); | 7445 CHECK_EQUAL_64(1, x17); |
7446 ASSERT_EQUAL_64(1, x18); | 7446 CHECK_EQUAL_64(1, x18); |
7447 ASSERT_EQUAL_64(0x0UL, x19); | 7447 CHECK_EQUAL_64(0x0UL, x19); |
7448 ASSERT_EQUAL_64(0xffffffffffffffffUL, x20); | 7448 CHECK_EQUAL_64(0xffffffffffffffffUL, x20); |
7449 ASSERT_EQUAL_64(0x0UL, x21); | 7449 CHECK_EQUAL_64(0x0UL, x21); |
7450 ASSERT_EQUAL_64(0x7fffff8000000000UL, x22); | 7450 CHECK_EQUAL_64(0x7fffff8000000000UL, x22); |
7451 ASSERT_EQUAL_64(0x0UL, x23); | 7451 CHECK_EQUAL_64(0x0UL, x23); |
7452 ASSERT_EQUAL_64(1, x24); | 7452 CHECK_EQUAL_64(1, x24); |
7453 ASSERT_EQUAL_64(1, x25); | 7453 CHECK_EQUAL_64(1, x25); |
7454 ASSERT_EQUAL_64(0x0UL, x26); | 7454 CHECK_EQUAL_64(0x0UL, x26); |
7455 ASSERT_EQUAL_64(0xffffffffffffffffUL, x27); | 7455 CHECK_EQUAL_64(0xffffffffffffffffUL, x27); |
7456 ASSERT_EQUAL_64(0x0UL, x28); | 7456 CHECK_EQUAL_64(0x0UL, x28); |
7457 ASSERT_EQUAL_64(0x7ffffffffffffc00UL, x29); | 7457 CHECK_EQUAL_64(0x7ffffffffffffc00UL, x29); |
7458 ASSERT_EQUAL_64(0x0UL, x30); | 7458 CHECK_EQUAL_64(0x0UL, x30); |
7459 | 7459 |
7460 TEARDOWN(); | 7460 TEARDOWN(); |
7461 } | 7461 } |
7462 | 7462 |
7463 | 7463 |
7464 // Test that scvtf and ucvtf can convert the 64-bit input into the expected | 7464 // Test that scvtf and ucvtf can convert the 64-bit input into the expected |
7465 // value. All possible values of 'fbits' are tested. The expected value is | 7465 // value. All possible values of 'fbits' are tested. The expected value is |
7466 // modified accordingly in each case. | 7466 // modified accordingly in each case. |
7467 // | 7467 // |
7468 // The expected value is specified as the bit encoding of the expected double | 7468 // The expected value is specified as the bit encoding of the expected double |
(...skipping 67 matching lines...)
7536 END(); | 7536 END(); |
7537 RUN(); | 7537 RUN(); |
7538 | 7538 |
7539 // Check the results. | 7539 // Check the results. |
7540 double expected_scvtf_base = rawbits_to_double(expected_scvtf_bits); | 7540 double expected_scvtf_base = rawbits_to_double(expected_scvtf_bits); |
7541 double expected_ucvtf_base = rawbits_to_double(expected_ucvtf_bits); | 7541 double expected_ucvtf_base = rawbits_to_double(expected_ucvtf_bits); |
7542 | 7542 |
7543 for (int fbits = 0; fbits <= 32; fbits++) { | 7543 for (int fbits = 0; fbits <= 32; fbits++) { |
7544 double expected_scvtf = expected_scvtf_base / pow(2.0, fbits); | 7544 double expected_scvtf = expected_scvtf_base / pow(2.0, fbits); |
7545 double expected_ucvtf = expected_ucvtf_base / pow(2.0, fbits); | 7545 double expected_ucvtf = expected_ucvtf_base / pow(2.0, fbits); |
7546 ASSERT_EQUAL_FP64(expected_scvtf, results_scvtf_x[fbits]); | 7546 CHECK_EQUAL_FP64(expected_scvtf, results_scvtf_x[fbits]); |
7547 ASSERT_EQUAL_FP64(expected_ucvtf, results_ucvtf_x[fbits]); | 7547 CHECK_EQUAL_FP64(expected_ucvtf, results_ucvtf_x[fbits]); |
7548 if (cvtf_s32) ASSERT_EQUAL_FP64(expected_scvtf, results_scvtf_w[fbits]); | 7548 if (cvtf_s32) CHECK_EQUAL_FP64(expected_scvtf, results_scvtf_w[fbits]); |
7549 if (cvtf_u32) ASSERT_EQUAL_FP64(expected_ucvtf, results_ucvtf_w[fbits]); | 7549 if (cvtf_u32) CHECK_EQUAL_FP64(expected_ucvtf, results_ucvtf_w[fbits]); |
7550 } | 7550 } |
7551 for (int fbits = 33; fbits <= 64; fbits++) { | 7551 for (int fbits = 33; fbits <= 64; fbits++) { |
7552 double expected_scvtf = expected_scvtf_base / pow(2.0, fbits); | 7552 double expected_scvtf = expected_scvtf_base / pow(2.0, fbits); |
7553 double expected_ucvtf = expected_ucvtf_base / pow(2.0, fbits); | 7553 double expected_ucvtf = expected_ucvtf_base / pow(2.0, fbits); |
7554 ASSERT_EQUAL_FP64(expected_scvtf, results_scvtf_x[fbits]); | 7554 CHECK_EQUAL_FP64(expected_scvtf, results_scvtf_x[fbits]); |
7555 ASSERT_EQUAL_FP64(expected_ucvtf, results_ucvtf_x[fbits]); | 7555 CHECK_EQUAL_FP64(expected_ucvtf, results_ucvtf_x[fbits]); |
7556 } | 7556 } |
7557 | 7557 |
7558 TEARDOWN(); | 7558 TEARDOWN(); |
7559 } | 7559 } |
7560 | 7560 |
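The fbits loops above check the usual fixed-point interpretation: with 'fbits' fractional bits, Scvtf/Ucvtf should produce the integer input divided by 2^fbits, which is exactly how expected_scvtf and expected_ucvtf are derived from the fbits == 0 base value. A minimal host-side sketch of that relationship (illustrative only, not V8 code):

#include <cmath>
#include <cstdint>
#include <cstdio>

// Interpret 'bits' as a fixed-point value with 'fbits' fractional bits.
double FixedToDouble(int64_t bits, int fbits) {
  return static_cast<double>(bits) / std::pow(2.0, fbits);
}

int main() {
  std::printf("%f\n", FixedToDouble(0x100, 0));   // 256.0  (plain scvtf)
  std::printf("%f\n", FixedToDouble(0x100, 8));   // 1.0    (8 fractional bits)
  std::printf("%f\n", FixedToDouble(0x100, 12));  // 0.0625
  return 0;
}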
7561 | 7561 |
7562 TEST(scvtf_ucvtf_double) { | 7562 TEST(scvtf_ucvtf_double) { |
7563 INIT_V8(); | 7563 INIT_V8(); |
7564 // Simple conversions of positive numbers which require no rounding; the | 7564 // Simple conversions of positive numbers which require no rounding; the |
7565 // results should not depend on the rounding mode, and ucvtf and scvtf should | 7565 // results should not depend on the rounding mode, and ucvtf and scvtf should |
(...skipping 125 matching lines...)
7691 END(); | 7691 END(); |
7692 RUN(); | 7692 RUN(); |
7693 | 7693 |
7694 // Check the results. | 7694 // Check the results. |
7695 float expected_scvtf_base = rawbits_to_float(expected_scvtf_bits); | 7695 float expected_scvtf_base = rawbits_to_float(expected_scvtf_bits); |
7696 float expected_ucvtf_base = rawbits_to_float(expected_ucvtf_bits); | 7696 float expected_ucvtf_base = rawbits_to_float(expected_ucvtf_bits); |
7697 | 7697 |
7698 for (int fbits = 0; fbits <= 32; fbits++) { | 7698 for (int fbits = 0; fbits <= 32; fbits++) { |
7699 float expected_scvtf = expected_scvtf_base / powf(2, fbits); | 7699 float expected_scvtf = expected_scvtf_base / powf(2, fbits); |
7700 float expected_ucvtf = expected_ucvtf_base / powf(2, fbits); | 7700 float expected_ucvtf = expected_ucvtf_base / powf(2, fbits); |
7701 ASSERT_EQUAL_FP32(expected_scvtf, results_scvtf_x[fbits]); | 7701 CHECK_EQUAL_FP32(expected_scvtf, results_scvtf_x[fbits]); |
7702 ASSERT_EQUAL_FP32(expected_ucvtf, results_ucvtf_x[fbits]); | 7702 CHECK_EQUAL_FP32(expected_ucvtf, results_ucvtf_x[fbits]); |
7703 if (cvtf_s32) ASSERT_EQUAL_FP32(expected_scvtf, results_scvtf_w[fbits]); | 7703 if (cvtf_s32) CHECK_EQUAL_FP32(expected_scvtf, results_scvtf_w[fbits]); |
7704 if (cvtf_u32) ASSERT_EQUAL_FP32(expected_ucvtf, results_ucvtf_w[fbits]); | 7704 if (cvtf_u32) CHECK_EQUAL_FP32(expected_ucvtf, results_ucvtf_w[fbits]); |
7705 break; | 7705 break; |
7706 } | 7706 } |
7707 for (int fbits = 33; fbits <= 64; fbits++) { | 7707 for (int fbits = 33; fbits <= 64; fbits++) { |
7708 break; | 7708 break; |
7709 float expected_scvtf = expected_scvtf_base / powf(2, fbits); | 7709 float expected_scvtf = expected_scvtf_base / powf(2, fbits); |
7710 float expected_ucvtf = expected_ucvtf_base / powf(2, fbits); | 7710 float expected_ucvtf = expected_ucvtf_base / powf(2, fbits); |
7711 ASSERT_EQUAL_FP32(expected_scvtf, results_scvtf_x[fbits]); | 7711 CHECK_EQUAL_FP32(expected_scvtf, results_scvtf_x[fbits]); |
7712 ASSERT_EQUAL_FP32(expected_ucvtf, results_ucvtf_x[fbits]); | 7712 CHECK_EQUAL_FP32(expected_ucvtf, results_ucvtf_x[fbits]); |
7713 } | 7713 } |
7714 | 7714 |
7715 TEARDOWN(); | 7715 TEARDOWN(); |
7716 } | 7716 } |
7717 | 7717 |
7718 | 7718 |
7719 TEST(scvtf_ucvtf_float) { | 7719 TEST(scvtf_ucvtf_float) { |
7720 INIT_V8(); | 7720 INIT_V8(); |
7721 // Simple conversions of positive numbers which require no rounding; the | 7721 // Simple conversions of positive numbers which require no rounding; the |
7722 // results should not depend on the rounding mode, and ucvtf and scvtf should | 7722 // results should not depend on the rounding mode, and ucvtf and scvtf should |
(...skipping 83 matching lines...)
7806 __ Adds(w0, w2, w2); | 7806 __ Adds(w0, w2, w2); |
7807 __ Mrs(x5, NZCV); | 7807 __ Mrs(x5, NZCV); |
7808 | 7808 |
7809 // Read the default FPCR. | 7809 // Read the default FPCR. |
7810 __ Mrs(x6, FPCR); | 7810 __ Mrs(x6, FPCR); |
7811 END(); | 7811 END(); |
7812 | 7812 |
7813 RUN(); | 7813 RUN(); |
7814 | 7814 |
7815 // NZCV | 7815 // NZCV |
7816 ASSERT_EQUAL_32(ZCFlag, w3); | 7816 CHECK_EQUAL_32(ZCFlag, w3); |
7817 ASSERT_EQUAL_32(NFlag, w4); | 7817 CHECK_EQUAL_32(NFlag, w4); |
7818 ASSERT_EQUAL_32(ZCVFlag, w5); | 7818 CHECK_EQUAL_32(ZCVFlag, w5); |
7819 | 7819 |
7820 // FPCR | 7820 // FPCR |
7821 // The default FPCR on Linux-based platforms is 0. | 7821 // The default FPCR on Linux-based platforms is 0. |
7822 ASSERT_EQUAL_32(0, w6); | 7822 CHECK_EQUAL_32(0, w6); |
7823 | 7823 |
7824 TEARDOWN(); | 7824 TEARDOWN(); |
7825 } | 7825 } |
7826 | 7826 |
7827 | 7827 |
7828 TEST(system_msr) { | 7828 TEST(system_msr) { |
7829 INIT_V8(); | 7829 INIT_V8(); |
7830 // All FPCR fields that must be implemented: AHP, DN, FZ, RMode | 7830 // All FPCR fields that must be implemented: AHP, DN, FZ, RMode |
7831 const uint64_t fpcr_core = 0x07c00000; | 7831 const uint64_t fpcr_core = 0x07c00000; |
7832 | 7832 |
(...skipping 47 matching lines...)
7880 // for the simulator. | 7880 // for the simulator. |
7881 __ Mov(x10, ~fpcr_all); | 7881 __ Mov(x10, ~fpcr_all); |
7882 __ Msr(FPCR, x10); | 7882 __ Msr(FPCR, x10); |
7883 __ Mrs(x10, FPCR); | 7883 __ Mrs(x10, FPCR); |
7884 | 7884 |
7885 END(); | 7885 END(); |
7886 | 7886 |
7887 RUN(); | 7887 RUN(); |
7888 | 7888 |
7889 // We should have incremented x7 (from 0) exactly 8 times. | 7889 // We should have incremented x7 (from 0) exactly 8 times. |
7890 ASSERT_EQUAL_64(8, x7); | 7890 CHECK_EQUAL_64(8, x7); |
7891 | 7891 |
7892 ASSERT_EQUAL_64(fpcr_core, x8); | 7892 CHECK_EQUAL_64(fpcr_core, x8); |
7893 ASSERT_EQUAL_64(fpcr_core, x9); | 7893 CHECK_EQUAL_64(fpcr_core, x9); |
7894 ASSERT_EQUAL_64(0, x10); | 7894 CHECK_EQUAL_64(0, x10); |
7895 | 7895 |
7896 TEARDOWN(); | 7896 TEARDOWN(); |
7897 } | 7897 } |
7898 | 7898 |
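The fpcr_core constant used in system_msr is just the union of the four required FPCR fields. Assuming the standard ARMv8 FPCR bit positions (RMode [23:22], FZ [24], DN [25], AHP [26]), the mask can be reconstructed as follows; this is an illustrative sketch, not part of the test:

#include <cstdint>

constexpr uint64_t kRMode = UINT64_C(3) << 22;  // rounding mode, bits [23:22]
constexpr uint64_t kFZ    = UINT64_C(1) << 24;  // flush-to-zero
constexpr uint64_t kDN    = UINT64_C(1) << 25;  // default NaN
constexpr uint64_t kAHP   = UINT64_C(1) << 26;  // alternative half-precision
static_assert((kRMode | kFZ | kDN | kAHP) == 0x07c00000,
              "union of the required fields matches fpcr_core");

int main() { return 0; }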
7899 | 7899 |
7900 TEST(system_nop) { | 7900 TEST(system_nop) { |
7901 INIT_V8(); | 7901 INIT_V8(); |
7902 SETUP(); | 7902 SETUP(); |
7903 RegisterDump before; | 7903 RegisterDump before; |
7904 | 7904 |
7905 START(); | 7905 START(); |
7906 before.Dump(&masm); | 7906 before.Dump(&masm); |
7907 __ Nop(); | 7907 __ Nop(); |
7908 END(); | 7908 END(); |
7909 | 7909 |
7910 RUN(); | 7910 RUN(); |
7911 | 7911 |
7912 ASSERT_EQUAL_REGISTERS(before); | 7912 CHECK_EQUAL_REGISTERS(before); |
7913 ASSERT_EQUAL_NZCV(before.flags_nzcv()); | 7913 CHECK_EQUAL_NZCV(before.flags_nzcv()); |
7914 | 7914 |
7915 TEARDOWN(); | 7915 TEARDOWN(); |
7916 } | 7916 } |
7917 | 7917 |
7918 | 7918 |
7919 TEST(zero_dest) { | 7919 TEST(zero_dest) { |
7920 INIT_V8(); | 7920 INIT_V8(); |
7921 SETUP(); | 7921 SETUP(); |
7922 RegisterDump before; | 7922 RegisterDump before; |
7923 | 7923 |
(...skipping 45 matching lines...)
7969 __ Mov(x29, x30); | 7969 __ Mov(x29, x30); |
7970 __ Mov(x30, csp); | 7970 __ Mov(x30, csp); |
7971 __ Mov(csp, x29); | 7971 __ Mov(csp, x29); |
7972 // We used x29 as a scratch register, so reset it to make sure it doesn't | 7972 // We used x29 as a scratch register, so reset it to make sure it doesn't |
7973 // trigger a test failure. | 7973 // trigger a test failure. |
7974 __ Add(x29, x28, x1); | 7974 __ Add(x29, x28, x1); |
7975 END(); | 7975 END(); |
7976 | 7976 |
7977 RUN(); | 7977 RUN(); |
7978 | 7978 |
7979 ASSERT_EQUAL_REGISTERS(before); | 7979 CHECK_EQUAL_REGISTERS(before); |
7980 ASSERT_EQUAL_NZCV(before.flags_nzcv()); | 7980 CHECK_EQUAL_NZCV(before.flags_nzcv()); |
7981 | 7981 |
7982 TEARDOWN(); | 7982 TEARDOWN(); |
7983 } | 7983 } |
7984 | 7984 |
7985 | 7985 |
7986 TEST(zero_dest_setflags) { | 7986 TEST(zero_dest_setflags) { |
7987 INIT_V8(); | 7987 INIT_V8(); |
7988 SETUP(); | 7988 SETUP(); |
7989 RegisterDump before; | 7989 RegisterDump before; |
7990 | 7990 |
(...skipping 43 matching lines...)
8034 __ Mov(x29, x30); | 8034 __ Mov(x29, x30); |
8035 __ Mov(x30, csp); | 8035 __ Mov(x30, csp); |
8036 __ Mov(csp, x29); | 8036 __ Mov(csp, x29); |
8037 // We used x29 as a scratch register, so reset it to make sure it doesn't | 8037 // We used x29 as a scratch register, so reset it to make sure it doesn't |
8038 // trigger a test failure. | 8038 // trigger a test failure. |
8039 __ Add(x29, x28, x1); | 8039 __ Add(x29, x28, x1); |
8040 END(); | 8040 END(); |
8041 | 8041 |
8042 RUN(); | 8042 RUN(); |
8043 | 8043 |
8044 ASSERT_EQUAL_REGISTERS(before); | 8044 CHECK_EQUAL_REGISTERS(before); |
8045 | 8045 |
8046 TEARDOWN(); | 8046 TEARDOWN(); |
8047 } | 8047 } |
8048 | 8048 |
8049 | 8049 |
8050 TEST(register_bit) { | 8050 TEST(register_bit) { |
8051 // No code generation takes place in this test, so no need to setup and | 8051 // No code generation takes place in this test, so no need to setup and |
8052 // teardown. | 8052 // teardown. |
8053 | 8053 |
8054 // Simple tests. | 8054 // Simple tests. |
(...skipping 92 matching lines...)
8147 __ Peek(w10, 0); | 8147 __ Peek(w10, 0); |
8148 __ Peek(w11, 4); | 8148 __ Peek(w11, 4); |
8149 __ Peek(w12, 8); | 8149 __ Peek(w12, 8); |
8150 __ Peek(w13, 12); | 8150 __ Peek(w13, 12); |
8151 | 8151 |
8152 __ Drop(4); | 8152 __ Drop(4); |
8153 | 8153 |
8154 END(); | 8154 END(); |
8155 RUN(); | 8155 RUN(); |
8156 | 8156 |
8157 ASSERT_EQUAL_64(literal_base * 1, x0); | 8157 CHECK_EQUAL_64(literal_base * 1, x0); |
8158 ASSERT_EQUAL_64(literal_base * 2, x1); | 8158 CHECK_EQUAL_64(literal_base * 2, x1); |
8159 ASSERT_EQUAL_64(literal_base * 3, x2); | 8159 CHECK_EQUAL_64(literal_base * 3, x2); |
8160 ASSERT_EQUAL_64(literal_base * 4, x3); | 8160 CHECK_EQUAL_64(literal_base * 4, x3); |
8161 | 8161 |
8162 ASSERT_EQUAL_64((literal_base * 1) & 0xffffffff, x10); | 8162 CHECK_EQUAL_64((literal_base * 1) & 0xffffffff, x10); |
8163 ASSERT_EQUAL_64((literal_base * 2) & 0xffffffff, x11); | 8163 CHECK_EQUAL_64((literal_base * 2) & 0xffffffff, x11); |
8164 ASSERT_EQUAL_64((literal_base * 3) & 0xffffffff, x12); | 8164 CHECK_EQUAL_64((literal_base * 3) & 0xffffffff, x12); |
8165 ASSERT_EQUAL_64((literal_base * 4) & 0xffffffff, x13); | 8165 CHECK_EQUAL_64((literal_base * 4) & 0xffffffff, x13); |
8166 | 8166 |
8167 TEARDOWN(); | 8167 TEARDOWN(); |
8168 } | 8168 } |
8169 | 8169 |
8170 | 8170 |
8171 TEST(peek_poke_unaligned) { | 8171 TEST(peek_poke_unaligned) { |
8172 INIT_V8(); | 8172 INIT_V8(); |
8173 SETUP(); | 8173 SETUP(); |
8174 START(); | 8174 START(); |
8175 | 8175 |
(...skipping 49 matching lines...)
8225 __ Peek(w11, 2); | 8225 __ Peek(w11, 2); |
8226 __ Poke(w2, 3); | 8226 __ Poke(w2, 3); |
8227 Clobber(&masm, w12.Bit()); | 8227 Clobber(&masm, w12.Bit()); |
8228 __ Peek(w12, 3); | 8228 __ Peek(w12, 3); |
8229 | 8229 |
8230 __ Drop(4); | 8230 __ Drop(4); |
8231 | 8231 |
8232 END(); | 8232 END(); |
8233 RUN(); | 8233 RUN(); |
8234 | 8234 |
8235 ASSERT_EQUAL_64(literal_base * 1, x0); | 8235 CHECK_EQUAL_64(literal_base * 1, x0); |
8236 ASSERT_EQUAL_64(literal_base * 2, x1); | 8236 CHECK_EQUAL_64(literal_base * 2, x1); |
8237 ASSERT_EQUAL_64(literal_base * 3, x2); | 8237 CHECK_EQUAL_64(literal_base * 3, x2); |
8238 ASSERT_EQUAL_64(literal_base * 4, x3); | 8238 CHECK_EQUAL_64(literal_base * 4, x3); |
8239 ASSERT_EQUAL_64(literal_base * 5, x4); | 8239 CHECK_EQUAL_64(literal_base * 5, x4); |
8240 ASSERT_EQUAL_64(literal_base * 6, x5); | 8240 CHECK_EQUAL_64(literal_base * 6, x5); |
8241 ASSERT_EQUAL_64(literal_base * 7, x6); | 8241 CHECK_EQUAL_64(literal_base * 7, x6); |
8242 | 8242 |
8243 ASSERT_EQUAL_64((literal_base * 1) & 0xffffffff, x10); | 8243 CHECK_EQUAL_64((literal_base * 1) & 0xffffffff, x10); |
8244 ASSERT_EQUAL_64((literal_base * 2) & 0xffffffff, x11); | 8244 CHECK_EQUAL_64((literal_base * 2) & 0xffffffff, x11); |
8245 ASSERT_EQUAL_64((literal_base * 3) & 0xffffffff, x12); | 8245 CHECK_EQUAL_64((literal_base * 3) & 0xffffffff, x12); |
8246 | 8246 |
8247 TEARDOWN(); | 8247 TEARDOWN(); |
8248 } | 8248 } |
8249 | 8249 |
8250 | 8250 |
8251 TEST(peek_poke_endianness) { | 8251 TEST(peek_poke_endianness) { |
8252 INIT_V8(); | 8252 INIT_V8(); |
8253 SETUP(); | 8253 SETUP(); |
8254 START(); | 8254 START(); |
8255 | 8255 |
(...skipping 26 matching lines...)
8282 | 8282 |
8283 END(); | 8283 END(); |
8284 RUN(); | 8284 RUN(); |
8285 | 8285 |
8286 uint64_t x0_expected = literal_base * 1; | 8286 uint64_t x0_expected = literal_base * 1; |
8287 uint64_t x1_expected = literal_base * 2; | 8287 uint64_t x1_expected = literal_base * 2; |
8288 uint64_t x4_expected = (x0_expected << 32) | (x0_expected >> 32); | 8288 uint64_t x4_expected = (x0_expected << 32) | (x0_expected >> 32); |
8289 uint64_t x5_expected = ((x1_expected << 16) & 0xffff0000) | | 8289 uint64_t x5_expected = ((x1_expected << 16) & 0xffff0000) | |
8290 ((x1_expected >> 16) & 0x0000ffff); | 8290 ((x1_expected >> 16) & 0x0000ffff); |
8291 | 8291 |
8292 ASSERT_EQUAL_64(x0_expected, x0); | 8292 CHECK_EQUAL_64(x0_expected, x0); |
8293 ASSERT_EQUAL_64(x1_expected, x1); | 8293 CHECK_EQUAL_64(x1_expected, x1); |
8294 ASSERT_EQUAL_64(x4_expected, x4); | 8294 CHECK_EQUAL_64(x4_expected, x4); |
8295 ASSERT_EQUAL_64(x5_expected, x5); | 8295 CHECK_EQUAL_64(x5_expected, x5); |
8296 | 8296 |
8297 TEARDOWN(); | 8297 TEARDOWN(); |
8298 } | 8298 } |
8299 | 8299 |
8300 | 8300 |
8301 TEST(peek_poke_mixed) { | 8301 TEST(peek_poke_mixed) { |
8302 INIT_V8(); | 8302 INIT_V8(); |
8303 SETUP(); | 8303 SETUP(); |
8304 START(); | 8304 START(); |
8305 | 8305 |
(...skipping 13 matching lines...)
8319 __ Claim(4); | 8319 __ Claim(4); |
8320 | 8320 |
8321 // Mix with other stack operations. | 8321 // Mix with other stack operations. |
8322 // After this section: | 8322 // After this section: |
8323 // x0-x3 should be unchanged. | 8323 // x0-x3 should be unchanged. |
8324 // x6 should match x1[31:0]:x0[63:32] | 8324 // x6 should match x1[31:0]:x0[63:32] |
8325 // w7 should match x1[15:0]:x0[63:48] | 8325 // w7 should match x1[15:0]:x0[63:48] |
8326 __ Poke(x1, 8); | 8326 __ Poke(x1, 8); |
8327 __ Poke(x0, 0); | 8327 __ Poke(x0, 0); |
8328 { | 8328 { |
8329 ASSERT(__ StackPointer().Is(csp)); | 8329 DCHECK(__ StackPointer().Is(csp)); |
8330 __ Mov(x4, __ StackPointer()); | 8330 __ Mov(x4, __ StackPointer()); |
8331 __ SetStackPointer(x4); | 8331 __ SetStackPointer(x4); |
8332 | 8332 |
8333 __ Poke(wzr, 0); // Clobber the space we're about to drop. | 8333 __ Poke(wzr, 0); // Clobber the space we're about to drop. |
8334 __ Drop(1, kWRegSize); | 8334 __ Drop(1, kWRegSize); |
8335 __ Peek(x6, 0); | 8335 __ Peek(x6, 0); |
8336 __ Claim(1); | 8336 __ Claim(1); |
8337 __ Peek(w7, 10); | 8337 __ Peek(w7, 10); |
8338 __ Poke(x3, 28); | 8338 __ Poke(x3, 28); |
8339 __ Poke(xzr, 0); // Clobber the space we're about to drop. | 8339 __ Poke(xzr, 0); // Clobber the space we're about to drop. |
(...skipping 11 matching lines...)
8351 RUN(); | 8351 RUN(); |
8352 | 8352 |
8353 uint64_t x0_expected = literal_base * 1; | 8353 uint64_t x0_expected = literal_base * 1; |
8354 uint64_t x1_expected = literal_base * 2; | 8354 uint64_t x1_expected = literal_base * 2; |
8355 uint64_t x2_expected = literal_base * 3; | 8355 uint64_t x2_expected = literal_base * 3; |
8356 uint64_t x3_expected = literal_base * 4; | 8356 uint64_t x3_expected = literal_base * 4; |
8357 uint64_t x6_expected = (x1_expected << 32) | (x0_expected >> 32); | 8357 uint64_t x6_expected = (x1_expected << 32) | (x0_expected >> 32); |
8358 uint64_t x7_expected = ((x1_expected << 16) & 0xffff0000) | | 8358 uint64_t x7_expected = ((x1_expected << 16) & 0xffff0000) | |
8359 ((x0_expected >> 48) & 0x0000ffff); | 8359 ((x0_expected >> 48) & 0x0000ffff); |
8360 | 8360 |
8361 ASSERT_EQUAL_64(x0_expected, x0); | 8361 CHECK_EQUAL_64(x0_expected, x0); |
8362 ASSERT_EQUAL_64(x1_expected, x1); | 8362 CHECK_EQUAL_64(x1_expected, x1); |
8363 ASSERT_EQUAL_64(x2_expected, x2); | 8363 CHECK_EQUAL_64(x2_expected, x2); |
8364 ASSERT_EQUAL_64(x3_expected, x3); | 8364 CHECK_EQUAL_64(x3_expected, x3); |
8365 ASSERT_EQUAL_64(x6_expected, x6); | 8365 CHECK_EQUAL_64(x6_expected, x6); |
8366 ASSERT_EQUAL_64(x7_expected, x7); | 8366 CHECK_EQUAL_64(x7_expected, x7); |
8367 | 8367 |
8368 TEARDOWN(); | 8368 TEARDOWN(); |
8369 } | 8369 } |
8370 | 8370 |
8371 | 8371 |
8372 // This enum is used only as an argument to the push-pop test helpers. | 8372 // This enum is used only as an argument to the push-pop test helpers. |
8373 enum PushPopMethod { | 8373 enum PushPopMethod { |
8374 // Push or Pop using the Push and Pop methods, with blocks of up to four | 8374 // Push or Pop using the Push and Pop methods, with blocks of up to four |
8375 // registers. (Smaller blocks will be used if necessary.) | 8375 // registers. (Smaller blocks will be used if necessary.) |
8376 PushPopByFour, | 8376 PushPopByFour, |
(...skipping 39 matching lines...) |
8416 allowed); | 8416 allowed); |
8417 | 8417 |
8418 // The literal base is chosen to have two useful properties: | 8418 // The literal base is chosen to have two useful properties: |
8419 // * When multiplied by small values (such as a register index), this value | 8419 // * When multiplied by small values (such as a register index), this value |
8420 // is clearly readable in the result. | 8420 // is clearly readable in the result. |
8421 // * The value is not formed from repeating fixed-size smaller values, so it | 8421 // * The value is not formed from repeating fixed-size smaller values, so it |
8422 // can be used to detect endianness-related errors. | 8422 // can be used to detect endianness-related errors. |
8423 uint64_t literal_base = 0x0100001000100101UL; | 8423 uint64_t literal_base = 0x0100001000100101UL; |
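As an aside (a minimal standalone sketch, not part of the change under review), the first property is easy to see: small multiples of this constant keep the multiplier visible in every nibble group, and a byte-swapped load would not be a small multiple of the base, so endianness bugs stand out in a register dump.

    #include <cstdint>
    #include <cstdio>

    int main() {
      const uint64_t literal_base = 0x0100001000100101UL;
      for (int i = 1; i <= 3; i++) {
        // Prints 0x0100001000100101, 0x0200002000200202, 0x0300003000300303.
        std::printf("0x%016llx\n",
                    static_cast<unsigned long long>(literal_base * i));
      }
      return 0;
    }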
8424 | 8424 |
8425 { | 8425 { |
8426 ASSERT(__ StackPointer().Is(csp)); | 8426 DCHECK(__ StackPointer().Is(csp)); |
8427 __ Mov(jssp, __ StackPointer()); | 8427 __ Mov(jssp, __ StackPointer()); |
8428 __ SetStackPointer(jssp); | 8428 __ SetStackPointer(jssp); |
8429 | 8429 |
8430 int i; | 8430 int i; |
8431 | 8431 |
8432 // Initialize the registers. | 8432 // Initialize the registers. |
8433 for (i = 0; i < reg_count; i++) { | 8433 for (i = 0; i < reg_count; i++) { |
8434 // Always write into the X register, to ensure that the upper word is | 8434 // Always write into the X register, to ensure that the upper word is |
8435 // properly ignored by Push when testing W registers. | 8435 // properly ignored by Push when testing W registers. |
8436 if (!x[i].IsZero()) { | 8436 if (!x[i].IsZero()) { |
8437 __ Mov(x[i], literal_base * i); | 8437 __ Mov(x[i], literal_base * i); |
8438 } | 8438 } |
8439 } | 8439 } |
8440 | 8440 |
8441 // Claim memory first, as requested. | 8441 // Claim memory first, as requested. |
8442 __ Claim(claim, kByteSizeInBytes); | 8442 __ Claim(claim, kByteSizeInBytes); |
8443 | 8443 |
8444 switch (push_method) { | 8444 switch (push_method) { |
8445 case PushPopByFour: | 8445 case PushPopByFour: |
8446 // Push high-numbered registers first (to the highest addresses). | 8446 // Push high-numbered registers first (to the highest addresses). |
8447 for (i = reg_count; i >= 4; i -= 4) { | 8447 for (i = reg_count; i >= 4; i -= 4) { |
8448 __ Push(r[i-1], r[i-2], r[i-3], r[i-4]); | 8448 __ Push(r[i-1], r[i-2], r[i-3], r[i-4]); |
8449 } | 8449 } |
8450 // Finish off the leftovers. | 8450 // Finish off the leftovers. |
8451 switch (i) { | 8451 switch (i) { |
8452 case 3: __ Push(r[2], r[1], r[0]); break; | 8452 case 3: __ Push(r[2], r[1], r[0]); break; |
8453 case 2: __ Push(r[1], r[0]); break; | 8453 case 2: __ Push(r[1], r[0]); break; |
8454 case 1: __ Push(r[0]); break; | 8454 case 1: __ Push(r[0]); break; |
8455 default: ASSERT(i == 0); break; | 8455 default: DCHECK(i == 0); break; |
8456 } | 8456 } |
8457 break; | 8457 break; |
8458 case PushPopRegList: | 8458 case PushPopRegList: |
8459 __ PushSizeRegList(list, reg_size); | 8459 __ PushSizeRegList(list, reg_size); |
8460 break; | 8460 break; |
8461 } | 8461 } |
8462 | 8462 |
8463 // Clobber all the registers, to ensure that they get repopulated by Pop. | 8463 // Clobber all the registers, to ensure that they get repopulated by Pop. |
8464 Clobber(&masm, list); | 8464 Clobber(&masm, list); |
8465 | 8465 |
8466 switch (pop_method) { | 8466 switch (pop_method) { |
8467 case PushPopByFour: | 8467 case PushPopByFour: |
8468 // Pop low-numbered registers first (from the lowest addresses). | 8468 // Pop low-numbered registers first (from the lowest addresses). |
8469 for (i = 0; i <= (reg_count-4); i += 4) { | 8469 for (i = 0; i <= (reg_count-4); i += 4) { |
8470 __ Pop(r[i], r[i+1], r[i+2], r[i+3]); | 8470 __ Pop(r[i], r[i+1], r[i+2], r[i+3]); |
8471 } | 8471 } |
8472 // Finish off the leftovers. | 8472 // Finish off the leftovers. |
8473 switch (reg_count - i) { | 8473 switch (reg_count - i) { |
8474 case 3: __ Pop(r[i], r[i+1], r[i+2]); break; | 8474 case 3: __ Pop(r[i], r[i+1], r[i+2]); break; |
8475 case 2: __ Pop(r[i], r[i+1]); break; | 8475 case 2: __ Pop(r[i], r[i+1]); break; |
8476 case 1: __ Pop(r[i]); break; | 8476 case 1: __ Pop(r[i]); break; |
8477 default: ASSERT(i == reg_count); break; | 8477 default: DCHECK(i == reg_count); break; |
8478 } | 8478 } |
8479 break; | 8479 break; |
8480 case PushPopRegList: | 8480 case PushPopRegList: |
8481 __ PopSizeRegList(list, reg_size); | 8481 __ PopSizeRegList(list, reg_size); |
8482 break; | 8482 break; |
8483 } | 8483 } |
8484 | 8484 |
8485 // Drop memory to restore jssp. | 8485 // Drop memory to restore jssp. |
8486 __ Drop(claim, kByteSizeInBytes); | 8486 __ Drop(claim, kByteSizeInBytes); |
8487 | 8487 |
8488 __ Mov(csp, __ StackPointer()); | 8488 __ Mov(csp, __ StackPointer()); |
8489 __ SetStackPointer(csp); | 8489 __ SetStackPointer(csp); |
8490 } | 8490 } |
8491 | 8491 |
8492 END(); | 8492 END(); |
8493 | 8493 |
8494 RUN(); | 8494 RUN(); |
8495 | 8495 |
8496 // Check that the register contents were preserved. | 8496 // Check that the register contents were preserved. |
8497 // Always use ASSERT_EQUAL_64, even when testing W registers, so we can test | 8497 // Always use CHECK_EQUAL_64, even when testing W registers, so we can test |
8498 // that the upper word was properly cleared by Pop. | 8498 // that the upper word was properly cleared by Pop. |
8499 literal_base &= (0xffffffffffffffffUL >> (64-reg_size)); | 8499 literal_base &= (0xffffffffffffffffUL >> (64-reg_size)); |
8500 for (int i = 0; i < reg_count; i++) { | 8500 for (int i = 0; i < reg_count; i++) { |
8501 if (x[i].IsZero()) { | 8501 if (x[i].IsZero()) { |
8502 ASSERT_EQUAL_64(0, x[i]); | 8502 CHECK_EQUAL_64(0, x[i]); |
8503 } else { | 8503 } else { |
8504 ASSERT_EQUAL_64(literal_base * i, x[i]); | 8504 CHECK_EQUAL_64(literal_base * i, x[i]); |
8505 } | 8505 } |
8506 } | 8506 } |
8507 | 8507 |
8508 TEARDOWN(); | 8508 TEARDOWN(); |
8509 } | 8509 } |
8510 | 8510 |
8511 | 8511 |
8512 TEST(push_pop_jssp_simple_32) { | 8512 TEST(push_pop_jssp_simple_32) { |
8513 INIT_V8(); | 8513 INIT_V8(); |
8514 for (int claim = 0; claim <= 8; claim++) { | 8514 for (int claim = 0; claim <= 8; claim++) { |
(...skipping 83 matching lines...) |
8598 // The literal base is chosen to have three useful properties: | 8598 // The literal base is chosen to have three useful properties: |
8599 // * When multiplied (using an integer) by small values (such as a register | 8599 // * When multiplied (using an integer) by small values (such as a register |
8600 // index), this value is clearly readable in the result. | 8600 // index), this value is clearly readable in the result. |
8601 // * The value is not formed from repeating fixed-size smaller values, so it | 8601 // * The value is not formed from repeating fixed-size smaller values, so it |
8602 // can be used to detect endianness-related errors. | 8602 // can be used to detect endianness-related errors. |
8603 // * It is never a floating-point NaN, and will therefore always compare | 8603 // * It is never a floating-point NaN, and will therefore always compare |
8604 // equal to itself. | 8604 // equal to itself. |
8605 uint64_t literal_base = 0x0100001000100101UL; | 8605 uint64_t literal_base = 0x0100001000100101UL; |
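As an aside (an illustrative sketch, not part of the change under review; the helper name below is made up), the third property holds because a double is a NaN only when all eleven exponent bits are set, i.e. its raw bits, ignoring the sign, are at least 0x7ff0000000000000; the multiples of literal_base used here stay far below that bound.

    #include <cmath>
    #include <cstdint>
    #include <cstring>

    // Reinterprets a raw 64-bit pattern as a double and reports whether it is
    // an ordinary (non-NaN) value.
    bool IsOrdinaryDouble(uint64_t raw) {
      double d;
      std::memcpy(&d, &raw, sizeof(d));
      return !std::isnan(d);
    }
    // For example, IsOrdinaryDouble(0x0100001000100101UL * 8) returns true.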
8606 | 8606 |
8607 { | 8607 { |
8608 ASSERT(__ StackPointer().Is(csp)); | 8608 DCHECK(__ StackPointer().Is(csp)); |
8609 __ Mov(jssp, __ StackPointer()); | 8609 __ Mov(jssp, __ StackPointer()); |
8610 __ SetStackPointer(jssp); | 8610 __ SetStackPointer(jssp); |
8611 | 8611 |
8612 int i; | 8612 int i; |
8613 | 8613 |
8614 // Initialize the registers, using X registers to load the literal. | 8614 // Initialize the registers, using X registers to load the literal. |
8615 __ Mov(x0, 0); | 8615 __ Mov(x0, 0); |
8616 __ Mov(x1, literal_base); | 8616 __ Mov(x1, literal_base); |
8617 for (i = 0; i < reg_count; i++) { | 8617 for (i = 0; i < reg_count; i++) { |
8618 // Always write into the D register, to ensure that the upper word is | 8618 // Always write into the D register, to ensure that the upper word is |
(...skipping 10 matching lines...) |
8629 case PushPopByFour: | 8629 case PushPopByFour: |
8630 // Push high-numbered registers first (to the highest addresses). | 8630 // Push high-numbered registers first (to the highest addresses). |
8631 for (i = reg_count; i >= 4; i -= 4) { | 8631 for (i = reg_count; i >= 4; i -= 4) { |
8632 __ Push(v[i-1], v[i-2], v[i-3], v[i-4]); | 8632 __ Push(v[i-1], v[i-2], v[i-3], v[i-4]); |
8633 } | 8633 } |
8634 // Finish off the leftovers. | 8634 // Finish off the leftovers. |
8635 switch (i) { | 8635 switch (i) { |
8636 case 3: __ Push(v[2], v[1], v[0]); break; | 8636 case 3: __ Push(v[2], v[1], v[0]); break; |
8637 case 2: __ Push(v[1], v[0]); break; | 8637 case 2: __ Push(v[1], v[0]); break; |
8638 case 1: __ Push(v[0]); break; | 8638 case 1: __ Push(v[0]); break; |
8639 default: ASSERT(i == 0); break; | 8639 default: DCHECK(i == 0); break; |
8640 } | 8640 } |
8641 break; | 8641 break; |
8642 case PushPopRegList: | 8642 case PushPopRegList: |
8643 __ PushSizeRegList(list, reg_size, CPURegister::kFPRegister); | 8643 __ PushSizeRegList(list, reg_size, CPURegister::kFPRegister); |
8644 break; | 8644 break; |
8645 } | 8645 } |
8646 | 8646 |
8647 // Clobber all the registers, to ensure that they get repopulated by Pop. | 8647 // Clobber all the registers, to ensure that they get repopulated by Pop. |
8648 ClobberFP(&masm, list); | 8648 ClobberFP(&masm, list); |
8649 | 8649 |
8650 switch (pop_method) { | 8650 switch (pop_method) { |
8651 case PushPopByFour: | 8651 case PushPopByFour: |
8652 // Pop low-numbered registers first (from the lowest addresses). | 8652 // Pop low-numbered registers first (from the lowest addresses). |
8653 for (i = 0; i <= (reg_count-4); i += 4) { | 8653 for (i = 0; i <= (reg_count-4); i += 4) { |
8654 __ Pop(v[i], v[i+1], v[i+2], v[i+3]); | 8654 __ Pop(v[i], v[i+1], v[i+2], v[i+3]); |
8655 } | 8655 } |
8656 // Finish off the leftovers. | 8656 // Finish off the leftovers. |
8657 switch (reg_count - i) { | 8657 switch (reg_count - i) { |
8658 case 3: __ Pop(v[i], v[i+1], v[i+2]); break; | 8658 case 3: __ Pop(v[i], v[i+1], v[i+2]); break; |
8659 case 2: __ Pop(v[i], v[i+1]); break; | 8659 case 2: __ Pop(v[i], v[i+1]); break; |
8660 case 1: __ Pop(v[i]); break; | 8660 case 1: __ Pop(v[i]); break; |
8661 default: ASSERT(i == reg_count); break; | 8661 default: DCHECK(i == reg_count); break; |
8662 } | 8662 } |
8663 break; | 8663 break; |
8664 case PushPopRegList: | 8664 case PushPopRegList: |
8665 __ PopSizeRegList(list, reg_size, CPURegister::kFPRegister); | 8665 __ PopSizeRegList(list, reg_size, CPURegister::kFPRegister); |
8666 break; | 8666 break; |
8667 } | 8667 } |
8668 | 8668 |
8669 // Drop memory to restore jssp. | 8669 // Drop memory to restore jssp. |
8670 __ Drop(claim, kByteSizeInBytes); | 8670 __ Drop(claim, kByteSizeInBytes); |
8671 | 8671 |
8672 __ Mov(csp, __ StackPointer()); | 8672 __ Mov(csp, __ StackPointer()); |
8673 __ SetStackPointer(csp); | 8673 __ SetStackPointer(csp); |
8674 } | 8674 } |
8675 | 8675 |
8676 END(); | 8676 END(); |
8677 | 8677 |
8678 RUN(); | 8678 RUN(); |
8679 | 8679 |
8680 // Check that the register contents were preserved. | 8680 // Check that the register contents were preserved. |
8681 // Always use ASSERT_EQUAL_FP64, even when testing S registers, so we can | 8681 // Always use CHECK_EQUAL_FP64, even when testing S registers, so we can |
8682 // test that the upper word was properly cleared by Pop. | 8682 // test that the upper word was properly cleared by Pop. |
8683 literal_base &= (0xffffffffffffffffUL >> (64-reg_size)); | 8683 literal_base &= (0xffffffffffffffffUL >> (64-reg_size)); |
8684 for (int i = 0; i < reg_count; i++) { | 8684 for (int i = 0; i < reg_count; i++) { |
8685 uint64_t literal = literal_base * i; | 8685 uint64_t literal = literal_base * i; |
8686 double expected; | 8686 double expected; |
8687 memcpy(&expected, &literal, sizeof(expected)); | 8687 memcpy(&expected, &literal, sizeof(expected)); |
8688 ASSERT_EQUAL_FP64(expected, d[i]); | 8688 CHECK_EQUAL_FP64(expected, d[i]); |
8689 } | 8689 } |
8690 | 8690 |
8691 TEARDOWN(); | 8691 TEARDOWN(); |
8692 } | 8692 } |
8693 | 8693 |
8694 | 8694 |
8695 TEST(push_pop_fp_jssp_simple_32) { | 8695 TEST(push_pop_fp_jssp_simple_32) { |
8696 INIT_V8(); | 8696 INIT_V8(); |
8697 for (int claim = 0; claim <= 8; claim++) { | 8697 for (int claim = 0; claim <= 8; claim++) { |
8698 for (int count = 0; count <= 8; count++) { | 8698 for (int count = 0; count <= 8; count++) { |
(...skipping 76 matching lines...) |
8775 | 8775 |
8776 // The literal base is chosen to have two useful properties: | 8776 // The literal base is chosen to have two useful properties: |
8777 // * When multiplied by small values (such as a register index), this value | 8777 // * When multiplied by small values (such as a register index), this value |
8778 // is clearly readable in the result. | 8778 // is clearly readable in the result. |
8779 // * The value is not formed from repeating fixed-size smaller values, so it | 8779 // * The value is not formed from repeating fixed-size smaller values, so it |
8780 // can be used to detect endianness-related errors. | 8780 // can be used to detect endianness-related errors. |
8781 uint64_t literal_base = 0x0100001000100101UL; | 8781 uint64_t literal_base = 0x0100001000100101UL; |
8782 | 8782 |
8783 START(); | 8783 START(); |
8784 { | 8784 { |
8785 ASSERT(__ StackPointer().Is(csp)); | 8785 DCHECK(__ StackPointer().Is(csp)); |
8786 __ Mov(jssp, __ StackPointer()); | 8786 __ Mov(jssp, __ StackPointer()); |
8787 __ SetStackPointer(jssp); | 8787 __ SetStackPointer(jssp); |
8788 | 8788 |
8789 // Claim memory first, as requested. | 8789 // Claim memory first, as requested. |
8790 __ Claim(claim, kByteSizeInBytes); | 8790 __ Claim(claim, kByteSizeInBytes); |
8791 | 8791 |
8792 __ Mov(x[3], literal_base * 3); | 8792 __ Mov(x[3], literal_base * 3); |
8793 __ Mov(x[2], literal_base * 2); | 8793 __ Mov(x[2], literal_base * 2); |
8794 __ Mov(x[1], literal_base * 1); | 8794 __ Mov(x[1], literal_base * 1); |
8795 __ Mov(x[0], literal_base * 0); | 8795 __ Mov(x[0], literal_base * 0); |
(...skipping 15 matching lines...) |
8811 __ Drop(claim, kByteSizeInBytes); | 8811 __ Drop(claim, kByteSizeInBytes); |
8812 | 8812 |
8813 __ Mov(csp, __ StackPointer()); | 8813 __ Mov(csp, __ StackPointer()); |
8814 __ SetStackPointer(csp); | 8814 __ SetStackPointer(csp); |
8815 } | 8815 } |
8816 | 8816 |
8817 END(); | 8817 END(); |
8818 | 8818 |
8819 RUN(); | 8819 RUN(); |
8820 | 8820 |
8821 // Always use ASSERT_EQUAL_64, even when testing W registers, so we can test | 8821 // Always use CHECK_EQUAL_64, even when testing W registers, so we can test |
8822 // that the upper word was properly cleared by Pop. | 8822 // that the upper word was properly cleared by Pop. |
8823 literal_base &= (0xffffffffffffffffUL >> (64-reg_size)); | 8823 literal_base &= (0xffffffffffffffffUL >> (64-reg_size)); |
8824 | 8824 |
8825 ASSERT_EQUAL_64(literal_base * 3, x[9]); | 8825 CHECK_EQUAL_64(literal_base * 3, x[9]); |
8826 ASSERT_EQUAL_64(literal_base * 2, x[8]); | 8826 CHECK_EQUAL_64(literal_base * 2, x[8]); |
8827 ASSERT_EQUAL_64(literal_base * 0, x[7]); | 8827 CHECK_EQUAL_64(literal_base * 0, x[7]); |
8828 ASSERT_EQUAL_64(literal_base * 3, x[6]); | 8828 CHECK_EQUAL_64(literal_base * 3, x[6]); |
8829 ASSERT_EQUAL_64(literal_base * 1, x[5]); | 8829 CHECK_EQUAL_64(literal_base * 1, x[5]); |
8830 ASSERT_EQUAL_64(literal_base * 2, x[4]); | 8830 CHECK_EQUAL_64(literal_base * 2, x[4]); |
8831 | 8831 |
8832 TEARDOWN(); | 8832 TEARDOWN(); |
8833 } | 8833 } |
8834 | 8834 |
8835 | 8835 |
8836 TEST(push_pop_jssp_mixed_methods_64) { | 8836 TEST(push_pop_jssp_mixed_methods_64) { |
8837 INIT_V8(); | 8837 INIT_V8(); |
8838 for (int claim = 0; claim <= 8; claim++) { | 8838 for (int claim = 0; claim <= 8; claim++) { |
8839 PushPopJsspMixedMethodsHelper(claim, kXRegSizeInBits); | 8839 PushPopJsspMixedMethodsHelper(claim, kXRegSizeInBits); |
8840 } | 8840 } |
(...skipping 39 matching lines...) |
8880 // is clearly readable in the result. | 8880 // is clearly readable in the result. |
8881 // * The value is not formed from repeating fixed-size smaller values, so it | 8881 // * The value is not formed from repeating fixed-size smaller values, so it |
8882 // can be used to detect endianness-related errors. | 8882 // can be used to detect endianness-related errors. |
8883 static uint64_t const literal_base = 0x0100001000100101UL; | 8883 static uint64_t const literal_base = 0x0100001000100101UL; |
8884 static uint64_t const literal_base_hi = literal_base >> 32; | 8884 static uint64_t const literal_base_hi = literal_base >> 32; |
8885 static uint64_t const literal_base_lo = literal_base & 0xffffffff; | 8885 static uint64_t const literal_base_lo = literal_base & 0xffffffff; |
8886 static uint64_t const literal_base_w = literal_base & 0xffffffff; | 8886 static uint64_t const literal_base_w = literal_base & 0xffffffff; |
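For reference (a hand-computed aside, not part of the change under review), the derived constants above evaluate to literal_base_hi == 0x01000010 and literal_base_lo == literal_base_w == 0x00100101:

    static_assert((0x0100001000100101UL >> 32) == 0x01000010UL, "hi half");
    static_assert((0x0100001000100101UL & 0xffffffff) == 0x00100101UL, "lo half");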
8887 | 8887 |
8888 START(); | 8888 START(); |
8889 { | 8889 { |
8890 ASSERT(__ StackPointer().Is(csp)); | 8890 DCHECK(__ StackPointer().Is(csp)); |
8891 __ Mov(jssp, __ StackPointer()); | 8891 __ Mov(jssp, __ StackPointer()); |
8892 __ SetStackPointer(jssp); | 8892 __ SetStackPointer(jssp); |
8893 | 8893 |
8894 // Initialize the registers. | 8894 // Initialize the registers. |
8895 for (int i = 0; i < reg_count; i++) { | 8895 for (int i = 0; i < reg_count; i++) { |
8896 // Always write into the X register, to ensure that the upper word is | 8896 // Always write into the X register, to ensure that the upper word is |
8897 // properly ignored by Push when testing W registers. | 8897 // properly ignored by Push when testing W registers. |
8898 if (!x[i].IsZero()) { | 8898 if (!x[i].IsZero()) { |
8899 __ Mov(x[i], literal_base * i); | 8899 __ Mov(x[i], literal_base * i); |
8900 } | 8900 } |
(...skipping 27 matching lines...) |
8928 // where i is the register number. | 8928 // where i is the register number. |
8929 // Registers are popped starting with the higher numbers one-by-one, | 8929 // Registers are popped starting with the higher numbers one-by-one, |
8930 // alternating between x and w registers, but only popping one at a time. | 8930 // alternating between x and w registers, but only popping one at a time. |
8931 // | 8931 // |
8932 // This pattern provides a wide variety of alignment effects and overlaps. | 8932 // This pattern provides a wide variety of alignment effects and overlaps. |
8933 | 8933 |
8934 // ---- Push ---- | 8934 // ---- Push ---- |
8935 | 8935 |
8936 int active_w_slots = 0; | 8936 int active_w_slots = 0; |
8937 for (int i = 0; active_w_slots < requested_w_slots; i++) { | 8937 for (int i = 0; active_w_slots < requested_w_slots; i++) { |
8938 ASSERT(i < reg_count); | 8938 DCHECK(i < reg_count); |
8939 // In order to test various arguments to PushMultipleTimes, and to try to | 8939 // In order to test various arguments to PushMultipleTimes, and to try to |
8940 // exercise different alignment and overlap effects, we push each | 8940 // exercise different alignment and overlap effects, we push each |
8941 // register a different number of times. | 8941 // register a different number of times. |
8942 int times = i % 4 + 1; | 8942 int times = i % 4 + 1; |
8943 if (i & 1) { | 8943 if (i & 1) { |
8944 // Push odd-numbered registers as W registers. | 8944 // Push odd-numbered registers as W registers. |
8945 if (i & 2) { | 8945 if (i & 2) { |
8946 __ PushMultipleTimes(w[i], times); | 8946 __ PushMultipleTimes(w[i], times); |
8947 } else { | 8947 } else { |
8948 // Use a register to specify the count. | 8948 // Use a register to specify the count. |
(...skipping 52 matching lines...) |
9001 for (int i = reg_count-1; i >= 0; i--) { | 9001 for (int i = reg_count-1; i >= 0; i--) { |
9002 if (next_is_64) { | 9002 if (next_is_64) { |
9003 __ Pop(x[i]); | 9003 __ Pop(x[i]); |
9004 active_w_slots -= 2; | 9004 active_w_slots -= 2; |
9005 } else { | 9005 } else { |
9006 __ Pop(w[i]); | 9006 __ Pop(w[i]); |
9007 active_w_slots -= 1; | 9007 active_w_slots -= 1; |
9008 } | 9008 } |
9009 next_is_64 = !next_is_64; | 9009 next_is_64 = !next_is_64; |
9010 } | 9010 } |
9011 ASSERT(active_w_slots == 0); | 9011 DCHECK(active_w_slots == 0); |
9012 | 9012 |
9013 // Drop memory to restore jssp. | 9013 // Drop memory to restore jssp. |
9014 __ Drop(claim, kByteSizeInBytes); | 9014 __ Drop(claim, kByteSizeInBytes); |
9015 | 9015 |
9016 __ Mov(csp, __ StackPointer()); | 9016 __ Mov(csp, __ StackPointer()); |
9017 __ SetStackPointer(csp); | 9017 __ SetStackPointer(csp); |
9018 } | 9018 } |
9019 | 9019 |
9020 END(); | 9020 END(); |
9021 | 9021 |
9022 RUN(); | 9022 RUN(); |
9023 | 9023 |
9024 int slot = 0; | 9024 int slot = 0; |
9025 for (int i = 0; i < reg_count; i++) { | 9025 for (int i = 0; i < reg_count; i++) { |
9026 // Even-numbered registers were written as W registers. | 9026 // Even-numbered registers were written as W registers. |
9027 // Odd-numbered registers were written as X registers. | 9027 // Odd-numbered registers were written as X registers. |
9028 bool expect_64 = (i & 1); | 9028 bool expect_64 = (i & 1); |
9029 uint64_t expected; | 9029 uint64_t expected; |
9030 | 9030 |
9031 if (expect_64) { | 9031 if (expect_64) { |
9032 uint64_t hi = stack[slot++]; | 9032 uint64_t hi = stack[slot++]; |
9033 uint64_t lo = stack[slot++]; | 9033 uint64_t lo = stack[slot++]; |
9034 expected = (hi << 32) | lo; | 9034 expected = (hi << 32) | lo; |
9035 } else { | 9035 } else { |
9036 expected = stack[slot++]; | 9036 expected = stack[slot++]; |
9037 } | 9037 } |
9038 | 9038 |
9039 // Always use ASSERT_EQUAL_64, even when testing W registers, so we can | 9039 // Always use CHECK_EQUAL_64, even when testing W registers, so we can |
9040 // test that the upper word was properly cleared by Pop. | 9040 // test that the upper word was properly cleared by Pop. |
9041 if (x[i].IsZero()) { | 9041 if (x[i].IsZero()) { |
9042 ASSERT_EQUAL_64(0, x[i]); | 9042 CHECK_EQUAL_64(0, x[i]); |
9043 } else { | 9043 } else { |
9044 ASSERT_EQUAL_64(expected, x[i]); | 9044 CHECK_EQUAL_64(expected, x[i]); |
9045 } | 9045 } |
9046 } | 9046 } |
9047 ASSERT(slot == requested_w_slots); | 9047 DCHECK(slot == requested_w_slots); |
9048 | 9048 |
9049 TEARDOWN(); | 9049 TEARDOWN(); |
9050 } | 9050 } |
9051 | 9051 |
9052 | 9052 |
9053 TEST(push_pop_jssp_wx_overlap) { | 9053 TEST(push_pop_jssp_wx_overlap) { |
9054 INIT_V8(); | 9054 INIT_V8(); |
9055 for (int claim = 0; claim <= 8; claim++) { | 9055 for (int claim = 0; claim <= 8; claim++) { |
9056 for (int count = 1; count <= 8; count++) { | 9056 for (int count = 1; count <= 8; count++) { |
9057 PushPopJsspWXOverlapHelper(count, claim); | 9057 PushPopJsspWXOverlapHelper(count, claim); |
9058 PushPopJsspWXOverlapHelper(count, claim); | 9058 PushPopJsspWXOverlapHelper(count, claim); |
9059 PushPopJsspWXOverlapHelper(count, claim); | 9059 PushPopJsspWXOverlapHelper(count, claim); |
9060 PushPopJsspWXOverlapHelper(count, claim); | 9060 PushPopJsspWXOverlapHelper(count, claim); |
9061 } | 9061 } |
9062 // Test with the maximum number of registers. | 9062 // Test with the maximum number of registers. |
9063 PushPopJsspWXOverlapHelper(kPushPopJsspMaxRegCount, claim); | 9063 PushPopJsspWXOverlapHelper(kPushPopJsspMaxRegCount, claim); |
9064 PushPopJsspWXOverlapHelper(kPushPopJsspMaxRegCount, claim); | 9064 PushPopJsspWXOverlapHelper(kPushPopJsspMaxRegCount, claim); |
9065 PushPopJsspWXOverlapHelper(kPushPopJsspMaxRegCount, claim); | 9065 PushPopJsspWXOverlapHelper(kPushPopJsspMaxRegCount, claim); |
9066 PushPopJsspWXOverlapHelper(kPushPopJsspMaxRegCount, claim); | 9066 PushPopJsspWXOverlapHelper(kPushPopJsspMaxRegCount, claim); |
9067 } | 9067 } |
9068 } | 9068 } |
9069 | 9069 |
9070 | 9070 |
9071 TEST(push_pop_csp) { | 9071 TEST(push_pop_csp) { |
9072 INIT_V8(); | 9072 INIT_V8(); |
9073 SETUP(); | 9073 SETUP(); |
9074 | 9074 |
9075 START(); | 9075 START(); |
9076 | 9076 |
9077 ASSERT(csp.Is(__ StackPointer())); | 9077 DCHECK(csp.Is(__ StackPointer())); |
9078 | 9078 |
9079 __ Mov(x3, 0x3333333333333333UL); | 9079 __ Mov(x3, 0x3333333333333333UL); |
9080 __ Mov(x2, 0x2222222222222222UL); | 9080 __ Mov(x2, 0x2222222222222222UL); |
9081 __ Mov(x1, 0x1111111111111111UL); | 9081 __ Mov(x1, 0x1111111111111111UL); |
9082 __ Mov(x0, 0x0000000000000000UL); | 9082 __ Mov(x0, 0x0000000000000000UL); |
9083 __ Claim(2); | 9083 __ Claim(2); |
9084 __ PushXRegList(x0.Bit() | x1.Bit() | x2.Bit() | x3.Bit()); | 9084 __ PushXRegList(x0.Bit() | x1.Bit() | x2.Bit() | x3.Bit()); |
9085 __ Push(x3, x2); | 9085 __ Push(x3, x2); |
9086 __ PopXRegList(x0.Bit() | x1.Bit() | x2.Bit() | x3.Bit()); | 9086 __ PopXRegList(x0.Bit() | x1.Bit() | x2.Bit() | x3.Bit()); |
9087 __ Push(x2, x1, x3, x0); | 9087 __ Push(x2, x1, x3, x0); |
(...skipping 24 matching lines...) |
9112 __ PushXRegList(0); | 9112 __ PushXRegList(0); |
9113 __ PopXRegList(0); | 9113 __ PopXRegList(0); |
9114 __ PushXRegList(0xffffffff); | 9114 __ PushXRegList(0xffffffff); |
9115 __ PopXRegList(0xffffffff); | 9115 __ PopXRegList(0xffffffff); |
9116 __ Drop(12); | 9116 __ Drop(12); |
9117 | 9117 |
9118 END(); | 9118 END(); |
9119 | 9119 |
9120 RUN(); | 9120 RUN(); |
9121 | 9121 |
9122 ASSERT_EQUAL_64(0x1111111111111111UL, x3); | 9122 CHECK_EQUAL_64(0x1111111111111111UL, x3); |
9123 ASSERT_EQUAL_64(0x0000000000000000UL, x2); | 9123 CHECK_EQUAL_64(0x0000000000000000UL, x2); |
9124 ASSERT_EQUAL_64(0x3333333333333333UL, x1); | 9124 CHECK_EQUAL_64(0x3333333333333333UL, x1); |
9125 ASSERT_EQUAL_64(0x2222222222222222UL, x0); | 9125 CHECK_EQUAL_64(0x2222222222222222UL, x0); |
9126 ASSERT_EQUAL_64(0x3333333333333333UL, x9); | 9126 CHECK_EQUAL_64(0x3333333333333333UL, x9); |
9127 ASSERT_EQUAL_64(0x2222222222222222UL, x8); | 9127 CHECK_EQUAL_64(0x2222222222222222UL, x8); |
9128 ASSERT_EQUAL_64(0x0000000000000000UL, x7); | 9128 CHECK_EQUAL_64(0x0000000000000000UL, x7); |
9129 ASSERT_EQUAL_64(0x3333333333333333UL, x6); | 9129 CHECK_EQUAL_64(0x3333333333333333UL, x6); |
9130 ASSERT_EQUAL_64(0x1111111111111111UL, x5); | 9130 CHECK_EQUAL_64(0x1111111111111111UL, x5); |
9131 ASSERT_EQUAL_64(0x2222222222222222UL, x4); | 9131 CHECK_EQUAL_64(0x2222222222222222UL, x4); |
9132 | 9132 |
9133 ASSERT_EQUAL_32(0x11111111U, w13); | 9133 CHECK_EQUAL_32(0x11111111U, w13); |
9134 ASSERT_EQUAL_32(0x33333333U, w12); | 9134 CHECK_EQUAL_32(0x33333333U, w12); |
9135 ASSERT_EQUAL_32(0x00000000U, w11); | 9135 CHECK_EQUAL_32(0x00000000U, w11); |
9136 ASSERT_EQUAL_32(0x22222222U, w10); | 9136 CHECK_EQUAL_32(0x22222222U, w10); |
9137 ASSERT_EQUAL_32(0x11111111U, w17); | 9137 CHECK_EQUAL_32(0x11111111U, w17); |
9138 ASSERT_EQUAL_32(0x00000000U, w16); | 9138 CHECK_EQUAL_32(0x00000000U, w16); |
9139 ASSERT_EQUAL_32(0x33333333U, w15); | 9139 CHECK_EQUAL_32(0x33333333U, w15); |
9140 ASSERT_EQUAL_32(0x22222222U, w14); | 9140 CHECK_EQUAL_32(0x22222222U, w14); |
9141 | 9141 |
9142 ASSERT_EQUAL_32(0x11111111U, w18); | 9142 CHECK_EQUAL_32(0x11111111U, w18); |
9143 ASSERT_EQUAL_32(0x11111111U, w19); | 9143 CHECK_EQUAL_32(0x11111111U, w19); |
9144 ASSERT_EQUAL_32(0x11111111U, w20); | 9144 CHECK_EQUAL_32(0x11111111U, w20); |
9145 ASSERT_EQUAL_32(0x11111111U, w21); | 9145 CHECK_EQUAL_32(0x11111111U, w21); |
9146 ASSERT_EQUAL_64(0x3333333333333333UL, x22); | 9146 CHECK_EQUAL_64(0x3333333333333333UL, x22); |
9147 ASSERT_EQUAL_64(0x0000000000000000UL, x23); | 9147 CHECK_EQUAL_64(0x0000000000000000UL, x23); |
9148 | 9148 |
9149 ASSERT_EQUAL_64(0x3333333333333333UL, x24); | 9149 CHECK_EQUAL_64(0x3333333333333333UL, x24); |
9150 ASSERT_EQUAL_64(0x3333333333333333UL, x26); | 9150 CHECK_EQUAL_64(0x3333333333333333UL, x26); |
9151 | 9151 |
9152 ASSERT_EQUAL_32(0x33333333U, w25); | 9152 CHECK_EQUAL_32(0x33333333U, w25); |
9153 ASSERT_EQUAL_32(0x00000000U, w27); | 9153 CHECK_EQUAL_32(0x00000000U, w27); |
9154 ASSERT_EQUAL_32(0x22222222U, w28); | 9154 CHECK_EQUAL_32(0x22222222U, w28); |
9155 ASSERT_EQUAL_32(0x33333333U, w29); | 9155 CHECK_EQUAL_32(0x33333333U, w29); |
9156 TEARDOWN(); | 9156 TEARDOWN(); |
9157 } | 9157 } |
9158 | 9158 |
9159 | 9159 |
9160 TEST(push_queued) { | 9160 TEST(push_queued) { |
9161 INIT_V8(); | 9161 INIT_V8(); |
9162 SETUP(); | 9162 SETUP(); |
9163 | 9163 |
9164 START(); | 9164 START(); |
9165 | 9165 |
9166 ASSERT(__ StackPointer().Is(csp)); | 9166 DCHECK(__ StackPointer().Is(csp)); |
9167 __ Mov(jssp, __ StackPointer()); | 9167 __ Mov(jssp, __ StackPointer()); |
9168 __ SetStackPointer(jssp); | 9168 __ SetStackPointer(jssp); |
9169 | 9169 |
9170 MacroAssembler::PushPopQueue queue(&masm); | 9170 MacroAssembler::PushPopQueue queue(&masm); |
9171 | 9171 |
9172 // Queue up registers. | 9172 // Queue up registers. |
9173 queue.Queue(x0); | 9173 queue.Queue(x0); |
9174 queue.Queue(x1); | 9174 queue.Queue(x1); |
9175 queue.Queue(x2); | 9175 queue.Queue(x2); |
9176 queue.Queue(x3); | 9176 queue.Queue(x3); |
(...skipping 30 matching lines...) |
9207 __ Pop(w6, w5, w4); | 9207 __ Pop(w6, w5, w4); |
9208 __ Pop(x3, x2, x1, x0); | 9208 __ Pop(x3, x2, x1, x0); |
9209 | 9209 |
9210 __ Mov(csp, __ StackPointer()); | 9210 __ Mov(csp, __ StackPointer()); |
9211 __ SetStackPointer(csp); | 9211 __ SetStackPointer(csp); |
9212 | 9212 |
9213 END(); | 9213 END(); |
9214 | 9214 |
9215 RUN(); | 9215 RUN(); |
9216 | 9216 |
9217 ASSERT_EQUAL_64(0x1234000000000000, x0); | 9217 CHECK_EQUAL_64(0x1234000000000000, x0); |
9218 ASSERT_EQUAL_64(0x1234000100010001, x1); | 9218 CHECK_EQUAL_64(0x1234000100010001, x1); |
9219 ASSERT_EQUAL_64(0x1234000200020002, x2); | 9219 CHECK_EQUAL_64(0x1234000200020002, x2); |
9220 ASSERT_EQUAL_64(0x1234000300030003, x3); | 9220 CHECK_EQUAL_64(0x1234000300030003, x3); |
9221 | 9221 |
9222 ASSERT_EQUAL_32(0x12340004, w4); | 9222 CHECK_EQUAL_32(0x12340004, w4); |
9223 ASSERT_EQUAL_32(0x12340005, w5); | 9223 CHECK_EQUAL_32(0x12340005, w5); |
9224 ASSERT_EQUAL_32(0x12340006, w6); | 9224 CHECK_EQUAL_32(0x12340006, w6); |
9225 | 9225 |
9226 ASSERT_EQUAL_FP64(123400.0, d0); | 9226 CHECK_EQUAL_FP64(123400.0, d0); |
9227 ASSERT_EQUAL_FP64(123401.0, d1); | 9227 CHECK_EQUAL_FP64(123401.0, d1); |
9228 | 9228 |
9229 ASSERT_EQUAL_FP32(123402.0, s2); | 9229 CHECK_EQUAL_FP32(123402.0, s2); |
9230 | 9230 |
9231 TEARDOWN(); | 9231 TEARDOWN(); |
9232 } | 9232 } |
9233 | 9233 |
9234 | 9234 |
9235 TEST(pop_queued) { | 9235 TEST(pop_queued) { |
9236 INIT_V8(); | 9236 INIT_V8(); |
9237 SETUP(); | 9237 SETUP(); |
9238 | 9238 |
9239 START(); | 9239 START(); |
9240 | 9240 |
9241 ASSERT(__ StackPointer().Is(csp)); | 9241 DCHECK(__ StackPointer().Is(csp)); |
9242 __ Mov(jssp, __ StackPointer()); | 9242 __ Mov(jssp, __ StackPointer()); |
9243 __ SetStackPointer(jssp); | 9243 __ SetStackPointer(jssp); |
9244 | 9244 |
9245 MacroAssembler::PushPopQueue queue(&masm); | 9245 MacroAssembler::PushPopQueue queue(&masm); |
9246 | 9246 |
9247 __ Mov(x0, 0x1234000000000000); | 9247 __ Mov(x0, 0x1234000000000000); |
9248 __ Mov(x1, 0x1234000100010001); | 9248 __ Mov(x1, 0x1234000100010001); |
9249 __ Mov(x2, 0x1234000200020002); | 9249 __ Mov(x2, 0x1234000200020002); |
9250 __ Mov(x3, 0x1234000300030003); | 9250 __ Mov(x3, 0x1234000300030003); |
9251 __ Mov(w4, 0x12340004); | 9251 __ Mov(w4, 0x12340004); |
(...skipping 30 matching lines...) |
9282 // Actually pop them. | 9282 // Actually pop them. |
9283 queue.PopQueued(); | 9283 queue.PopQueued(); |
9284 | 9284 |
9285 __ Mov(csp, __ StackPointer()); | 9285 __ Mov(csp, __ StackPointer()); |
9286 __ SetStackPointer(csp); | 9286 __ SetStackPointer(csp); |
9287 | 9287 |
9288 END(); | 9288 END(); |
9289 | 9289 |
9290 RUN(); | 9290 RUN(); |
9291 | 9291 |
9292 ASSERT_EQUAL_64(0x1234000000000000, x0); | 9292 CHECK_EQUAL_64(0x1234000000000000, x0); |
9293 ASSERT_EQUAL_64(0x1234000100010001, x1); | 9293 CHECK_EQUAL_64(0x1234000100010001, x1); |
9294 ASSERT_EQUAL_64(0x1234000200020002, x2); | 9294 CHECK_EQUAL_64(0x1234000200020002, x2); |
9295 ASSERT_EQUAL_64(0x1234000300030003, x3); | 9295 CHECK_EQUAL_64(0x1234000300030003, x3); |
9296 | 9296 |
9297 ASSERT_EQUAL_64(0x0000000012340004, x4); | 9297 CHECK_EQUAL_64(0x0000000012340004, x4); |
9298 ASSERT_EQUAL_64(0x0000000012340005, x5); | 9298 CHECK_EQUAL_64(0x0000000012340005, x5); |
9299 ASSERT_EQUAL_64(0x0000000012340006, x6); | 9299 CHECK_EQUAL_64(0x0000000012340006, x6); |
9300 | 9300 |
9301 ASSERT_EQUAL_FP64(123400.0, d0); | 9301 CHECK_EQUAL_FP64(123400.0, d0); |
9302 ASSERT_EQUAL_FP64(123401.0, d1); | 9302 CHECK_EQUAL_FP64(123401.0, d1); |
9303 | 9303 |
9304 ASSERT_EQUAL_FP32(123402.0, s2); | 9304 CHECK_EQUAL_FP32(123402.0, s2); |
9305 | 9305 |
9306 TEARDOWN(); | 9306 TEARDOWN(); |
9307 } | 9307 } |
9308 | 9308 |
9309 | 9309 |
9310 TEST(jump_both_smi) { | 9310 TEST(jump_both_smi) { |
9311 INIT_V8(); | 9311 INIT_V8(); |
9312 SETUP(); | 9312 SETUP(); |
9313 | 9313 |
9314 Label cond_pass_00, cond_pass_01, cond_pass_10, cond_pass_11; | 9314 Label cond_pass_00, cond_pass_01, cond_pass_10, cond_pass_11; |
(...skipping 45 matching lines...) |
9360 __ B(&done); | 9360 __ B(&done); |
9361 __ Bind(&cond_pass_11); | 9361 __ Bind(&cond_pass_11); |
9362 __ Mov(x7, 1); | 9362 __ Mov(x7, 1); |
9363 | 9363 |
9364 __ Bind(&done); | 9364 __ Bind(&done); |
9365 | 9365 |
9366 END(); | 9366 END(); |
9367 | 9367 |
9368 RUN(); | 9368 RUN(); |
9369 | 9369 |
9370 ASSERT_EQUAL_64(0x5555555500000001UL, x0); | 9370 CHECK_EQUAL_64(0x5555555500000001UL, x0); |
9371 ASSERT_EQUAL_64(0xaaaaaaaa00000001UL, x1); | 9371 CHECK_EQUAL_64(0xaaaaaaaa00000001UL, x1); |
9372 ASSERT_EQUAL_64(0x1234567800000000UL, x2); | 9372 CHECK_EQUAL_64(0x1234567800000000UL, x2); |
9373 ASSERT_EQUAL_64(0x8765432100000000UL, x3); | 9373 CHECK_EQUAL_64(0x8765432100000000UL, x3); |
9374 ASSERT_EQUAL_64(0, x4); | 9374 CHECK_EQUAL_64(0, x4); |
9375 ASSERT_EQUAL_64(0, x5); | 9375 CHECK_EQUAL_64(0, x5); |
9376 ASSERT_EQUAL_64(0, x6); | 9376 CHECK_EQUAL_64(0, x6); |
9377 ASSERT_EQUAL_64(1, x7); | 9377 CHECK_EQUAL_64(1, x7); |
9378 | 9378 |
9379 TEARDOWN(); | 9379 TEARDOWN(); |
9380 } | 9380 } |
9381 | 9381 |
9382 | 9382 |
9383 TEST(jump_either_smi) { | 9383 TEST(jump_either_smi) { |
9384 INIT_V8(); | 9384 INIT_V8(); |
9385 SETUP(); | 9385 SETUP(); |
9386 | 9386 |
9387 Label cond_pass_00, cond_pass_01, cond_pass_10, cond_pass_11; | 9387 Label cond_pass_00, cond_pass_01, cond_pass_10, cond_pass_11; |
(...skipping 45 matching lines...) |
9433 __ B(&done); | 9433 __ B(&done); |
9434 __ Bind(&cond_pass_11); | 9434 __ Bind(&cond_pass_11); |
9435 __ Mov(x7, 1); | 9435 __ Mov(x7, 1); |
9436 | 9436 |
9437 __ Bind(&done); | 9437 __ Bind(&done); |
9438 | 9438 |
9439 END(); | 9439 END(); |
9440 | 9440 |
9441 RUN(); | 9441 RUN(); |
9442 | 9442 |
9443 ASSERT_EQUAL_64(0x5555555500000001UL, x0); | 9443 CHECK_EQUAL_64(0x5555555500000001UL, x0); |
9444 ASSERT_EQUAL_64(0xaaaaaaaa00000001UL, x1); | 9444 CHECK_EQUAL_64(0xaaaaaaaa00000001UL, x1); |
9445 ASSERT_EQUAL_64(0x1234567800000000UL, x2); | 9445 CHECK_EQUAL_64(0x1234567800000000UL, x2); |
9446 ASSERT_EQUAL_64(0x8765432100000000UL, x3); | 9446 CHECK_EQUAL_64(0x8765432100000000UL, x3); |
9447 ASSERT_EQUAL_64(0, x4); | 9447 CHECK_EQUAL_64(0, x4); |
9448 ASSERT_EQUAL_64(1, x5); | 9448 CHECK_EQUAL_64(1, x5); |
9449 ASSERT_EQUAL_64(1, x6); | 9449 CHECK_EQUAL_64(1, x6); |
9450 ASSERT_EQUAL_64(1, x7); | 9450 CHECK_EQUAL_64(1, x7); |
9451 | 9451 |
9452 TEARDOWN(); | 9452 TEARDOWN(); |
9453 } | 9453 } |
9454 | 9454 |
9455 | 9455 |
9456 TEST(noreg) { | 9456 TEST(noreg) { |
9457 // This test doesn't generate any code, but it verifies some invariants | 9457 // This test doesn't generate any code, but it verifies some invariants |
9458 // related to NoReg. | 9458 // related to NoReg. |
9459 CHECK(NoReg.Is(NoFPReg)); | 9459 CHECK(NoReg.Is(NoFPReg)); |
9460 CHECK(NoFPReg.Is(NoReg)); | 9460 CHECK(NoFPReg.Is(NoReg)); |
(...skipping 391 matching lines...) |
9852 __ Printf("Test %%s: %s\n", x2); | 9852 __ Printf("Test %%s: %s\n", x2); |
9853 __ Printf("w3(uint32): %" PRIu32 "\nw4(int32): %" PRId32 "\n" | 9853 __ Printf("w3(uint32): %" PRIu32 "\nw4(int32): %" PRId32 "\n" |
9854 "x5(uint64): %" PRIu64 "\nx6(int64): %" PRId64 "\n", | 9854 "x5(uint64): %" PRIu64 "\nx6(int64): %" PRId64 "\n", |
9855 w3, w4, x5, x6); | 9855 w3, w4, x5, x6); |
9856 __ Printf("%%f: %f\n%%g: %g\n%%e: %e\n%%E: %E\n", s1, s2, d3, d4); | 9856 __ Printf("%%f: %f\n%%g: %g\n%%e: %e\n%%E: %E\n", s1, s2, d3, d4); |
9857 __ Printf("0x%" PRIx32 ", 0x%" PRIx64 "\n", w28, x28); | 9857 __ Printf("0x%" PRIx32 ", 0x%" PRIx64 "\n", w28, x28); |
9858 __ Printf("%g\n", d10); | 9858 __ Printf("%g\n", d10); |
9859 __ Printf("%%%%%s%%%c%%\n", x2, w13); | 9859 __ Printf("%%%%%s%%%c%%\n", x2, w13); |
9860 | 9860 |
9861 // Print the stack pointer (csp). | 9861 // Print the stack pointer (csp). |
9862 ASSERT(csp.Is(__ StackPointer())); | 9862 DCHECK(csp.Is(__ StackPointer())); |
9863 __ Printf("StackPointer(csp): 0x%016" PRIx64 ", 0x%08" PRIx32 "\n", | 9863 __ Printf("StackPointer(csp): 0x%016" PRIx64 ", 0x%08" PRIx32 "\n", |
9864 __ StackPointer(), __ StackPointer().W()); | 9864 __ StackPointer(), __ StackPointer().W()); |
9865 | 9865 |
9866 // Test with a different stack pointer. | 9866 // Test with a different stack pointer. |
9867 const Register old_stack_pointer = __ StackPointer(); | 9867 const Register old_stack_pointer = __ StackPointer(); |
9868 __ Mov(x29, old_stack_pointer); | 9868 __ Mov(x29, old_stack_pointer); |
9869 __ SetStackPointer(x29); | 9869 __ SetStackPointer(x29); |
9870 // Print the stack pointer (not csp). | 9870 // Print the stack pointer (not csp). |
9871 __ Printf("StackPointer(not csp): 0x%016" PRIx64 ", 0x%08" PRIx32 "\n", | 9871 __ Printf("StackPointer(not csp): 0x%016" PRIx64 ", 0x%08" PRIx32 "\n", |
9872 __ StackPointer(), __ StackPointer().W()); | 9872 __ StackPointer(), __ StackPointer().W()); |
9873 __ Mov(old_stack_pointer, __ StackPointer()); | 9873 __ Mov(old_stack_pointer, __ StackPointer()); |
9874 __ SetStackPointer(old_stack_pointer); | 9874 __ SetStackPointer(old_stack_pointer); |
9875 | 9875 |
9876 // Test with three arguments. | 9876 // Test with three arguments. |
9877 __ Printf("3=%u, 4=%u, 5=%u\n", x10, x11, x12); | 9877 __ Printf("3=%u, 4=%u, 5=%u\n", x10, x11, x12); |
9878 | 9878 |
9879 // Mixed argument types. | 9879 // Mixed argument types. |
9880 __ Printf("w3: %" PRIu32 ", s1: %f, x5: %" PRIu64 ", d3: %f\n", | 9880 __ Printf("w3: %" PRIu32 ", s1: %f, x5: %" PRIu64 ", d3: %f\n", |
9881 w3, s1, x5, d3); | 9881 w3, s1, x5, d3); |
9882 __ Printf("s1: %f, d3: %f, w3: %" PRId32 ", x5: %" PRId64 "\n", | 9882 __ Printf("s1: %f, d3: %f, w3: %" PRId32 ", x5: %" PRId64 "\n", |
9883 s1, d3, w3, x5); | 9883 s1, d3, w3, x5); |
9884 | 9884 |
9885 END(); | 9885 END(); |
9886 RUN(); | 9886 RUN(); |
9887 | 9887 |
9888 // We cannot easily test the output of the Printf sequences, and because | 9888 // We cannot easily test the output of the Printf sequences, and because |
9889 // Printf preserves all registers by default, we can't look at the number of | 9889 // Printf preserves all registers by default, we can't look at the number of |
9890 // bytes that were printed. However, the printf_no_preserve test should check | 9890 // bytes that were printed. However, the printf_no_preserve test should check |
9891 // that, and here we just test that we didn't clobber any registers. | 9891 // that, and here we just test that we didn't clobber any registers. |
9892 ASSERT_EQUAL_REGISTERS(before); | 9892 CHECK_EQUAL_REGISTERS(before); |
9893 | 9893 |
9894 TEARDOWN(); | 9894 TEARDOWN(); |
9895 } | 9895 } |
9896 | 9896 |
9897 | 9897 |
9898 TEST(printf_no_preserve) { | 9898 TEST(printf_no_preserve) { |
9899 INIT_V8(); | 9899 INIT_V8(); |
9900 SETUP(); | 9900 SETUP(); |
9901 START(); | 9901 START(); |
9902 | 9902 |
(...skipping 73 matching lines...) |
9976 w3, s1, x5, d3); | 9976 w3, s1, x5, d3); |
9977 __ Mov(x29, x0); | 9977 __ Mov(x29, x0); |
9978 | 9978 |
9979 END(); | 9979 END(); |
9980 RUN(); | 9980 RUN(); |
9981 | 9981 |
9982 // We cannot easily test the exact output of the Printf sequences, but we can | 9982 // We cannot easily test the exact output of the Printf sequences, but we can |
9983 // use the return code to check that the string length was correct. | 9983 // use the return code to check that the string length was correct. |
9984 | 9984 |
9985 // Printf with no arguments. | 9985 // Printf with no arguments. |
9986 ASSERT_EQUAL_64(strlen(test_plain_string), x19); | 9986 CHECK_EQUAL_64(strlen(test_plain_string), x19); |
9987 // x0: 1234, x1: 0x00001234 | 9987 // x0: 1234, x1: 0x00001234 |
9988 ASSERT_EQUAL_64(25, x20); | 9988 CHECK_EQUAL_64(25, x20); |
9989 // d0: 1.234000 | 9989 // d0: 1.234000 |
9990 ASSERT_EQUAL_64(13, x21); | 9990 CHECK_EQUAL_64(13, x21); |
9991 // Test %s: 'This is a substring.' | 9991 // Test %s: 'This is a substring.' |
9992 ASSERT_EQUAL_64(32, x22); | 9992 CHECK_EQUAL_64(32, x22); |
9993 // w3(uint32): 4294967295 | 9993 // w3(uint32): 4294967295 |
9994 // w4(int32): -1 | 9994 // w4(int32): -1 |
9995 // x5(uint64): 18446744073709551615 | 9995 // x5(uint64): 18446744073709551615 |
9996 // x6(int64): -1 | 9996 // x6(int64): -1 |
9997 ASSERT_EQUAL_64(23 + 14 + 33 + 14, x23); | 9997 CHECK_EQUAL_64(23 + 14 + 33 + 14, x23); |
9998 // %f: 1.234000 | 9998 // %f: 1.234000 |
9999 // %g: 2.345 | 9999 // %g: 2.345 |
10000 // %e: 3.456000e+00 | 10000 // %e: 3.456000e+00 |
10001 // %E: 4.567000E+00 | 10001 // %E: 4.567000E+00 |
10002 ASSERT_EQUAL_64(13 + 10 + 17 + 17, x24); | 10002 CHECK_EQUAL_64(13 + 10 + 17 + 17, x24); |
10003 // 0x89abcdef, 0x123456789abcdef | 10003 // 0x89abcdef, 0x123456789abcdef |
10004 ASSERT_EQUAL_64(30, x25); | 10004 CHECK_EQUAL_64(30, x25); |
10005 // 42 | 10005 // 42 |
10006 ASSERT_EQUAL_64(3, x26); | 10006 CHECK_EQUAL_64(3, x26); |
10007 // StackPointer(not csp): 0x00007fb037ae2370, 0x37ae2370 | 10007 // StackPointer(not csp): 0x00007fb037ae2370, 0x37ae2370 |
10008 // Note: This is an example value, but the field width is fixed here so the | 10008 // Note: This is an example value, but the field width is fixed here so the |
10009 // string length is still predictable. | 10009 // string length is still predictable. |
10010 ASSERT_EQUAL_64(54, x27); | 10010 CHECK_EQUAL_64(54, x27); |
10011 // 3=3, 4=40, 5=500 | 10011 // 3=3, 4=40, 5=500 |
10012 ASSERT_EQUAL_64(17, x28); | 10012 CHECK_EQUAL_64(17, x28); |
10013 // w3: 4294967295, s1: 1.234000, x5: 18446744073709551615, d3: 3.456000 | 10013 // w3: 4294967295, s1: 1.234000, x5: 18446744073709551615, d3: 3.456000 |
10014 ASSERT_EQUAL_64(69, x29); | 10014 CHECK_EQUAL_64(69, x29); |
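These expected lengths can be sanity-checked by hand from the strings quoted in the comments. For instance, for the x20 case above (an aside, not part of the test):

    // "x0: 1234, x1: 0x00001234\n" is 25 characters (sizeof of a string
    // literal includes the terminating NUL, hence the -1).
    static_assert(sizeof("x0: 1234, x1: 0x00001234\n") - 1 == 25,
                  "expected Printf output length for x20");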
10015 | 10015 |
10016 TEARDOWN(); | 10016 TEARDOWN(); |
10017 } | 10017 } |
10018 | 10018 |
10019 | 10019 |
10020 // This is a V8-specific test. | 10020 // This is a V8-specific test. |
10021 static void CopyFieldsHelper(CPURegList temps) { | 10021 static void CopyFieldsHelper(CPURegList temps) { |
10022 static const uint64_t kLiteralBase = 0x0100001000100101UL; | 10022 static const uint64_t kLiteralBase = 0x0100001000100101UL; |
10023 static const uint64_t src[] = {kLiteralBase * 1, | 10023 static const uint64_t src[] = {kLiteralBase * 1, |
10024 kLiteralBase * 2, | 10024 kLiteralBase * 2, |
(...skipping 76 matching lines...) |
10101 __ Bind(&slow); | 10101 __ Bind(&slow); |
10102 __ Mov(x2, 0xbad); | 10102 __ Mov(x2, 0xbad); |
10103 | 10103 |
10104 __ Bind(&end); | 10104 __ Bind(&end); |
10105 END(); | 10105 END(); |
10106 | 10106 |
10107 RUN(); | 10107 RUN(); |
10108 | 10108 |
10109 if (must_fail) { | 10109 if (must_fail) { |
10110 // We tested an invalid conversion. The code must have jumped to slow. | 10110 // We tested an invalid conversion. The code must have jumped to slow. |
10111 ASSERT_EQUAL_64(0xbad, x2); | 10111 CHECK_EQUAL_64(0xbad, x2); |
10112 } else { | 10112 } else { |
10113 // The conversion is valid, check the result. | 10113 // The conversion is valid, check the result. |
10114 int32_t result = (value >= 0) ? value : -value; | 10114 int32_t result = (value >= 0) ? value : -value; |
10115 ASSERT_EQUAL_64(result, x1); | 10115 CHECK_EQUAL_64(result, x1); |
10116 | 10116 |
10117 // Check that we didn't jump on slow. | 10117 // Check that we didn't jump on slow. |
10118 ASSERT_EQUAL_64(0xc001c0de, x2); | 10118 CHECK_EQUAL_64(0xc001c0de, x2); |
10119 } | 10119 } |
10120 | 10120 |
10121 TEARDOWN(); | 10121 TEARDOWN(); |
10122 } | 10122 } |
10123 | 10123 |
10124 | 10124 |
10125 TEST(smi_abs) { | 10125 TEST(smi_abs) { |
10126 INIT_V8(); | 10126 INIT_V8(); |
10127 // Simple and edge cases. | 10127 // Simple and edge cases. |
10128 DoSmiAbsTest(0); | 10128 DoSmiAbsTest(0); |
(...skipping 26 matching lines...) Expand all Loading... |
10155 __ B(&end); | 10155 __ B(&end); |
10156 | 10156 |
10157 __ Bind(&target); | 10157 __ Bind(&target); |
10158 __ Mov(x0, 0xc001c0de); | 10158 __ Mov(x0, 0xc001c0de); |
10159 | 10159 |
10160 __ Bind(&end); | 10160 __ Bind(&end); |
10161 END(); | 10161 END(); |
10162 | 10162 |
10163 RUN(); | 10163 RUN(); |
10164 | 10164 |
10165 ASSERT_EQUAL_64(0xc001c0de, x0); | 10165 CHECK_EQUAL_64(0xc001c0de, x0); |
10166 | 10166 |
10167 TEARDOWN(); | 10167 TEARDOWN(); |
10168 } | 10168 } |
10169 | 10169 |
10170 | 10170 |
10171 TEST(barriers) { | 10171 TEST(barriers) { |
10172 // Generate all supported barriers; this is just a smoke test. | 10172 // Generate all supported barriers; this is just a smoke test. |
10173 INIT_V8(); | 10173 INIT_V8(); |
10174 SETUP(); | 10174 SETUP(); |
10175 | 10175 |
(...skipping 50 matching lines...) |
10226 | 10226 |
10227 TEARDOWN(); | 10227 TEARDOWN(); |
10228 } | 10228 } |
10229 | 10229 |
10230 | 10230 |
10231 TEST(process_nan_double) { | 10231 TEST(process_nan_double) { |
10232 INIT_V8(); | 10232 INIT_V8(); |
10233 // Make sure that NaN propagation works correctly. | 10233 // Make sure that NaN propagation works correctly. |
10234 double sn = rawbits_to_double(0x7ff5555511111111); | 10234 double sn = rawbits_to_double(0x7ff5555511111111); |
10235 double qn = rawbits_to_double(0x7ffaaaaa11111111); | 10235 double qn = rawbits_to_double(0x7ffaaaaa11111111); |
10236 ASSERT(IsSignallingNaN(sn)); | 10236 DCHECK(IsSignallingNaN(sn)); |
10237 ASSERT(IsQuietNaN(qn)); | 10237 DCHECK(IsQuietNaN(qn)); |
10238 | 10238 |
10239 // The input NaNs after passing through ProcessNaN. | 10239 // The input NaNs after passing through ProcessNaN. |
10240 double sn_proc = rawbits_to_double(0x7ffd555511111111); | 10240 double sn_proc = rawbits_to_double(0x7ffd555511111111); |
10241 double qn_proc = qn; | 10241 double qn_proc = qn; |
10242 ASSERT(IsQuietNaN(sn_proc)); | 10242 DCHECK(IsQuietNaN(sn_proc)); |
10243 ASSERT(IsQuietNaN(qn_proc)); | 10243 DCHECK(IsQuietNaN(qn_proc)); |
10244 | 10244 |
10245 SETUP(); | 10245 SETUP(); |
10246 START(); | 10246 START(); |
10247 | 10247 |
10248 // Execute a number of instructions which all use ProcessNaN, and check that | 10248 // Execute a number of instructions which all use ProcessNaN, and check that |
10249 // they all handle the NaN correctly. | 10249 // they all handle the NaN correctly. |
10250 __ Fmov(d0, sn); | 10250 __ Fmov(d0, sn); |
10251 __ Fmov(d10, qn); | 10251 __ Fmov(d10, qn); |
10252 | 10252 |
10253 // Operations that always propagate NaNs unchanged, even signalling NaNs. | 10253 // Operations that always propagate NaNs unchanged, even signalling NaNs. |
(...skipping 20 matching lines...) Expand all Loading... |
10274 | 10274 |
10275 // The behaviour of fcvt is checked in TEST(fcvt_sd). | 10275 // The behaviour of fcvt is checked in TEST(fcvt_sd). |
10276 | 10276 |
10277 END(); | 10277 END(); |
10278 RUN(); | 10278 RUN(); |
10279 | 10279 |
10280 uint64_t qn_raw = double_to_rawbits(qn); | 10280 uint64_t qn_raw = double_to_rawbits(qn); |
10281 uint64_t sn_raw = double_to_rawbits(sn); | 10281 uint64_t sn_raw = double_to_rawbits(sn); |
10282 | 10282 |
10283 // - Signalling NaN | 10283 // - Signalling NaN |
10284 ASSERT_EQUAL_FP64(sn, d1); | 10284 CHECK_EQUAL_FP64(sn, d1); |
10285 ASSERT_EQUAL_FP64(rawbits_to_double(sn_raw & ~kDSignMask), d2); | 10285 CHECK_EQUAL_FP64(rawbits_to_double(sn_raw & ~kDSignMask), d2); |
10286 ASSERT_EQUAL_FP64(rawbits_to_double(sn_raw ^ kDSignMask), d3); | 10286 CHECK_EQUAL_FP64(rawbits_to_double(sn_raw ^ kDSignMask), d3); |
10287 // - Quiet NaN | 10287 // - Quiet NaN |
10288 ASSERT_EQUAL_FP64(qn, d11); | 10288 CHECK_EQUAL_FP64(qn, d11); |
10289 ASSERT_EQUAL_FP64(rawbits_to_double(qn_raw & ~kDSignMask), d12); | 10289 CHECK_EQUAL_FP64(rawbits_to_double(qn_raw & ~kDSignMask), d12); |
10290 ASSERT_EQUAL_FP64(rawbits_to_double(qn_raw ^ kDSignMask), d13); | 10290 CHECK_EQUAL_FP64(rawbits_to_double(qn_raw ^ kDSignMask), d13); |
10291 | 10291 |
10292 // - Signalling NaN | 10292 // - Signalling NaN |
10293 ASSERT_EQUAL_FP64(sn_proc, d4); | 10293 CHECK_EQUAL_FP64(sn_proc, d4); |
10294 ASSERT_EQUAL_FP64(sn_proc, d5); | 10294 CHECK_EQUAL_FP64(sn_proc, d5); |
10295 ASSERT_EQUAL_FP64(sn_proc, d6); | 10295 CHECK_EQUAL_FP64(sn_proc, d6); |
10296 ASSERT_EQUAL_FP64(sn_proc, d7); | 10296 CHECK_EQUAL_FP64(sn_proc, d7); |
10297 // - Quiet NaN | 10297 // - Quiet NaN |
10298 ASSERT_EQUAL_FP64(qn_proc, d14); | 10298 CHECK_EQUAL_FP64(qn_proc, d14); |
10299 ASSERT_EQUAL_FP64(qn_proc, d15); | 10299 CHECK_EQUAL_FP64(qn_proc, d15); |
10300 ASSERT_EQUAL_FP64(qn_proc, d16); | 10300 CHECK_EQUAL_FP64(qn_proc, d16); |
10301 ASSERT_EQUAL_FP64(qn_proc, d17); | 10301 CHECK_EQUAL_FP64(qn_proc, d17); |
10302 | 10302 |
10303 TEARDOWN(); | 10303 TEARDOWN(); |
10304 } | 10304 } |
10305 | 10305 |
10306 | 10306 |
10307 TEST(process_nan_float) { | 10307 TEST(process_nan_float) { |
10308 INIT_V8(); | 10308 INIT_V8(); |
10309 // Make sure that NaN propagation works correctly. | 10309 // Make sure that NaN propagation works correctly. |
10310 float sn = rawbits_to_float(0x7f951111); | 10310 float sn = rawbits_to_float(0x7f951111); |
10311 float qn = rawbits_to_float(0x7fea1111); | 10311 float qn = rawbits_to_float(0x7fea1111); |
10312 ASSERT(IsSignallingNaN(sn)); | 10312 DCHECK(IsSignallingNaN(sn)); |
10313 ASSERT(IsQuietNaN(qn)); | 10313 DCHECK(IsQuietNaN(qn)); |
10314 | 10314 |
10315 // The input NaNs after passing through ProcessNaN. | 10315 // The input NaNs after passing through ProcessNaN. |
10316 float sn_proc = rawbits_to_float(0x7fd51111); | 10316 float sn_proc = rawbits_to_float(0x7fd51111); |
10317 float qn_proc = qn; | 10317 float qn_proc = qn; |
10318 ASSERT(IsQuietNaN(sn_proc)); | 10318 DCHECK(IsQuietNaN(sn_proc)); |
10319 ASSERT(IsQuietNaN(qn_proc)); | 10319 DCHECK(IsQuietNaN(qn_proc)); |
10320 | 10320 |
10321 SETUP(); | 10321 SETUP(); |
10322 START(); | 10322 START(); |
10323 | 10323 |
10324 // Execute a number of instructions which all use ProcessNaN, and check that | 10324 // Execute a number of instructions which all use ProcessNaN, and check that |
10325 // they all handle the NaN correctly. | 10325 // they all handle the NaN correctly. |
10326 __ Fmov(s0, sn); | 10326 __ Fmov(s0, sn); |
10327 __ Fmov(s10, qn); | 10327 __ Fmov(s10, qn); |
10328 | 10328 |
10329 // Operations that always propagate NaNs unchanged, even signalling NaNs. | 10329 // Operations that always propagate NaNs unchanged, even signalling NaNs. |
(...skipping 20 matching lines...) Expand all Loading... |
10350 | 10350 |
10351 // The behaviour of fcvt is checked in TEST(fcvt_sd). | 10351 // The behaviour of fcvt is checked in TEST(fcvt_sd). |
10352 | 10352 |
10353 END(); | 10353 END(); |
10354 RUN(); | 10354 RUN(); |
10355 | 10355 |
10356 uint32_t qn_raw = float_to_rawbits(qn); | 10356 uint32_t qn_raw = float_to_rawbits(qn); |
10357 uint32_t sn_raw = float_to_rawbits(sn); | 10357 uint32_t sn_raw = float_to_rawbits(sn); |
10358 | 10358 |
10359 // - Signalling NaN | 10359 // - Signalling NaN |
10360 ASSERT_EQUAL_FP32(sn, s1); | 10360 CHECK_EQUAL_FP32(sn, s1); |
10361 ASSERT_EQUAL_FP32(rawbits_to_float(sn_raw & ~kSSignMask), s2); | 10361 CHECK_EQUAL_FP32(rawbits_to_float(sn_raw & ~kSSignMask), s2); |
10362 ASSERT_EQUAL_FP32(rawbits_to_float(sn_raw ^ kSSignMask), s3); | 10362 CHECK_EQUAL_FP32(rawbits_to_float(sn_raw ^ kSSignMask), s3); |
10363 // - Quiet NaN | 10363 // - Quiet NaN |
10364 ASSERT_EQUAL_FP32(qn, s11); | 10364 CHECK_EQUAL_FP32(qn, s11); |
10365 ASSERT_EQUAL_FP32(rawbits_to_float(qn_raw & ~kSSignMask), s12); | 10365 CHECK_EQUAL_FP32(rawbits_to_float(qn_raw & ~kSSignMask), s12); |
10366 ASSERT_EQUAL_FP32(rawbits_to_float(qn_raw ^ kSSignMask), s13); | 10366 CHECK_EQUAL_FP32(rawbits_to_float(qn_raw ^ kSSignMask), s13); |
10367 | 10367 |
10368 // - Signalling NaN | 10368 // - Signalling NaN |
10369 ASSERT_EQUAL_FP32(sn_proc, s4); | 10369 CHECK_EQUAL_FP32(sn_proc, s4); |
10370 ASSERT_EQUAL_FP32(sn_proc, s5); | 10370 CHECK_EQUAL_FP32(sn_proc, s5); |
10371 ASSERT_EQUAL_FP32(sn_proc, s6); | 10371 CHECK_EQUAL_FP32(sn_proc, s6); |
10372 ASSERT_EQUAL_FP32(sn_proc, s7); | 10372 CHECK_EQUAL_FP32(sn_proc, s7); |
10373 // - Quiet NaN | 10373 // - Quiet NaN |
10374 ASSERT_EQUAL_FP32(qn_proc, s14); | 10374 CHECK_EQUAL_FP32(qn_proc, s14); |
10375 ASSERT_EQUAL_FP32(qn_proc, s15); | 10375 CHECK_EQUAL_FP32(qn_proc, s15); |
10376 ASSERT_EQUAL_FP32(qn_proc, s16); | 10376 CHECK_EQUAL_FP32(qn_proc, s16); |
10377 ASSERT_EQUAL_FP32(qn_proc, s17); | 10377 CHECK_EQUAL_FP32(qn_proc, s17); |
10378 | 10378 |
10379 TEARDOWN(); | 10379 TEARDOWN(); |
10380 } | 10380 } |
10381 | 10381 |
10382 | 10382 |
10383 static void ProcessNaNsHelper(double n, double m, double expected) { | 10383 static void ProcessNaNsHelper(double n, double m, double expected) { |
10384 ASSERT(std::isnan(n) || std::isnan(m)); | 10384 DCHECK(std::isnan(n) || std::isnan(m)); |
10385 ASSERT(std::isnan(expected)); | 10385 DCHECK(std::isnan(expected)); |
10386 | 10386 |
10387 SETUP(); | 10387 SETUP(); |
10388 START(); | 10388 START(); |
10389 | 10389 |
10390 // Execute a number of instructions which all use ProcessNaNs, and check that | 10390 // Execute a number of instructions which all use ProcessNaNs, and check that |
10391 // they all propagate NaNs correctly. | 10391 // they all propagate NaNs correctly. |
10392 __ Fmov(d0, n); | 10392 __ Fmov(d0, n); |
10393 __ Fmov(d1, m); | 10393 __ Fmov(d1, m); |
10394 | 10394 |
10395 __ Fadd(d2, d0, d1); | 10395 __ Fadd(d2, d0, d1); |
10396 __ Fsub(d3, d0, d1); | 10396 __ Fsub(d3, d0, d1); |
10397 __ Fmul(d4, d0, d1); | 10397 __ Fmul(d4, d0, d1); |
10398 __ Fdiv(d5, d0, d1); | 10398 __ Fdiv(d5, d0, d1); |
10399 __ Fmax(d6, d0, d1); | 10399 __ Fmax(d6, d0, d1); |
10400 __ Fmin(d7, d0, d1); | 10400 __ Fmin(d7, d0, d1); |
10401 | 10401 |
10402 END(); | 10402 END(); |
10403 RUN(); | 10403 RUN(); |
10404 | 10404 |
10405 ASSERT_EQUAL_FP64(expected, d2); | 10405 CHECK_EQUAL_FP64(expected, d2); |
10406 ASSERT_EQUAL_FP64(expected, d3); | 10406 CHECK_EQUAL_FP64(expected, d3); |
10407 ASSERT_EQUAL_FP64(expected, d4); | 10407 CHECK_EQUAL_FP64(expected, d4); |
10408 ASSERT_EQUAL_FP64(expected, d5); | 10408 CHECK_EQUAL_FP64(expected, d5); |
10409 ASSERT_EQUAL_FP64(expected, d6); | 10409 CHECK_EQUAL_FP64(expected, d6); |
10410 ASSERT_EQUAL_FP64(expected, d7); | 10410 CHECK_EQUAL_FP64(expected, d7); |
10411 | 10411 |
10412 TEARDOWN(); | 10412 TEARDOWN(); |
10413 } | 10413 } |
10414 | 10414 |
10415 | 10415 |
10416 TEST(process_nans_double) { | 10416 TEST(process_nans_double) { |
10417 INIT_V8(); | 10417 INIT_V8(); |
10418 // Make sure that NaN propagation works correctly. | 10418 // Make sure that NaN propagation works correctly. |
10419 double sn = rawbits_to_double(0x7ff5555511111111); | 10419 double sn = rawbits_to_double(0x7ff5555511111111); |
10420 double sm = rawbits_to_double(0x7ff5555522222222); | 10420 double sm = rawbits_to_double(0x7ff5555522222222); |
10421 double qn = rawbits_to_double(0x7ffaaaaa11111111); | 10421 double qn = rawbits_to_double(0x7ffaaaaa11111111); |
10422 double qm = rawbits_to_double(0x7ffaaaaa22222222); | 10422 double qm = rawbits_to_double(0x7ffaaaaa22222222); |
10423 ASSERT(IsSignallingNaN(sn)); | 10423 DCHECK(IsSignallingNaN(sn)); |
10424 ASSERT(IsSignallingNaN(sm)); | 10424 DCHECK(IsSignallingNaN(sm)); |
10425 ASSERT(IsQuietNaN(qn)); | 10425 DCHECK(IsQuietNaN(qn)); |
10426 ASSERT(IsQuietNaN(qm)); | 10426 DCHECK(IsQuietNaN(qm)); |
10427 | 10427 |
10428 // The input NaNs after passing through ProcessNaN. | 10428 // The input NaNs after passing through ProcessNaN. |
10429 double sn_proc = rawbits_to_double(0x7ffd555511111111); | 10429 double sn_proc = rawbits_to_double(0x7ffd555511111111); |
10430 double sm_proc = rawbits_to_double(0x7ffd555522222222); | 10430 double sm_proc = rawbits_to_double(0x7ffd555522222222); |
10431 double qn_proc = qn; | 10431 double qn_proc = qn; |
10432 double qm_proc = qm; | 10432 double qm_proc = qm; |
10433 ASSERT(IsQuietNaN(sn_proc)); | 10433 DCHECK(IsQuietNaN(sn_proc)); |
10434 ASSERT(IsQuietNaN(sm_proc)); | 10434 DCHECK(IsQuietNaN(sm_proc)); |
10435 ASSERT(IsQuietNaN(qn_proc)); | 10435 DCHECK(IsQuietNaN(qn_proc)); |
10436 ASSERT(IsQuietNaN(qm_proc)); | 10436 DCHECK(IsQuietNaN(qm_proc)); |
10437 | 10437 |
10438 // Quiet NaNs are propagated. | 10438 // Quiet NaNs are propagated. |
10439 ProcessNaNsHelper(qn, 0, qn_proc); | 10439 ProcessNaNsHelper(qn, 0, qn_proc); |
10440 ProcessNaNsHelper(0, qm, qm_proc); | 10440 ProcessNaNsHelper(0, qm, qm_proc); |
10441 ProcessNaNsHelper(qn, qm, qn_proc); | 10441 ProcessNaNsHelper(qn, qm, qn_proc); |
10442 | 10442 |
10443 // Signalling NaNs are propagated, and made quiet. | 10443 // Signalling NaNs are propagated, and made quiet. |
10444 ProcessNaNsHelper(sn, 0, sn_proc); | 10444 ProcessNaNsHelper(sn, 0, sn_proc); |
10445 ProcessNaNsHelper(0, sm, sm_proc); | 10445 ProcessNaNsHelper(0, sm, sm_proc); |
10446 ProcessNaNsHelper(sn, sm, sn_proc); | 10446 ProcessNaNsHelper(sn, sm, sn_proc); |
10447 | 10447 |
10448 // Signalling NaNs take precedence over quiet NaNs. | 10448 // Signalling NaNs take precedence over quiet NaNs. |
10449 ProcessNaNsHelper(sn, qm, sn_proc); | 10449 ProcessNaNsHelper(sn, qm, sn_proc); |
10450 ProcessNaNsHelper(qn, sm, sm_proc); | 10450 ProcessNaNsHelper(qn, sm, sm_proc); |
10451 ProcessNaNsHelper(sn, sm, sn_proc); | 10451 ProcessNaNsHelper(sn, sm, sn_proc); |
10452 } | 10452 } |
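// A minimal sketch, assuming ProcessNaN quiets a signalling NaN by setting
// the most significant fraction bit (bit 51 for doubles) and leaves quiet
// NaNs unchanged. The names kDTopFractionBit and ToQuietNaNSketch are
// illustrative only and are not part of the file under review.
static const uint64_t kDTopFractionBit = 1ULL << 51;

static double ToQuietNaNSketch(double n) {
  DCHECK(std::isnan(n));
  return rawbits_to_double(double_to_rawbits(n) | kDTopFractionBit);
}
// With the inputs above: ToQuietNaNSketch(sn) has raw bits
// 0x7ffd555511111111 (== sn_proc), while qn already has bit 51 set, so
// ToQuietNaNSketch(qn) == qn (== qn_proc). The float case is analogous,
// using fraction bit 22.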
10453 | 10453 |
10454 | 10454 |
10455 static void ProcessNaNsHelper(float n, float m, float expected) { | 10455 static void ProcessNaNsHelper(float n, float m, float expected) { |
10456 ASSERT(std::isnan(n) || std::isnan(m)); | 10456 DCHECK(std::isnan(n) || std::isnan(m)); |
10457 ASSERT(std::isnan(expected)); | 10457 DCHECK(std::isnan(expected)); |
10458 | 10458 |
10459 SETUP(); | 10459 SETUP(); |
10460 START(); | 10460 START(); |
10461 | 10461 |
10462 // Execute a number of instructions which all use ProcessNaNs, and check that | 10462 // Execute a number of instructions which all use ProcessNaNs, and check that |
10463 // they all propagate NaNs correctly. | 10463 // they all propagate NaNs correctly. |
10464 __ Fmov(s0, n); | 10464 __ Fmov(s0, n); |
10465 __ Fmov(s1, m); | 10465 __ Fmov(s1, m); |
10466 | 10466 |
10467 __ Fadd(s2, s0, s1); | 10467 __ Fadd(s2, s0, s1); |
10468 __ Fsub(s3, s0, s1); | 10468 __ Fsub(s3, s0, s1); |
10469 __ Fmul(s4, s0, s1); | 10469 __ Fmul(s4, s0, s1); |
10470 __ Fdiv(s5, s0, s1); | 10470 __ Fdiv(s5, s0, s1); |
10471 __ Fmax(s6, s0, s1); | 10471 __ Fmax(s6, s0, s1); |
10472 __ Fmin(s7, s0, s1); | 10472 __ Fmin(s7, s0, s1); |
10473 | 10473 |
10474 END(); | 10474 END(); |
10475 RUN(); | 10475 RUN(); |
10476 | 10476 |
10477 ASSERT_EQUAL_FP32(expected, s2); | 10477 CHECK_EQUAL_FP32(expected, s2); |
10478 ASSERT_EQUAL_FP32(expected, s3); | 10478 CHECK_EQUAL_FP32(expected, s3); |
10479 ASSERT_EQUAL_FP32(expected, s4); | 10479 CHECK_EQUAL_FP32(expected, s4); |
10480 ASSERT_EQUAL_FP32(expected, s5); | 10480 CHECK_EQUAL_FP32(expected, s5); |
10481 ASSERT_EQUAL_FP32(expected, s6); | 10481 CHECK_EQUAL_FP32(expected, s6); |
10482 ASSERT_EQUAL_FP32(expected, s7); | 10482 CHECK_EQUAL_FP32(expected, s7); |
10483 | 10483 |
10484 TEARDOWN(); | 10484 TEARDOWN(); |
10485 } | 10485 } |
10486 | 10486 |
10487 | 10487 |
10488 TEST(process_nans_float) { | 10488 TEST(process_nans_float) { |
10489 INIT_V8(); | 10489 INIT_V8(); |
10490 // Make sure that NaN propagation works correctly. | 10490 // Make sure that NaN propagation works correctly. |
10491 float sn = rawbits_to_float(0x7f951111); | 10491 float sn = rawbits_to_float(0x7f951111); |
10492 float sm = rawbits_to_float(0x7f952222); | 10492 float sm = rawbits_to_float(0x7f952222); |
10493 float qn = rawbits_to_float(0x7fea1111); | 10493 float qn = rawbits_to_float(0x7fea1111); |
10494 float qm = rawbits_to_float(0x7fea2222); | 10494 float qm = rawbits_to_float(0x7fea2222); |
10495 ASSERT(IsSignallingNaN(sn)); | 10495 DCHECK(IsSignallingNaN(sn)); |
10496 ASSERT(IsSignallingNaN(sm)); | 10496 DCHECK(IsSignallingNaN(sm)); |
10497 ASSERT(IsQuietNaN(qn)); | 10497 DCHECK(IsQuietNaN(qn)); |
10498 ASSERT(IsQuietNaN(qm)); | 10498 DCHECK(IsQuietNaN(qm)); |
10499 | 10499 |
10500 // The input NaNs after passing through ProcessNaN. | 10500 // The input NaNs after passing through ProcessNaN. |
10501 float sn_proc = rawbits_to_float(0x7fd51111); | 10501 float sn_proc = rawbits_to_float(0x7fd51111); |
10502 float sm_proc = rawbits_to_float(0x7fd52222); | 10502 float sm_proc = rawbits_to_float(0x7fd52222); |
10503 float qn_proc = qn; | 10503 float qn_proc = qn; |
10504 float qm_proc = qm; | 10504 float qm_proc = qm; |
10505 ASSERT(IsQuietNaN(sn_proc)); | 10505 DCHECK(IsQuietNaN(sn_proc)); |
10506 ASSERT(IsQuietNaN(sm_proc)); | 10506 DCHECK(IsQuietNaN(sm_proc)); |
10507 ASSERT(IsQuietNaN(qn_proc)); | 10507 DCHECK(IsQuietNaN(qn_proc)); |
10508 ASSERT(IsQuietNaN(qm_proc)); | 10508 DCHECK(IsQuietNaN(qm_proc)); |
10509 | 10509 |
10510 // Quiet NaNs are propagated. | 10510 // Quiet NaNs are propagated. |
10511 ProcessNaNsHelper(qn, 0, qn_proc); | 10511 ProcessNaNsHelper(qn, 0, qn_proc); |
10512 ProcessNaNsHelper(0, qm, qm_proc); | 10512 ProcessNaNsHelper(0, qm, qm_proc); |
10513 ProcessNaNsHelper(qn, qm, qn_proc); | 10513 ProcessNaNsHelper(qn, qm, qn_proc); |
10514 | 10514 |
10515 // Signalling NaNs are propagated, and made quiet. | 10515 // Signalling NaNs are propagated, and made quiet. |
10516 ProcessNaNsHelper(sn, 0, sn_proc); | 10516 ProcessNaNsHelper(sn, 0, sn_proc); |
10517 ProcessNaNsHelper(0, sm, sm_proc); | 10517 ProcessNaNsHelper(0, sm, sm_proc); |
10518 ProcessNaNsHelper(sn, sm, sn_proc); | 10518 ProcessNaNsHelper(sn, sm, sn_proc); |
10519 | 10519 |
10520 // Signalling NaNs take precedence over quiet NaNs. | 10520 // Signalling NaNs take precedence over quiet NaNs. |
10521 ProcessNaNsHelper(sn, qm, sn_proc); | 10521 ProcessNaNsHelper(sn, qm, sn_proc); |
10522 ProcessNaNsHelper(qn, sm, sm_proc); | 10522 ProcessNaNsHelper(qn, sm, sm_proc); |
10523 ProcessNaNsHelper(sn, sm, sn_proc); | 10523 ProcessNaNsHelper(sn, sm, sn_proc); |
10524 } | 10524 } |
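// A minimal sketch of the operand-selection rule implied by the expectations
// in the two tests above: a signalling NaN wins over a quiet NaN regardless
// of operand order, and when both operands are the same kind the first
// operand (n) wins. ToQuietNaNSketch is the illustrative helper sketched
// earlier; this is not the production implementation.
static double ProcessNaNsSketch(double n, double m) {
  DCHECK(std::isnan(n) || std::isnan(m));
  if (IsSignallingNaN(n)) return ToQuietNaNSketch(n);
  if (IsSignallingNaN(m)) return ToQuietNaNSketch(m);
  if (IsQuietNaN(n)) return n;
  return m;  // m is the remaining (quiet) NaN.
}
// For example, ProcessNaNsSketch(qn, sm) == sm_proc and
// ProcessNaNsSketch(sn, sm) == sn_proc, matching the helper calls above.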
10525 | 10525 |
10526 | 10526 |
10527 static void DefaultNaNHelper(float n, float m, float a) { | 10527 static void DefaultNaNHelper(float n, float m, float a) { |
10528 ASSERT(std::isnan(n) || std::isnan(m) || std::isnan(a)); | 10528 DCHECK(std::isnan(n) || std::isnan(m) || std::isnan(a)); |
10529 | 10529 |
10530 bool test_1op = std::isnan(n); | 10530 bool test_1op = std::isnan(n); |
10531 bool test_2op = std::isnan(n) || std::isnan(m); | 10531 bool test_2op = std::isnan(n) || std::isnan(m); |
10532 | 10532 |
10533 SETUP(); | 10533 SETUP(); |
10534 START(); | 10534 START(); |
10535 | 10535 |
10536 // Enable Default-NaN mode in the FPCR. | 10536 // Enable Default-NaN mode in the FPCR. |
10537 __ Mrs(x0, FPCR); | 10537 __ Mrs(x0, FPCR); |
10538 __ Orr(x1, x0, DN_mask); | 10538 __ Orr(x1, x0, DN_mask); |
(...skipping 36 matching lines...)
10575 __ Fnmsub(s27, s0, s1, s2); | 10575 __ Fnmsub(s27, s0, s1, s2); |
10576 | 10576 |
10577 // Restore FPCR. | 10577 // Restore FPCR. |
10578 __ Msr(FPCR, x0); | 10578 __ Msr(FPCR, x0); |
10579 | 10579 |
10580 END(); | 10580 END(); |
10581 RUN(); | 10581 RUN(); |
10582 | 10582 |
10583 if (test_1op) { | 10583 if (test_1op) { |
10584 uint32_t n_raw = float_to_rawbits(n); | 10584 uint32_t n_raw = float_to_rawbits(n); |
10585 ASSERT_EQUAL_FP32(n, s10); | 10585 CHECK_EQUAL_FP32(n, s10); |
10586 ASSERT_EQUAL_FP32(rawbits_to_float(n_raw & ~kSSignMask), s11); | 10586 CHECK_EQUAL_FP32(rawbits_to_float(n_raw & ~kSSignMask), s11); |
10587 ASSERT_EQUAL_FP32(rawbits_to_float(n_raw ^ kSSignMask), s12); | 10587 CHECK_EQUAL_FP32(rawbits_to_float(n_raw ^ kSSignMask), s12); |
10588 ASSERT_EQUAL_FP32(kFP32DefaultNaN, s13); | 10588 CHECK_EQUAL_FP32(kFP32DefaultNaN, s13); |
10589 ASSERT_EQUAL_FP32(kFP32DefaultNaN, s14); | 10589 CHECK_EQUAL_FP32(kFP32DefaultNaN, s14); |
10590 ASSERT_EQUAL_FP32(kFP32DefaultNaN, s15); | 10590 CHECK_EQUAL_FP32(kFP32DefaultNaN, s15); |
10591 ASSERT_EQUAL_FP32(kFP32DefaultNaN, s16); | 10591 CHECK_EQUAL_FP32(kFP32DefaultNaN, s16); |
10592 ASSERT_EQUAL_FP64(kFP64DefaultNaN, d17); | 10592 CHECK_EQUAL_FP64(kFP64DefaultNaN, d17); |
10593 } | 10593 } |
10594 | 10594 |
10595 if (test_2op) { | 10595 if (test_2op) { |
10596 ASSERT_EQUAL_FP32(kFP32DefaultNaN, s18); | 10596 CHECK_EQUAL_FP32(kFP32DefaultNaN, s18); |
10597 ASSERT_EQUAL_FP32(kFP32DefaultNaN, s19); | 10597 CHECK_EQUAL_FP32(kFP32DefaultNaN, s19); |
10598 ASSERT_EQUAL_FP32(kFP32DefaultNaN, s20); | 10598 CHECK_EQUAL_FP32(kFP32DefaultNaN, s20); |
10599 ASSERT_EQUAL_FP32(kFP32DefaultNaN, s21); | 10599 CHECK_EQUAL_FP32(kFP32DefaultNaN, s21); |
10600 ASSERT_EQUAL_FP32(kFP32DefaultNaN, s22); | 10600 CHECK_EQUAL_FP32(kFP32DefaultNaN, s22); |
10601 ASSERT_EQUAL_FP32(kFP32DefaultNaN, s23); | 10601 CHECK_EQUAL_FP32(kFP32DefaultNaN, s23); |
10602 } | 10602 } |
10603 | 10603 |
10604 ASSERT_EQUAL_FP32(kFP32DefaultNaN, s24); | 10604 CHECK_EQUAL_FP32(kFP32DefaultNaN, s24); |
10605 ASSERT_EQUAL_FP32(kFP32DefaultNaN, s25); | 10605 CHECK_EQUAL_FP32(kFP32DefaultNaN, s25); |
10606 ASSERT_EQUAL_FP32(kFP32DefaultNaN, s26); | 10606 CHECK_EQUAL_FP32(kFP32DefaultNaN, s26); |
10607 ASSERT_EQUAL_FP32(kFP32DefaultNaN, s27); | 10607 CHECK_EQUAL_FP32(kFP32DefaultNaN, s27); |
10608 | 10608 |
10609 TEARDOWN(); | 10609 TEARDOWN(); |
10610 } | 10610 } |
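// A minimal sketch of what Default-NaN mode changes. The raw values below
// are the standard ARM default NaN encodings and are assumed to match the
// kFP32DefaultNaN/kFP64DefaultNaN constants used in the checks above:
//   - With FPCR.DN set, arithmetic FP instructions return the default quiet
//     NaN instead of propagating an input NaN.
//   - Fmov/Fabs/Fneg only copy or flip bits, so s10-s12 above still see the
//     original input NaN.
static const float kFP32DefaultNaNSketch = rawbits_to_float(0x7fc00000);
static const double kFP64DefaultNaNSketch =
    rawbits_to_double(0x7ff8000000000000);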
10611 | 10611 |
10612 | 10612 |
10613 TEST(default_nan_float) { | 10613 TEST(default_nan_float) { |
10614 INIT_V8(); | 10614 INIT_V8(); |
10615 float sn = rawbits_to_float(0x7f951111); | 10615 float sn = rawbits_to_float(0x7f951111); |
10616 float sm = rawbits_to_float(0x7f952222); | 10616 float sm = rawbits_to_float(0x7f952222); |
10617 float sa = rawbits_to_float(0x7f95aaaa); | 10617 float sa = rawbits_to_float(0x7f95aaaa); |
10618 float qn = rawbits_to_float(0x7fea1111); | 10618 float qn = rawbits_to_float(0x7fea1111); |
10619 float qm = rawbits_to_float(0x7fea2222); | 10619 float qm = rawbits_to_float(0x7fea2222); |
10620 float qa = rawbits_to_float(0x7feaaaaa); | 10620 float qa = rawbits_to_float(0x7feaaaaa); |
10621 ASSERT(IsSignallingNaN(sn)); | 10621 DCHECK(IsSignallingNaN(sn)); |
10622 ASSERT(IsSignallingNaN(sm)); | 10622 DCHECK(IsSignallingNaN(sm)); |
10623 ASSERT(IsSignallingNaN(sa)); | 10623 DCHECK(IsSignallingNaN(sa)); |
10624 ASSERT(IsQuietNaN(qn)); | 10624 DCHECK(IsQuietNaN(qn)); |
10625 ASSERT(IsQuietNaN(qm)); | 10625 DCHECK(IsQuietNaN(qm)); |
10626 ASSERT(IsQuietNaN(qa)); | 10626 DCHECK(IsQuietNaN(qa)); |
10627 | 10627 |
10628 // - Signalling NaNs | 10628 // - Signalling NaNs |
10629 DefaultNaNHelper(sn, 0.0f, 0.0f); | 10629 DefaultNaNHelper(sn, 0.0f, 0.0f); |
10630 DefaultNaNHelper(0.0f, sm, 0.0f); | 10630 DefaultNaNHelper(0.0f, sm, 0.0f); |
10631 DefaultNaNHelper(0.0f, 0.0f, sa); | 10631 DefaultNaNHelper(0.0f, 0.0f, sa); |
10632 DefaultNaNHelper(sn, sm, 0.0f); | 10632 DefaultNaNHelper(sn, sm, 0.0f); |
10633 DefaultNaNHelper(0.0f, sm, sa); | 10633 DefaultNaNHelper(0.0f, sm, sa); |
10634 DefaultNaNHelper(sn, 0.0f, sa); | 10634 DefaultNaNHelper(sn, 0.0f, sa); |
10635 DefaultNaNHelper(sn, sm, sa); | 10635 DefaultNaNHelper(sn, sm, sa); |
10636 // - Quiet NaNs | 10636 // - Quiet NaNs |
10637 DefaultNaNHelper(qn, 0.0f, 0.0f); | 10637 DefaultNaNHelper(qn, 0.0f, 0.0f); |
10638 DefaultNaNHelper(0.0f, qm, 0.0f); | 10638 DefaultNaNHelper(0.0f, qm, 0.0f); |
10639 DefaultNaNHelper(0.0f, 0.0f, qa); | 10639 DefaultNaNHelper(0.0f, 0.0f, qa); |
10640 DefaultNaNHelper(qn, qm, 0.0f); | 10640 DefaultNaNHelper(qn, qm, 0.0f); |
10641 DefaultNaNHelper(0.0f, qm, qa); | 10641 DefaultNaNHelper(0.0f, qm, qa); |
10642 DefaultNaNHelper(qn, 0.0f, qa); | 10642 DefaultNaNHelper(qn, 0.0f, qa); |
10643 DefaultNaNHelper(qn, qm, qa); | 10643 DefaultNaNHelper(qn, qm, qa); |
10644 // - Mixed NaNs | 10644 // - Mixed NaNs |
10645 DefaultNaNHelper(qn, sm, sa); | 10645 DefaultNaNHelper(qn, sm, sa); |
10646 DefaultNaNHelper(sn, qm, sa); | 10646 DefaultNaNHelper(sn, qm, sa); |
10647 DefaultNaNHelper(sn, sm, qa); | 10647 DefaultNaNHelper(sn, sm, qa); |
10648 DefaultNaNHelper(qn, qm, sa); | 10648 DefaultNaNHelper(qn, qm, sa); |
10649 DefaultNaNHelper(sn, qm, qa); | 10649 DefaultNaNHelper(sn, qm, qa); |
10650 DefaultNaNHelper(qn, sm, qa); | 10650 DefaultNaNHelper(qn, sm, qa); |
10651 DefaultNaNHelper(qn, qm, qa); | 10651 DefaultNaNHelper(qn, qm, qa); |
10652 } | 10652 } |
10653 | 10653 |
10654 | 10654 |
10655 static void DefaultNaNHelper(double n, double m, double a) { | 10655 static void DefaultNaNHelper(double n, double m, double a) { |
10656 ASSERT(std::isnan(n) || std::isnan(m) || std::isnan(a)); | 10656 DCHECK(std::isnan(n) || std::isnan(m) || std::isnan(a)); |
10657 | 10657 |
10658 bool test_1op = std::isnan(n); | 10658 bool test_1op = std::isnan(n); |
10659 bool test_2op = std::isnan(n) || std::isnan(m); | 10659 bool test_2op = std::isnan(n) || std::isnan(m); |
10660 | 10660 |
10661 SETUP(); | 10661 SETUP(); |
10662 START(); | 10662 START(); |
10663 | 10663 |
10664 // Enable Default-NaN mode in the FPCR. | 10664 // Enable Default-NaN mode in the FPCR. |
10665 __ Mrs(x0, FPCR); | 10665 __ Mrs(x0, FPCR); |
10666 __ Orr(x1, x0, DN_mask); | 10666 __ Orr(x1, x0, DN_mask); |
(...skipping 36 matching lines...)
10703 __ Fnmsub(d27, d0, d1, d2); | 10703 __ Fnmsub(d27, d0, d1, d2); |
10704 | 10704 |
10705 // Restore FPCR. | 10705 // Restore FPCR. |
10706 __ Msr(FPCR, x0); | 10706 __ Msr(FPCR, x0); |
10707 | 10707 |
10708 END(); | 10708 END(); |
10709 RUN(); | 10709 RUN(); |
10710 | 10710 |
10711 if (test_1op) { | 10711 if (test_1op) { |
10712 uint64_t n_raw = double_to_rawbits(n); | 10712 uint64_t n_raw = double_to_rawbits(n); |
10713 ASSERT_EQUAL_FP64(n, d10); | 10713 CHECK_EQUAL_FP64(n, d10); |
10714 ASSERT_EQUAL_FP64(rawbits_to_double(n_raw & ~kDSignMask), d11); | 10714 CHECK_EQUAL_FP64(rawbits_to_double(n_raw & ~kDSignMask), d11); |
10715 ASSERT_EQUAL_FP64(rawbits_to_double(n_raw ^ kDSignMask), d12); | 10715 CHECK_EQUAL_FP64(rawbits_to_double(n_raw ^ kDSignMask), d12); |
10716 ASSERT_EQUAL_FP64(kFP64DefaultNaN, d13); | 10716 CHECK_EQUAL_FP64(kFP64DefaultNaN, d13); |
10717 ASSERT_EQUAL_FP64(kFP64DefaultNaN, d14); | 10717 CHECK_EQUAL_FP64(kFP64DefaultNaN, d14); |
10718 ASSERT_EQUAL_FP64(kFP64DefaultNaN, d15); | 10718 CHECK_EQUAL_FP64(kFP64DefaultNaN, d15); |
10719 ASSERT_EQUAL_FP64(kFP64DefaultNaN, d16); | 10719 CHECK_EQUAL_FP64(kFP64DefaultNaN, d16); |
10720 ASSERT_EQUAL_FP32(kFP32DefaultNaN, s17); | 10720 CHECK_EQUAL_FP32(kFP32DefaultNaN, s17); |
10721 } | 10721 } |
10722 | 10722 |
10723 if (test_2op) { | 10723 if (test_2op) { |
10724 ASSERT_EQUAL_FP64(kFP64DefaultNaN, d18); | 10724 CHECK_EQUAL_FP64(kFP64DefaultNaN, d18); |
10725 ASSERT_EQUAL_FP64(kFP64DefaultNaN, d19); | 10725 CHECK_EQUAL_FP64(kFP64DefaultNaN, d19); |
10726 ASSERT_EQUAL_FP64(kFP64DefaultNaN, d20); | 10726 CHECK_EQUAL_FP64(kFP64DefaultNaN, d20); |
10727 ASSERT_EQUAL_FP64(kFP64DefaultNaN, d21); | 10727 CHECK_EQUAL_FP64(kFP64DefaultNaN, d21); |
10728 ASSERT_EQUAL_FP64(kFP64DefaultNaN, d22); | 10728 CHECK_EQUAL_FP64(kFP64DefaultNaN, d22); |
10729 ASSERT_EQUAL_FP64(kFP64DefaultNaN, d23); | 10729 CHECK_EQUAL_FP64(kFP64DefaultNaN, d23); |
10730 } | 10730 } |
10731 | 10731 |
10732 ASSERT_EQUAL_FP64(kFP64DefaultNaN, d24); | 10732 CHECK_EQUAL_FP64(kFP64DefaultNaN, d24); |
10733 ASSERT_EQUAL_FP64(kFP64DefaultNaN, d25); | 10733 CHECK_EQUAL_FP64(kFP64DefaultNaN, d25); |
10734 ASSERT_EQUAL_FP64(kFP64DefaultNaN, d26); | 10734 CHECK_EQUAL_FP64(kFP64DefaultNaN, d26); |
10735 ASSERT_EQUAL_FP64(kFP64DefaultNaN, d27); | 10735 CHECK_EQUAL_FP64(kFP64DefaultNaN, d27); |
10736 | 10736 |
10737 TEARDOWN(); | 10737 TEARDOWN(); |
10738 } | 10738 } |
10739 | 10739 |
10740 | 10740 |
10741 TEST(default_nan_double) { | 10741 TEST(default_nan_double) { |
10742 INIT_V8(); | 10742 INIT_V8(); |
10743 double sn = rawbits_to_double(0x7ff5555511111111); | 10743 double sn = rawbits_to_double(0x7ff5555511111111); |
10744 double sm = rawbits_to_double(0x7ff5555522222222); | 10744 double sm = rawbits_to_double(0x7ff5555522222222); |
10745 double sa = rawbits_to_double(0x7ff55555aaaaaaaa); | 10745 double sa = rawbits_to_double(0x7ff55555aaaaaaaa); |
10746 double qn = rawbits_to_double(0x7ffaaaaa11111111); | 10746 double qn = rawbits_to_double(0x7ffaaaaa11111111); |
10747 double qm = rawbits_to_double(0x7ffaaaaa22222222); | 10747 double qm = rawbits_to_double(0x7ffaaaaa22222222); |
10748 double qa = rawbits_to_double(0x7ffaaaaaaaaaaaaa); | 10748 double qa = rawbits_to_double(0x7ffaaaaaaaaaaaaa); |
10749 ASSERT(IsSignallingNaN(sn)); | 10749 DCHECK(IsSignallingNaN(sn)); |
10750 ASSERT(IsSignallingNaN(sm)); | 10750 DCHECK(IsSignallingNaN(sm)); |
10751 ASSERT(IsSignallingNaN(sa)); | 10751 DCHECK(IsSignallingNaN(sa)); |
10752 ASSERT(IsQuietNaN(qn)); | 10752 DCHECK(IsQuietNaN(qn)); |
10753 ASSERT(IsQuietNaN(qm)); | 10753 DCHECK(IsQuietNaN(qm)); |
10754 ASSERT(IsQuietNaN(qa)); | 10754 DCHECK(IsQuietNaN(qa)); |
10755 | 10755 |
10756 // - Signalling NaNs | 10756 // - Signalling NaNs |
10757 DefaultNaNHelper(sn, 0.0, 0.0); | 10757 DefaultNaNHelper(sn, 0.0, 0.0); |
10758 DefaultNaNHelper(0.0, sm, 0.0); | 10758 DefaultNaNHelper(0.0, sm, 0.0); |
10759 DefaultNaNHelper(0.0, 0.0, sa); | 10759 DefaultNaNHelper(0.0, 0.0, sa); |
10760 DefaultNaNHelper(sn, sm, 0.0); | 10760 DefaultNaNHelper(sn, sm, 0.0); |
10761 DefaultNaNHelper(0.0, sm, sa); | 10761 DefaultNaNHelper(0.0, sm, sa); |
10762 DefaultNaNHelper(sn, 0.0, sa); | 10762 DefaultNaNHelper(sn, 0.0, sa); |
10763 DefaultNaNHelper(sn, sm, sa); | 10763 DefaultNaNHelper(sn, sm, sa); |
10764 // - Quiet NaNs | 10764 // - Quiet NaNs |
(...skipping 40 matching lines...)
10805 Assembler::BlockConstPoolScope scope(&masm); | 10805 Assembler::BlockConstPoolScope scope(&masm); |
10806 call_start = buf + __ pc_offset(); | 10806 call_start = buf + __ pc_offset(); |
10807 __ Call(buf + function.pos(), RelocInfo::NONE64); | 10807 __ Call(buf + function.pos(), RelocInfo::NONE64); |
10808 return_address = buf + __ pc_offset(); | 10808 return_address = buf + __ pc_offset(); |
10809 } | 10809 } |
10810 __ Pop(xzr, lr); | 10810 __ Pop(xzr, lr); |
10811 END(); | 10811 END(); |
10812 | 10812 |
10813 RUN(); | 10813 RUN(); |
10814 | 10814 |
10815 ASSERT_EQUAL_64(1, x0); | 10815 CHECK_EQUAL_64(1, x0); |
10816 | 10816 |
10817 // The return_address_from_call_start function doesn't currently encounter any | 10817 // The return_address_from_call_start function doesn't currently encounter any |
10818 // non-relocatable sequences, so we check it here to make sure it works. | 10818 // non-relocatable sequences, so we check it here to make sure it works. |
10819 // TODO(jbramley): Once Crankshaft is complete, decide if we need to support | 10819 // TODO(jbramley): Once Crankshaft is complete, decide if we need to support |
10820 // non-relocatable calls at all. | 10820 // non-relocatable calls at all. |
10821 CHECK(return_address == | 10821 CHECK(return_address == |
10822 Assembler::return_address_from_call_start(call_start)); | 10822 Assembler::return_address_from_call_start(call_start)); |
10823 | 10823 |
10824 TEARDOWN(); | 10824 TEARDOWN(); |
10825 } | 10825 } |
(...skipping 36 matching lines...)
10862 } | 10862 } |
10863 | 10863 |
10864 __ Bind(&fail); | 10864 __ Bind(&fail); |
10865 __ Mov(x0, -1); | 10865 __ Mov(x0, -1); |
10866 | 10866 |
10867 __ Bind(&done); | 10867 __ Bind(&done); |
10868 | 10868 |
10869 END(); | 10869 END(); |
10870 RUN(); | 10870 RUN(); |
10871 | 10871 |
10872 ASSERT_EQUAL_64(0, x0); | 10872 CHECK_EQUAL_64(0, x0); |
10873 ASSERT_EQUAL_64(value, x1); | 10873 CHECK_EQUAL_64(value, x1); |
10874 ASSERT_EQUAL_64(expected, x10); | 10874 CHECK_EQUAL_64(expected, x10); |
10875 ASSERT_EQUAL_64(expected, x11); | 10875 CHECK_EQUAL_64(expected, x11); |
10876 ASSERT_EQUAL_64(expected, x12); | 10876 CHECK_EQUAL_64(expected, x12); |
10877 ASSERT_EQUAL_64(expected, x13); | 10877 CHECK_EQUAL_64(expected, x13); |
10878 | 10878 |
10879 TEARDOWN(); | 10879 TEARDOWN(); |
10880 } | 10880 } |
10881 | 10881 |
10882 | 10882 |
10883 static void AbsHelperW(int32_t value) { | 10883 static void AbsHelperW(int32_t value) { |
10884 int32_t expected; | 10884 int32_t expected; |
10885 | 10885 |
10886 SETUP(); | 10886 SETUP(); |
10887 START(); | 10887 START(); |
(...skipping 31 matching lines...)
10919 } | 10919 } |
10920 | 10920 |
10921 __ Bind(&fail); | 10921 __ Bind(&fail); |
10922 __ Mov(w0, -1); | 10922 __ Mov(w0, -1); |
10923 | 10923 |
10924 __ Bind(&done); | 10924 __ Bind(&done); |
10925 | 10925 |
10926 END(); | 10926 END(); |
10927 RUN(); | 10927 RUN(); |
10928 | 10928 |
10929 ASSERT_EQUAL_32(0, w0); | 10929 CHECK_EQUAL_32(0, w0); |
10930 ASSERT_EQUAL_32(value, w1); | 10930 CHECK_EQUAL_32(value, w1); |
10931 ASSERT_EQUAL_32(expected, w10); | 10931 CHECK_EQUAL_32(expected, w10); |
10932 ASSERT_EQUAL_32(expected, w11); | 10932 CHECK_EQUAL_32(expected, w11); |
10933 ASSERT_EQUAL_32(expected, w12); | 10933 CHECK_EQUAL_32(expected, w12); |
10934 ASSERT_EQUAL_32(expected, w13); | 10934 CHECK_EQUAL_32(expected, w13); |
10935 | 10935 |
10936 TEARDOWN(); | 10936 TEARDOWN(); |
10937 } | 10937 } |
10938 | 10938 |
10939 | 10939 |
10940 TEST(abs) { | 10940 TEST(abs) { |
10941 INIT_V8(); | 10941 INIT_V8(); |
10942 AbsHelperX(0); | 10942 AbsHelperX(0); |
10943 AbsHelperX(42); | 10943 AbsHelperX(42); |
10944 AbsHelperX(-42); | 10944 AbsHelperX(-42); |
(...skipping 37 matching lines...)
10982 CodeDesc desc; | 10982 CodeDesc desc; |
10983 masm.GetCode(&desc); | 10983 masm.GetCode(&desc); |
10984 Handle<Code> code = isolate->factory()->NewCode(desc, 0, masm.CodeObject()); | 10984 Handle<Code> code = isolate->factory()->NewCode(desc, 0, masm.CodeObject()); |
10985 | 10985 |
10986 unsigned pool_count = 0; | 10986 unsigned pool_count = 0; |
10987 int pool_mask = RelocInfo::ModeMask(RelocInfo::CONST_POOL) | | 10987 int pool_mask = RelocInfo::ModeMask(RelocInfo::CONST_POOL) | |
10988 RelocInfo::ModeMask(RelocInfo::VENEER_POOL); | 10988 RelocInfo::ModeMask(RelocInfo::VENEER_POOL); |
10989 for (RelocIterator it(*code, pool_mask); !it.done(); it.next()) { | 10989 for (RelocIterator it(*code, pool_mask); !it.done(); it.next()) { |
10990 RelocInfo* info = it.rinfo(); | 10990 RelocInfo* info = it.rinfo(); |
10991 if (RelocInfo::IsConstPool(info->rmode())) { | 10991 if (RelocInfo::IsConstPool(info->rmode())) { |
10992 ASSERT(info->data() == constant_pool_size); | 10992 DCHECK(info->data() == constant_pool_size); |
10993 ++pool_count; | 10993 ++pool_count; |
10994 } | 10994 } |
10995 if (RelocInfo::IsVeneerPool(info->rmode())) { | 10995 if (RelocInfo::IsVeneerPool(info->rmode())) { |
10996 ASSERT(info->data() == veneer_pool_size); | 10996 DCHECK(info->data() == veneer_pool_size); |
10997 ++pool_count; | 10997 ++pool_count; |
10998 } | 10998 } |
10999 } | 10999 } |
11000 | 11000 |
11001 ASSERT(pool_count == 2); | 11001 DCHECK(pool_count == 2); |
11002 | 11002 |
11003 TEARDOWN(); | 11003 TEARDOWN(); |
11004 } | 11004 } |