Index: test/cctest/test-assembler-mips.cc
diff --git a/test/cctest/test-assembler-mips.cc b/test/cctest/test-assembler-mips.cc
index 467bb614901dd5037cfab7124b505d93e2c78e1e..419ef35526b4299649a8c1ce902d6eb07409e052 100644
--- a/test/cctest/test-assembler-mips.cc
+++ b/test/cctest/test-assembler-mips.cc
@@ -277,33 +277,30 @@ TEST(MIPS3) {
   MacroAssembler assm(isolate, NULL, 0);
   Label L, C;

-  if (CpuFeatures::IsSupported(FPU)) {
-    CpuFeatureScope scope(&assm, FPU);
+  __ ldc1(f4, MemOperand(a0, OFFSET_OF(T, a)) );
+  __ ldc1(f6, MemOperand(a0, OFFSET_OF(T, b)) );
+  __ add_d(f8, f4, f6);
+  __ sdc1(f8, MemOperand(a0, OFFSET_OF(T, c)) );  // c = a + b.

-    __ ldc1(f4, MemOperand(a0, OFFSET_OF(T, a)) );
-    __ ldc1(f6, MemOperand(a0, OFFSET_OF(T, b)) );
-    __ add_d(f8, f4, f6);
-    __ sdc1(f8, MemOperand(a0, OFFSET_OF(T, c)) );  // c = a + b.
+  __ mov_d(f10, f8);  // c
+  __ neg_d(f12, f6);  // -b
+  __ sub_d(f10, f10, f12);
+  __ sdc1(f10, MemOperand(a0, OFFSET_OF(T, d)) );  // d = c - (-b).

-    __ mov_d(f10, f8);  // c
-    __ neg_d(f12, f6);  // -b
-    __ sub_d(f10, f10, f12);
-    __ sdc1(f10, MemOperand(a0, OFFSET_OF(T, d)) );  // d = c - (-b).
+  __ sdc1(f4, MemOperand(a0, OFFSET_OF(T, b)) );  // b = a.

-    __ sdc1(f4, MemOperand(a0, OFFSET_OF(T, b)) );  // b = a.
+  __ li(t0, 120);
+  __ mtc1(t0, f14);
+  __ cvt_d_w(f14, f14);  // f14 = 120.0.
+  __ mul_d(f10, f10, f14);
+  __ sdc1(f10, MemOperand(a0, OFFSET_OF(T, e)) );  // e = d * 120 = 1.8066e16.

-    __ li(t0, 120);
-    __ mtc1(t0, f14);
-    __ cvt_d_w(f14, f14);  // f14 = 120.0.
-    __ mul_d(f10, f10, f14);
-    __ sdc1(f10, MemOperand(a0, OFFSET_OF(T, e)) );  // e = d * 120 = 1.8066e16.
+  __ div_d(f12, f10, f4);
+  __ sdc1(f12, MemOperand(a0, OFFSET_OF(T, f)) );  // f = e / a = 120.44.

-    __ div_d(f12, f10, f4);
-    __ sdc1(f12, MemOperand(a0, OFFSET_OF(T, f)) );  // f = e / a = 120.44.
-
-    __ sqrt_d(f14, f12);
-    __ sdc1(f14, MemOperand(a0, OFFSET_OF(T, g)) );
-    // g = sqrt(f) = 10.97451593465515908537
+  __ sqrt_d(f14, f12);
+  __ sdc1(f14, MemOperand(a0, OFFSET_OF(T, g)) );
+  // g = sqrt(f) = 10.97451593465515908537

   if (kArchVariant == kMips32r2) {
     __ ldc1(f4, MemOperand(a0, OFFSET_OF(T, h)) );
@@ -312,36 +309,35 @@ TEST(MIPS3) {
     __ sdc1(f14, MemOperand(a0, OFFSET_OF(T, h)) );
   }

-    __ jr(ra);
-    __ nop();
+  __ jr(ra);
+  __ nop();

-    CodeDesc desc;
-    assm.GetCode(&desc);
-    Object* code = HEAP->CreateCode(
-        desc,
-        Code::ComputeFlags(Code::STUB),
-        Handle<Code>())->ToObjectChecked();
-    CHECK(code->IsCode());
-    F3 f = FUNCTION_CAST<F3>(Code::cast(code)->entry());
-    t.a = 1.5e14;
-    t.b = 2.75e11;
-    t.c = 0.0;
-    t.d = 0.0;
-    t.e = 0.0;
-    t.f = 0.0;
-    t.h = 1.5;
-    t.i = 2.75;
-    Object* dummy = CALL_GENERATED_CODE(f, &t, 0, 0, 0, 0);
-    USE(dummy);
-    CHECK_EQ(1.5e14, t.a);
-    CHECK_EQ(1.5e14, t.b);
-    CHECK_EQ(1.50275e14, t.c);
-    CHECK_EQ(1.50550e14, t.d);
-    CHECK_EQ(1.8066e16, t.e);
-    CHECK_EQ(120.44, t.f);
-    CHECK_EQ(10.97451593465515908537, t.g);
-    CHECK_EQ(6.875, t.h);
-  }
+  CodeDesc desc;
+  assm.GetCode(&desc);
+  Object* code = HEAP->CreateCode(
+      desc,
+      Code::ComputeFlags(Code::STUB),
+      Handle<Code>())->ToObjectChecked();
+  CHECK(code->IsCode());
+  F3 f = FUNCTION_CAST<F3>(Code::cast(code)->entry());
+  t.a = 1.5e14;
+  t.b = 2.75e11;
+  t.c = 0.0;
+  t.d = 0.0;
+  t.e = 0.0;
+  t.f = 0.0;
+  t.h = 1.5;
+  t.i = 2.75;
+  Object* dummy = CALL_GENERATED_CODE(f, &t, 0, 0, 0, 0);
+  USE(dummy);
+  CHECK_EQ(1.5e14, t.a);
+  CHECK_EQ(1.5e14, t.b);
+  CHECK_EQ(1.50275e14, t.c);
+  CHECK_EQ(1.50550e14, t.d);
+  CHECK_EQ(1.8066e16, t.e);
+  CHECK_EQ(120.44, t.f);
+  CHECK_EQ(10.97451593465515908537, t.g);
+  CHECK_EQ(6.875, t.h);
 }

@@ -361,48 +357,44 @@ TEST(MIPS4) {
   Assembler assm(isolate, NULL, 0);
   Label L, C;

-  if (CpuFeatures::IsSupported(FPU)) {
-    CpuFeatureScope scope(&assm, FPU);
-
-    __ ldc1(f4, MemOperand(a0, OFFSET_OF(T, a)) );
-    __ ldc1(f6, MemOperand(a0, OFFSET_OF(T, b)) );
+  __ ldc1(f4, MemOperand(a0, OFFSET_OF(T, a)) );
+  __ ldc1(f6, MemOperand(a0, OFFSET_OF(T, b)) );

-    // Swap f4 and f6, by using four integer registers, t0-t3.
-    __ mfc1(t0, f4);
-    __ mfc1(t1, f5);
-    __ mfc1(t2, f6);
-    __ mfc1(t3, f7);
+  // Swap f4 and f6, by using four integer registers, t0-t3.
+  __ mfc1(t0, f4);
+  __ mfc1(t1, f5);
+  __ mfc1(t2, f6);
+  __ mfc1(t3, f7);

-    __ mtc1(t0, f6);
-    __ mtc1(t1, f7);
-    __ mtc1(t2, f4);
-    __ mtc1(t3, f5);
+  __ mtc1(t0, f6);
+  __ mtc1(t1, f7);
+  __ mtc1(t2, f4);
+  __ mtc1(t3, f5);

-    // Store the swapped f4 and f5 back to memory.
-    __ sdc1(f4, MemOperand(a0, OFFSET_OF(T, a)) );
-    __ sdc1(f6, MemOperand(a0, OFFSET_OF(T, c)) );
+  // Store the swapped f4 and f5 back to memory.
+  __ sdc1(f4, MemOperand(a0, OFFSET_OF(T, a)) );
+  __ sdc1(f6, MemOperand(a0, OFFSET_OF(T, c)) );

-    __ jr(ra);
-    __ nop();
+  __ jr(ra);
+  __ nop();

-    CodeDesc desc;
-    assm.GetCode(&desc);
-    Object* code = HEAP->CreateCode(
-        desc,
-        Code::ComputeFlags(Code::STUB),
-        Handle<Code>())->ToObjectChecked();
-    CHECK(code->IsCode());
-    F3 f = FUNCTION_CAST<F3>(Code::cast(code)->entry());
-    t.a = 1.5e22;
-    t.b = 2.75e11;
-    t.c = 17.17;
-    Object* dummy = CALL_GENERATED_CODE(f, &t, 0, 0, 0, 0);
-    USE(dummy);
+  CodeDesc desc;
+  assm.GetCode(&desc);
+  Object* code = HEAP->CreateCode(
+      desc,
+      Code::ComputeFlags(Code::STUB),
+      Handle<Code>())->ToObjectChecked();
+  CHECK(code->IsCode());
+  F3 f = FUNCTION_CAST<F3>(Code::cast(code)->entry());
+  t.a = 1.5e22;
+  t.b = 2.75e11;
+  t.c = 17.17;
+  Object* dummy = CALL_GENERATED_CODE(f, &t, 0, 0, 0, 0);
+  USE(dummy);

-    CHECK_EQ(2.75e11, t.a);
-    CHECK_EQ(2.75e11, t.b);
-    CHECK_EQ(1.5e22, t.c);
-  }
+  CHECK_EQ(2.75e11, t.a);
+  CHECK_EQ(2.75e11, t.b);
+  CHECK_EQ(1.5e22, t.c);
 }

@@ -423,58 +415,54 @@ TEST(MIPS5) {
   Assembler assm(isolate, NULL, 0);
   Label L, C;

-  if (CpuFeatures::IsSupported(FPU)) {
-    CpuFeatureScope scope(&assm, FPU);
+  // Load all structure elements to registers.
+  __ ldc1(f4, MemOperand(a0, OFFSET_OF(T, a)) );
+  __ ldc1(f6, MemOperand(a0, OFFSET_OF(T, b)) );
+  __ lw(t0, MemOperand(a0, OFFSET_OF(T, i)) );
+  __ lw(t1, MemOperand(a0, OFFSET_OF(T, j)) );

-    // Load all structure elements to registers.
-    __ ldc1(f4, MemOperand(a0, OFFSET_OF(T, a)) );
-    __ ldc1(f6, MemOperand(a0, OFFSET_OF(T, b)) );
-    __ lw(t0, MemOperand(a0, OFFSET_OF(T, i)) );
-    __ lw(t1, MemOperand(a0, OFFSET_OF(T, j)) );
-
-    // Convert double in f4 to int in element i.
-    __ cvt_w_d(f8, f4);
-    __ mfc1(t2, f8);
-    __ sw(t2, MemOperand(a0, OFFSET_OF(T, i)) );
-
-    // Convert double in f6 to int in element j.
-    __ cvt_w_d(f10, f6);
-    __ mfc1(t3, f10);
-    __ sw(t3, MemOperand(a0, OFFSET_OF(T, j)) );
-
-    // Convert int in original i (t0) to double in a.
-    __ mtc1(t0, f12);
-    __ cvt_d_w(f0, f12);
-    __ sdc1(f0, MemOperand(a0, OFFSET_OF(T, a)) );
-
-    // Convert int in original j (t1) to double in b.
-    __ mtc1(t1, f14);
-    __ cvt_d_w(f2, f14);
-    __ sdc1(f2, MemOperand(a0, OFFSET_OF(T, b)) );
+  // Convert double in f4 to int in element i.
+  __ cvt_w_d(f8, f4);
+  __ mfc1(t2, f8);
+  __ sw(t2, MemOperand(a0, OFFSET_OF(T, i)) );

-    __ jr(ra);
-    __ nop();
+  // Convert double in f6 to int in element j.
+  __ cvt_w_d(f10, f6);
+  __ mfc1(t3, f10);
+  __ sw(t3, MemOperand(a0, OFFSET_OF(T, j)) );

-    CodeDesc desc;
-    assm.GetCode(&desc);
-    Object* code = HEAP->CreateCode(
-        desc,
-        Code::ComputeFlags(Code::STUB),
-        Handle<Code>())->ToObjectChecked();
-    CHECK(code->IsCode());
-    F3 f = FUNCTION_CAST<F3>(Code::cast(code)->entry());
-    t.a = 1.5e4;
-    t.b = 2.75e8;
-    t.i = 12345678;
-    t.j = -100000;
-    Object* dummy = CALL_GENERATED_CODE(f, &t, 0, 0, 0, 0);
-    USE(dummy);
+  // Convert int in original i (t0) to double in a.
+  __ mtc1(t0, f12);
+  __ cvt_d_w(f0, f12);
+  __ sdc1(f0, MemOperand(a0, OFFSET_OF(T, a)) );

-    CHECK_EQ(12345678.0, t.a);
-    CHECK_EQ(-100000.0, t.b);
-    CHECK_EQ(15000, t.i);
-    CHECK_EQ(275000000, t.j);
-  }
+  // Convert int in original j (t1) to double in b.
+  __ mtc1(t1, f14);
+  __ cvt_d_w(f2, f14);
+  __ sdc1(f2, MemOperand(a0, OFFSET_OF(T, b)) );
+
+  __ jr(ra);
+  __ nop();
+
+  CodeDesc desc;
+  assm.GetCode(&desc);
+  Object* code = HEAP->CreateCode(
+      desc,
+      Code::ComputeFlags(Code::STUB),
+      Handle<Code>())->ToObjectChecked();
+  CHECK(code->IsCode());
+  F3 f = FUNCTION_CAST<F3>(Code::cast(code)->entry());
+  t.a = 1.5e4;
+  t.b = 2.75e8;
+  t.i = 12345678;
+  t.j = -100000;
+  Object* dummy = CALL_GENERATED_CODE(f, &t, 0, 0, 0, 0);
+  USE(dummy);
+
+  CHECK_EQ(12345678.0, t.a);
+  CHECK_EQ(-100000.0, t.b);
+  CHECK_EQ(15000, t.i);
+  CHECK_EQ(275000000, t.j);
 }

@@ -573,63 +561,59 @@ TEST(MIPS7) {
   MacroAssembler assm(isolate, NULL, 0);
   Label neither_is_nan, less_than, outa_here;

-  if (CpuFeatures::IsSupported(FPU)) {
-    CpuFeatureScope scope(&assm, FPU);
+  __ ldc1(f4, MemOperand(a0, OFFSET_OF(T, a)) );
+  __ ldc1(f6, MemOperand(a0, OFFSET_OF(T, b)) );
+  __ c(UN, D, f4, f6);
+  __ bc1f(&neither_is_nan);
+  __ nop();
+  __ sw(zero_reg, MemOperand(a0, OFFSET_OF(T, result)) );
+  __ Branch(&outa_here);

-    __ ldc1(f4, MemOperand(a0, OFFSET_OF(T, a)) );
-    __ ldc1(f6, MemOperand(a0, OFFSET_OF(T, b)) );
-    __ c(UN, D, f4, f6);
-    __ bc1f(&neither_is_nan);
-    __ nop();
-    __ sw(zero_reg, MemOperand(a0, OFFSET_OF(T, result)) );
-    __ Branch(&outa_here);
-
-    __ bind(&neither_is_nan);
-
-    if (kArchVariant == kLoongson) {
-      __ c(OLT, D, f6, f4);
-      __ bc1t(&less_than);
-    } else {
-      __ c(OLT, D, f6, f4, 2);
-      __ bc1t(&less_than, 2);
-    }
-    __ nop();
-    __ sw(zero_reg, MemOperand(a0, OFFSET_OF(T, result)) );
-    __ Branch(&outa_here);
+  __ bind(&neither_is_nan);

-    __ bind(&less_than);
-    __ Addu(t0, zero_reg, Operand(1));
-    __ sw(t0, MemOperand(a0, OFFSET_OF(T, result)) );  // Set true.
+  if (kArchVariant == kLoongson) {
+    __ c(OLT, D, f6, f4);
+    __ bc1t(&less_than);
+  } else {
+    __ c(OLT, D, f6, f4, 2);
+    __ bc1t(&less_than, 2);
+  }
+  __ nop();
+  __ sw(zero_reg, MemOperand(a0, OFFSET_OF(T, result)) );
+  __ Branch(&outa_here);

+  __ bind(&less_than);
+  __ Addu(t0, zero_reg, Operand(1));
+  __ sw(t0, MemOperand(a0, OFFSET_OF(T, result)) );  // Set true.

-    // This test-case should have additional tests.

-    __ bind(&outa_here);
+  // This test-case should have additional tests.

-    __ jr(ra);
-    __ nop();
+  __ bind(&outa_here);

-    CodeDesc desc;
-    assm.GetCode(&desc);
-    Object* code = HEAP->CreateCode(
-        desc,
-        Code::ComputeFlags(Code::STUB),
-        Handle<Code>())->ToObjectChecked();
-    CHECK(code->IsCode());
-    F3 f = FUNCTION_CAST<F3>(Code::cast(code)->entry());
-    t.a = 1.5e14;
-    t.b = 2.75e11;
-    t.c = 2.0;
-    t.d = -4.0;
-    t.e = 0.0;
-    t.f = 0.0;
-    t.result = 0;
-    Object* dummy = CALL_GENERATED_CODE(f, &t, 0, 0, 0, 0);
-    USE(dummy);
-    CHECK_EQ(1.5e14, t.a);
-    CHECK_EQ(2.75e11, t.b);
-    CHECK_EQ(1, t.result);
-  }
+  __ jr(ra);
+  __ nop();
+
+  CodeDesc desc;
+  assm.GetCode(&desc);
+  Object* code = HEAP->CreateCode(
+      desc,
+      Code::ComputeFlags(Code::STUB),
+      Handle<Code>())->ToObjectChecked();
+  CHECK(code->IsCode());
+  F3 f = FUNCTION_CAST<F3>(Code::cast(code)->entry());
+  t.a = 1.5e14;
+  t.b = 2.75e11;
+  t.c = 2.0;
+  t.d = -4.0;
+  t.e = 0.0;
+  t.f = 0.0;
+  t.result = 0;
+  Object* dummy = CALL_GENERATED_CODE(f, &t, 0, 0, 0, 0);
+  USE(dummy);
+  CHECK_EQ(1.5e14, t.a);
+  CHECK_EQ(2.75e11, t.b);
+  CHECK_EQ(1, t.result);
 }

@@ -789,9 +773,7 @@ TEST(MIPS10) {
   Assembler assm(isolate, NULL, 0);
   Label L, C;

-  if (CpuFeatures::IsSupported(FPU) && kArchVariant == kMips32r2) {
-    CpuFeatureScope scope(&assm, FPU);
-
+  if (kArchVariant == kMips32r2) {
     // Load all structure elements to registers.
     __ ldc1(f0, MemOperand(a0, OFFSET_OF(T, a)));

@@ -1097,48 +1079,44 @@ TEST(MIPS13) {

   MacroAssembler assm(isolate, NULL, 0);

-  if (CpuFeatures::IsSupported(FPU)) {
-    CpuFeatureScope scope(&assm, FPU);
-
-    __ sw(t0, MemOperand(a0, OFFSET_OF(T, cvt_small_in)));
-    __ Cvt_d_uw(f10, t0, f22);
-    __ sdc1(f10, MemOperand(a0, OFFSET_OF(T, cvt_small_out)));
+  __ sw(t0, MemOperand(a0, OFFSET_OF(T, cvt_small_in)));
+  __ Cvt_d_uw(f10, t0, f22);
+  __ sdc1(f10, MemOperand(a0, OFFSET_OF(T, cvt_small_out)));

-    __ Trunc_uw_d(f10, f10, f22);
-    __ swc1(f10, MemOperand(a0, OFFSET_OF(T, trunc_small_out)));
+  __ Trunc_uw_d(f10, f10, f22);
+  __ swc1(f10, MemOperand(a0, OFFSET_OF(T, trunc_small_out)));

-    __ sw(t0, MemOperand(a0, OFFSET_OF(T, cvt_big_in)));
-    __ Cvt_d_uw(f8, t0, f22);
-    __ sdc1(f8, MemOperand(a0, OFFSET_OF(T, cvt_big_out)));
+  __ sw(t0, MemOperand(a0, OFFSET_OF(T, cvt_big_in)));
+  __ Cvt_d_uw(f8, t0, f22);
+  __ sdc1(f8, MemOperand(a0, OFFSET_OF(T, cvt_big_out)));

-    __ Trunc_uw_d(f8, f8, f22);
-    __ swc1(f8, MemOperand(a0, OFFSET_OF(T, trunc_big_out)));
+  __ Trunc_uw_d(f8, f8, f22);
+  __ swc1(f8, MemOperand(a0, OFFSET_OF(T, trunc_big_out)));

-    __ jr(ra);
-    __ nop();
+  __ jr(ra);
+  __ nop();

-    CodeDesc desc;
-    assm.GetCode(&desc);
-    Object* code = HEAP->CreateCode(
-        desc,
-        Code::ComputeFlags(Code::STUB),
-        Handle<Code>())->ToObjectChecked();
-    CHECK(code->IsCode());
-    F3 f = FUNCTION_CAST<F3>(Code::cast(code)->entry());
+  CodeDesc desc;
+  assm.GetCode(&desc);
+  Object* code = HEAP->CreateCode(
+      desc,
+      Code::ComputeFlags(Code::STUB),
+      Handle<Code>())->ToObjectChecked();
+  CHECK(code->IsCode());
+  F3 f = FUNCTION_CAST<F3>(Code::cast(code)->entry());

-    t.cvt_big_in = 0xFFFFFFFF;
-    t.cvt_small_in = 333;
+  t.cvt_big_in = 0xFFFFFFFF;
+  t.cvt_small_in = 333;

-    Object* dummy = CALL_GENERATED_CODE(f, &t, 0, 0, 0, 0);
-    USE(dummy);
+  Object* dummy = CALL_GENERATED_CODE(f, &t, 0, 0, 0, 0);
+  USE(dummy);

-    CHECK_EQ(t.cvt_big_out, static_cast<double>(t.cvt_big_in));
-    CHECK_EQ(t.cvt_small_out, static_cast<double>(t.cvt_small_in));
+  CHECK_EQ(t.cvt_big_out, static_cast<double>(t.cvt_big_in));
+  CHECK_EQ(t.cvt_small_out, static_cast<double>(t.cvt_small_in));

-    CHECK_EQ(static_cast<int>(t.trunc_big_out), static_cast<int>(t.cvt_big_in));
-    CHECK_EQ(static_cast<int>(t.trunc_small_out),
-             static_cast<int>(t.cvt_small_in));
-  }
+  CHECK_EQ(static_cast<int>(t.trunc_big_out), static_cast<int>(t.cvt_big_in));
+  CHECK_EQ(static_cast<int>(t.trunc_small_out),
+           static_cast<int>(t.cvt_small_in));
 }

@@ -1181,87 +1159,84 @@ TEST(MIPS14) {

   MacroAssembler assm(isolate, NULL, 0);

-  if (CpuFeatures::IsSupported(FPU)) {
-    CpuFeatureScope scope(&assm, FPU);
-
-    // Save FCSR.
-    __ cfc1(a1, FCSR);
-    // Disable FPU exceptions.
-    __ ctc1(zero_reg, FCSR);
+  // Save FCSR.
+  __ cfc1(a1, FCSR);
+  // Disable FPU exceptions.
+  __ ctc1(zero_reg, FCSR);
 #define RUN_ROUND_TEST(x) \
-    __ ldc1(f0, MemOperand(a0, OFFSET_OF(T, round_up_in))); \
-    __ x##_w_d(f0, f0); \
-    __ swc1(f0, MemOperand(a0, OFFSET_OF(T, x##_up_out))); \
-    \
-    __ ldc1(f0, MemOperand(a0, OFFSET_OF(T, round_down_in))); \
-    __ x##_w_d(f0, f0); \
-    __ swc1(f0, MemOperand(a0, OFFSET_OF(T, x##_down_out))); \
-    \
-    __ ldc1(f0, MemOperand(a0, OFFSET_OF(T, neg_round_up_in))); \
-    __ x##_w_d(f0, f0); \
-    __ swc1(f0, MemOperand(a0, OFFSET_OF(T, neg_##x##_up_out))); \
-    \
-    __ ldc1(f0, MemOperand(a0, OFFSET_OF(T, neg_round_down_in))); \
-    __ x##_w_d(f0, f0); \
-    __ swc1(f0, MemOperand(a0, OFFSET_OF(T, neg_##x##_down_out))); \
-    \
-    __ ldc1(f0, MemOperand(a0, OFFSET_OF(T, err1_in))); \
-    __ ctc1(zero_reg, FCSR); \
-    __ x##_w_d(f0, f0); \
-    __ cfc1(a2, FCSR); \
-    __ sw(a2, MemOperand(a0, OFFSET_OF(T, x##_err1_out))); \
-    \
-    __ ldc1(f0, MemOperand(a0, OFFSET_OF(T, err2_in))); \
-    __ ctc1(zero_reg, FCSR); \
-    __ x##_w_d(f0, f0); \
-    __ cfc1(a2, FCSR); \
-    __ sw(a2, MemOperand(a0, OFFSET_OF(T, x##_err2_out))); \
-    \
-    __ ldc1(f0, MemOperand(a0, OFFSET_OF(T, err3_in))); \
-    __ ctc1(zero_reg, FCSR); \
-    __ x##_w_d(f0, f0); \
-    __ cfc1(a2, FCSR); \
-    __ sw(a2, MemOperand(a0, OFFSET_OF(T, x##_err3_out))); \
-    \
-    __ ldc1(f0, MemOperand(a0, OFFSET_OF(T, err4_in))); \
-    __ ctc1(zero_reg, FCSR); \
-    __ x##_w_d(f0, f0); \
-    __ cfc1(a2, FCSR); \
-    __ sw(a2, MemOperand(a0, OFFSET_OF(T, x##_err4_out))); \
-    __ swc1(f0, MemOperand(a0, OFFSET_OF(T, x##_invalid_result)));
-
-    RUN_ROUND_TEST(round)
-    RUN_ROUND_TEST(floor)
-    RUN_ROUND_TEST(ceil)
-    RUN_ROUND_TEST(trunc)
-    RUN_ROUND_TEST(cvt)
-
-    // Restore FCSR.
-    __ ctc1(a1, FCSR);
+  __ ldc1(f0, MemOperand(a0, OFFSET_OF(T, round_up_in))); \
+  __ x##_w_d(f0, f0); \
+  __ swc1(f0, MemOperand(a0, OFFSET_OF(T, x##_up_out))); \
+  \
+  __ ldc1(f0, MemOperand(a0, OFFSET_OF(T, round_down_in))); \
+  __ x##_w_d(f0, f0); \
+  __ swc1(f0, MemOperand(a0, OFFSET_OF(T, x##_down_out))); \
+  \
+  __ ldc1(f0, MemOperand(a0, OFFSET_OF(T, neg_round_up_in))); \
+  __ x##_w_d(f0, f0); \
+  __ swc1(f0, MemOperand(a0, OFFSET_OF(T, neg_##x##_up_out))); \
+  \
+  __ ldc1(f0, MemOperand(a0, OFFSET_OF(T, neg_round_down_in))); \
+  __ x##_w_d(f0, f0); \
+  __ swc1(f0, MemOperand(a0, OFFSET_OF(T, neg_##x##_down_out))); \
+  \
+  __ ldc1(f0, MemOperand(a0, OFFSET_OF(T, err1_in))); \
+  __ ctc1(zero_reg, FCSR); \
+  __ x##_w_d(f0, f0); \
+  __ cfc1(a2, FCSR); \
+  __ sw(a2, MemOperand(a0, OFFSET_OF(T, x##_err1_out))); \
+  \
+  __ ldc1(f0, MemOperand(a0, OFFSET_OF(T, err2_in))); \
+  __ ctc1(zero_reg, FCSR); \
+  __ x##_w_d(f0, f0); \
+  __ cfc1(a2, FCSR); \
+  __ sw(a2, MemOperand(a0, OFFSET_OF(T, x##_err2_out))); \
+  \
+  __ ldc1(f0, MemOperand(a0, OFFSET_OF(T, err3_in))); \
+  __ ctc1(zero_reg, FCSR); \
+  __ x##_w_d(f0, f0); \
+  __ cfc1(a2, FCSR); \
+  __ sw(a2, MemOperand(a0, OFFSET_OF(T, x##_err3_out))); \
+  \
+  __ ldc1(f0, MemOperand(a0, OFFSET_OF(T, err4_in))); \
+  __ ctc1(zero_reg, FCSR); \
+  __ x##_w_d(f0, f0); \
+  __ cfc1(a2, FCSR); \
+  __ sw(a2, MemOperand(a0, OFFSET_OF(T, x##_err4_out))); \
+  __ swc1(f0, MemOperand(a0, OFFSET_OF(T, x##_invalid_result)));
+
+  RUN_ROUND_TEST(round)
+  RUN_ROUND_TEST(floor)
+  RUN_ROUND_TEST(ceil)
+  RUN_ROUND_TEST(trunc)
+  RUN_ROUND_TEST(cvt)
+
+  // Restore FCSR.
+  __ ctc1(a1, FCSR);

-    __ jr(ra);
-    __ nop();
+  __ jr(ra);
+  __ nop();

-    CodeDesc desc;
-    assm.GetCode(&desc);
-    Object* code = HEAP->CreateCode(
-        desc,
-        Code::ComputeFlags(Code::STUB),
-        Handle<Code>())->ToObjectChecked();
-    CHECK(code->IsCode());
-    F3 f = FUNCTION_CAST<F3>(Code::cast(code)->entry());
+  CodeDesc desc;
+  assm.GetCode(&desc);
+  Object* code = HEAP->CreateCode(
+      desc,
+      Code::ComputeFlags(Code::STUB),
+      Handle<Code>())->ToObjectChecked();
+  CHECK(code->IsCode());
+  F3 f = FUNCTION_CAST<F3>(Code::cast(code)->entry());

-    t.round_up_in = 123.51;
-    t.round_down_in = 123.49;
-    t.neg_round_up_in = -123.5;
-    t.neg_round_down_in = -123.49;
-    t.err1_in = 123.51;
-    t.err2_in = 1;
-    t.err3_in = static_cast<double>(1) + 0xFFFFFFFF;
-    t.err4_in = NAN;
+  t.round_up_in = 123.51;
+  t.round_down_in = 123.49;
+  t.neg_round_up_in = -123.5;
+  t.neg_round_down_in = -123.49;
+  t.err1_in = 123.51;
+  t.err2_in = 1;
+  t.err3_in = static_cast<double>(1) + 0xFFFFFFFF;
+  t.err4_in = NAN;

-    Object* dummy = CALL_GENERATED_CODE(f, &t, 0, 0, 0, 0);
-    USE(dummy);
+  Object* dummy = CALL_GENERATED_CODE(f, &t, 0, 0, 0, 0);
+  USE(dummy);

 #define GET_FPU_ERR(x) (static_cast<int>(x & kFCSRFlagMask))
 #define CHECK_ROUND_RESULT(type) \
@@ -1271,11 +1246,10 @@ TEST(MIPS14) {
   CHECK(GET_FPU_ERR(t.type##_err4_out) & kFCSRInvalidOpFlagMask); \
   CHECK_EQ(kFPUInvalidResult, t.type##_invalid_result);

-    CHECK_ROUND_RESULT(round);
-    CHECK_ROUND_RESULT(floor);
-    CHECK_ROUND_RESULT(ceil);
-    CHECK_ROUND_RESULT(cvt);
-  }
+  CHECK_ROUND_RESULT(round);
+  CHECK_ROUND_RESULT(floor);
+  CHECK_ROUND_RESULT(ceil);
+  CHECK_ROUND_RESULT(cvt);
 }
