OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 1384 matching lines...)
1395 if (type.IsBoolean()) { | 1395 if (type.IsBoolean()) { |
1396 __ CompareRoot(reg, Heap::kTrueValueRootIndex); | 1396 __ CompareRoot(reg, Heap::kTrueValueRootIndex); |
1397 EmitBranch(true_block, false_block, equal); | 1397 EmitBranch(true_block, false_block, equal); |
1398 } else if (type.IsSmi()) { | 1398 } else if (type.IsSmi()) { |
1399 __ SmiCompare(reg, Smi::FromInt(0)); | 1399 __ SmiCompare(reg, Smi::FromInt(0)); |
1400 EmitBranch(true_block, false_block, not_equal); | 1400 EmitBranch(true_block, false_block, not_equal); |
1401 } else { | 1401 } else { |
1402 Label* true_label = chunk_->GetAssemblyLabel(true_block); | 1402 Label* true_label = chunk_->GetAssemblyLabel(true_block); |
1403 Label* false_label = chunk_->GetAssemblyLabel(false_block); | 1403 Label* false_label = chunk_->GetAssemblyLabel(false_block); |
1404 | 1404 |
1405 __ CompareRoot(reg, Heap::kUndefinedValueRootIndex); | 1405 ToBooleanStub::Types expected = instr->hydrogen()->expected_input_types(); |
1406 __ j(equal, false_label); | 1406 // Avoid deopts in the case where we've never executed this path before. |
1407 __ CompareRoot(reg, Heap::kTrueValueRootIndex); | 1407 if (expected.IsEmpty()) expected = ToBooleanStub::all_types(); |
1408 __ j(equal, true_label); | |
1409 __ CompareRoot(reg, Heap::kFalseValueRootIndex); | |
1410 __ j(equal, false_label); | |
1411 __ Cmp(reg, Smi::FromInt(0)); | |
1412 __ j(equal, false_label); | |
1413 __ JumpIfSmi(reg, true_label); | |
1414 | 1408 |
1415 // Test for double values. Plus/minus zero and NaN are false. | 1409 if (expected.Contains(ToBooleanStub::UNDEFINED)) { |
1416 Label call_stub; | 1410 // undefined -> false. |
1417 __ CompareRoot(FieldOperand(reg, HeapObject::kMapOffset), | 1411 __ CompareRoot(reg, Heap::kUndefinedValueRootIndex); |
1418 Heap::kHeapNumberMapRootIndex); | 1412 __ j(equal, false_label); |
1419 __ j(not_equal, &call_stub, Label::kNear); | 1413 } else if (expected.Contains(ToBooleanStub::INTERNAL_OBJECT)) { |
| 1414 // We've seen undefined for the first time -> deopt. |
| 1415 __ CompareRoot(reg, Heap::kUndefinedValueRootIndex); |
| 1416 DeoptimizeIf(equal, instr->environment()); |
| 1417 } |
1420 | 1418 |
1421 // HeapNumber => false iff +0, -0, or NaN. These three cases set the | 1419 if (expected.Contains(ToBooleanStub::BOOLEAN)) { |
1422 // zero flag when compared to zero using ucomisd. | 1420 // true -> true. |
1423 __ xorps(xmm0, xmm0); | 1421 __ CompareRoot(reg, Heap::kTrueValueRootIndex); |
1424 __ ucomisd(xmm0, FieldOperand(reg, HeapNumber::kValueOffset)); | 1422 __ j(equal, true_label); |
1425 __ j(zero, false_label); | 1423 } else if (expected.Contains(ToBooleanStub::INTERNAL_OBJECT)) { |
1426 __ jmp(true_label); | 1424 // We've seen a boolean for the first time -> deopt. |
| 1425 __ CompareRoot(reg, Heap::kTrueValueRootIndex); |
| 1426 DeoptimizeIf(equal, instr->environment()); |
| 1427 } |
1427 | 1428 |
1428 // The conversion stub doesn't cause garbage collections so it's | 1429 if (expected.Contains(ToBooleanStub::BOOLEAN)) { |
1429 // safe to not record a safepoint after the call. | 1430 // false -> false. |
1430 __ bind(&call_stub); | 1431 __ CompareRoot(reg, Heap::kFalseValueRootIndex); |
1431 ToBooleanStub stub(rax); | 1432 __ j(equal, false_label); |
1432 __ Pushad(); | 1433 } else if (expected.Contains(ToBooleanStub::INTERNAL_OBJECT)) { |
1433 __ push(reg); | 1434 // We've seen a boolean for the first time -> deopt. |
1434 __ CallStub(&stub); | 1435 __ CompareRoot(reg, Heap::kFalseValueRootIndex); |
1435 __ testq(rax, rax); | 1436 DeoptimizeIf(equal, instr->environment()); |
1436 __ Popad(); | 1437 } |
1437 EmitBranch(true_block, false_block, not_zero); | 1438 |
| 1439 if (expected.Contains(ToBooleanStub::NULL_TYPE)) { |
| 1440 // 'null' -> false. |
| 1441 __ CompareRoot(reg, Heap::kNullValueRootIndex); |
| 1442 __ j(equal, false_label); |
| 1443 } else if (expected.Contains(ToBooleanStub::INTERNAL_OBJECT)) { |
| 1444 // We've seen null for the first time -> deopt. |
| 1445 __ CompareRoot(reg, Heap::kNullValueRootIndex); |
| 1446 DeoptimizeIf(equal, instr->environment()); |
| 1447 } |
| 1448 |
| 1449 if (expected.Contains(ToBooleanStub::SMI)) { |
| 1450 // Smis: 0 -> false, all other -> true. |
| 1451 __ Cmp(reg, Smi::FromInt(0)); |
| 1452 __ j(equal, false_label); |
| 1453 __ JumpIfSmi(reg, true_label); |
| 1454 } else if (expected.NeedsMap()) { |
| 1455 // If we need a map later and have a Smi -> deopt. |
| 1456 __ testb(reg, Immediate(kSmiTagMask)); |
| 1457 DeoptimizeIf(zero, instr->environment()); |
| 1458 } |
| 1459 |
| 1460 const Register map = kScratchRegister; |
| 1461 if (expected.NeedsMap()) { |
| 1462 __ movq(map, FieldOperand(reg, HeapObject::kMapOffset)); |
| 1463 // Everything with a map could be undetectable, so check this now. |
| 1464 __ testb(FieldOperand(map, Map::kBitFieldOffset), |
| 1465 Immediate(1 << Map::kIsUndetectable)); |
| 1466 // Undetectable -> false. |
| 1467 __ j(not_zero, false_label); |
| 1468 } |
| 1469 |
| 1470 if (expected.Contains(ToBooleanStub::SPEC_OBJECT)) { |
| 1471 // spec object -> true. |
| 1472 __ CmpInstanceType(map, FIRST_SPEC_OBJECT_TYPE); |
| 1473 __ j(above_equal, true_label); |
| 1474 } else if (expected.Contains(ToBooleanStub::INTERNAL_OBJECT)) { |
| 1475 // We've seen a spec object for the first time -> deopt. |
| 1476 __ CmpInstanceType(map, FIRST_SPEC_OBJECT_TYPE); |
| 1477 DeoptimizeIf(above_equal, instr->environment()); |
| 1478 } |
| 1479 |
| 1480 if (expected.Contains(ToBooleanStub::STRING)) { |
| 1481 // String value -> false iff empty. |
| 1482 Label not_string; |
| 1483 __ CmpInstanceType(map, FIRST_NONSTRING_TYPE); |
| 1484 __ j(above_equal, &not_string, Label::kNear); |
| 1485 __ cmpq(FieldOperand(reg, String::kLengthOffset), Immediate(0)); |
| 1486 __ j(not_zero, true_label); |
| 1487 __ jmp(false_label); |
| 1488 __ bind(&not_string); |
| 1489 } else if (expected.Contains(ToBooleanStub::INTERNAL_OBJECT)) { |
| 1490 // We've seen a string for the first time -> deopt. |
| 1491 __ CmpInstanceType(map, FIRST_NONSTRING_TYPE); |
| 1492 DeoptimizeIf(below, instr->environment()); |
| 1493 } |
| 1494 |
| 1495 if (expected.Contains(ToBooleanStub::HEAP_NUMBER)) { |
| 1496 // heap number -> false iff +0, -0, or NaN. |
| 1497 Label not_heap_number; |
| 1498 __ CompareRoot(map, Heap::kHeapNumberMapRootIndex); |
| 1499 __ j(not_equal, &not_heap_number, Label::kNear); |
| 1500 __ xorps(xmm0, xmm0); |
| 1501 __ ucomisd(xmm0, FieldOperand(reg, HeapNumber::kValueOffset)); |
| 1502 __ j(zero, false_label); |
| 1503 __ jmp(true_label); |
| 1504 __ bind(&not_heap_number); |
| 1505 } else if (expected.Contains(ToBooleanStub::INTERNAL_OBJECT)) { |
| 1506 // We've seen a heap number for the first time -> deopt. |
| 1507 __ CompareRoot(map, Heap::kHeapNumberMapRootIndex); |
| 1508 DeoptimizeIf(equal, instr->environment()); |
| 1509 } |
| 1510 |
| 1511 if (expected.Contains(ToBooleanStub::INTERNAL_OBJECT)) { |
| 1512 // internal objects -> true. |
| 1513 __ jmp(true_label); |
| 1514 } else { |
| 1515 // We've seen something for the first time -> deopt. |
| 1516 DeoptimizeIf(no_condition, instr->environment()); |
| 1517 } |
1438 } | 1518 } |
1439 } | 1519 } |
1440 } | 1520 } |
1441 | 1521 |
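The rewritten DoBranch above replaces the unconditional ToBooleanStub call with a chain of cheap checks driven by recorded type feedback, deoptimizing the first time an unexpected input type shows up. As a reading aid, here is a minimal standalone C++ sketch (C++14) of the truth table those checks implement; the Value struct and its tags are hypothetical stand-ins for V8's tagged values, not V8 API:

#include <cmath>
#include <cstdio>
#include <string>

// Hypothetical stand-in for a tagged V8 value (not V8 API).
enum class Tag { kUndefined, kNull, kTrue, kFalse, kSmi, kHeapNumber,
                 kString, kSpecObject, kUndetectable };

struct Value {
  Tag tag;
  int smi = 0;          // payload when tag == kSmi
  double number = 0.0;  // payload when tag == kHeapNumber
  std::string str;      // payload when tag == kString
};

// Same decision order as the emitted code: each case either settles the
// branch or falls through to the next check.
bool ToBoolean(const Value& v) {
  switch (v.tag) {
    case Tag::kUndefined:    return false;           // undefined -> false.
    case Tag::kTrue:         return true;            // true -> true.
    case Tag::kFalse:        return false;           // false -> false.
    case Tag::kNull:         return false;           // 'null' -> false.
    case Tag::kSmi:          return v.smi != 0;      // 0 -> false, else true.
    case Tag::kUndetectable: return false;           // undetectable -> false.
    case Tag::kSpecObject:   return true;            // spec object -> true.
    case Tag::kString:       return !v.str.empty();  // false iff empty.
    case Tag::kHeapNumber:
      // false iff +0, -0, or NaN: exactly the inputs for which ucomisd
      // against zero sets the zero flag (unordered comparisons set ZF too).
      return !(v.number == 0.0 || std::isnan(v.number));
  }
  return false;  // Unreachable; keeps the compiler happy.
}

int main() {
  printf("%d\n", ToBoolean(Value{Tag::kSmi, 7}));               // 1
  printf("%d\n", ToBoolean(Value{Tag::kHeapNumber, 0, -0.0}));  // 0
  printf("%d\n", ToBoolean(Value{Tag::kString, 0, 0.0, ""}));   // 0
}

The ucomisd comment explains why a single zero-flag test suffices for the heap-number case: +0, -0, and NaN are the only doubles that leave ZF set after comparing against zero.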
1442 | 1522 |
1443 void LCodeGen::EmitGoto(int block) { | 1523 void LCodeGen::EmitGoto(int block) { |
1444 block = chunk_->LookupDestination(block); | 1524 block = chunk_->LookupDestination(block); |
1445 int next_block = GetNextEmittedBlock(current_block_); | 1525 int next_block = GetNextEmittedBlock(current_block_); |
1446 if (block != next_block) { | 1526 if (block != next_block) { |
1447 __ jmp(chunk_->GetAssemblyLabel(block)); | 1527 __ jmp(chunk_->GetAssemblyLabel(block)); |
(...skipping 2656 matching lines...)
4104 RegisterEnvironmentForDeoptimization(environment); | 4184 RegisterEnvironmentForDeoptimization(environment); |
4105 ASSERT(osr_pc_offset_ == -1); | 4185 ASSERT(osr_pc_offset_ == -1); |
4106 osr_pc_offset_ = masm()->pc_offset(); | 4186 osr_pc_offset_ = masm()->pc_offset(); |
4107 } | 4187 } |
4108 | 4188 |
4109 #undef __ | 4189 #undef __ |
4110 | 4190 |
4111 } } // namespace v8::internal | 4191 } } // namespace v8::internal |
4112 | 4192 |
4113 #endif // V8_TARGET_ARCH_X64 | 4193 #endif // V8_TARGET_ARCH_X64 |
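The feedback set consulted in DoBranch (IsEmpty, Contains, NeedsMap, all_types) is not spelled out in this diff. As a rough model only, with the representation and the NeedsMap rule assumed rather than taken from the V8 sources, ToBooleanStub::Types can be pictured as a byte-wide bitset over the observed input types:

#include <cstdint>
#include <cstdio>

// Hedged sketch, not the real V8 declarations.
enum Type { UNDEFINED, BOOLEAN, NULL_TYPE, SMI, SPEC_OBJECT, STRING,
            HEAP_NUMBER, INTERNAL_OBJECT, NUMBER_OF_TYPES };

class Types {
 public:
  Types() : bits_(0) {}
  explicit Types(uint8_t bits) : bits_(bits) {}
  bool IsEmpty() const { return bits_ == 0; }
  bool Contains(Type t) const { return (bits_ & (1 << t)) != 0; }
  void Add(Type t) { bits_ |= static_cast<uint8_t>(1 << t); }
  // A map (and hence instance-type) load is only required for the
  // heap-object cases; oddballs and Smis are recognized by value.
  bool NeedsMap() const {
    return Contains(SPEC_OBJECT) || Contains(STRING) ||
           Contains(HEAP_NUMBER) || Contains(INTERNAL_OBJECT);
  }
  static Types all_types() { return Types((1 << NUMBER_OF_TYPES) - 1); }
 private:
  uint8_t bits_;
};

int main() {
  Types seen;
  // Avoid deopts when this path has never executed: assume everything.
  if (seen.IsEmpty()) seen = Types::all_types();
  printf("NeedsMap: %d\n", seen.NeedsMap());  // 1
  seen = Types();
  seen.Add(SMI);
  printf("Contains(SMI): %d, NeedsMap: %d\n",
         seen.Contains(SMI), seen.NeedsMap());  // 1, 0
}

Under this model, NeedsMap() is what lets the emitted code skip loading the map register entirely when only oddballs and Smis have been observed on the branch.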