OLD | NEW |
1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 1487 matching lines...)
1498 flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_POINTER_SPACE); | 1498 flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_POINTER_SPACE); |
1499 } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) { | 1499 } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) { |
1500 ASSERT(!instr->hydrogen()->IsNewSpaceAllocation()); | 1500 ASSERT(!instr->hydrogen()->IsNewSpaceAllocation()); |
1501 flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_DATA_SPACE); | 1501 flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_DATA_SPACE); |
1502 } | 1502 } |
1503 | 1503 |
1504 if (instr->size()->IsConstantOperand()) { | 1504 if (instr->size()->IsConstantOperand()) { |
1505 int32_t size = ToInteger32(LConstantOperand::cast(instr->size())); | 1505 int32_t size = ToInteger32(LConstantOperand::cast(instr->size())); |
1506 __ Allocate(size, result, temp1, temp2, deferred->entry(), flags); | 1506 __ Allocate(size, result, temp1, temp2, deferred->entry(), flags); |
1507 } else { | 1507 } else { |
1508 Register size = ToRegister(instr->size()); | 1508 Register size = ToRegister32(instr->size()); |
1509 __ Allocate(size, result, temp1, temp2, deferred->entry(), flags); | 1509 __ Sxtw(size.X(), size); |
| 1510 __ Allocate(size.X(), result, temp1, temp2, deferred->entry(), flags); |
1510 } | 1511 } |
1511 | 1512 |
1512 __ Bind(deferred->exit()); | 1513 __ Bind(deferred->exit()); |
1513 | 1514 |
1514 if (instr->hydrogen()->MustPrefillWithFiller()) { | 1515 if (instr->hydrogen()->MustPrefillWithFiller()) { |
1515 if (instr->size()->IsConstantOperand()) { | 1516 if (instr->size()->IsConstantOperand()) { |
1516 int32_t size = ToInteger32(LConstantOperand::cast(instr->size())); | 1517 int32_t size = ToInteger32(LConstantOperand::cast(instr->size())); |
1517 __ Mov(temp1, size - kPointerSize); | 1518 __ Mov(temp1, size - kPointerSize); |
1518 } else { | 1519 } else { |
1519 __ Sub(temp1, ToRegister(instr->size()), kPointerSize); | 1520 __ Sub(temp1.W(), ToRegister32(instr->size()), kPointerSize); |
1520 } | 1521 } |
1521 __ Sub(result, result, kHeapObjectTag); | 1522 __ Sub(result, result, kHeapObjectTag); |
1522 | 1523 |
1523 // TODO(jbramley): Optimize this loop using stp. | 1524 // TODO(jbramley): Optimize this loop using stp. |
1524 Label loop; | 1525 Label loop; |
1525 __ Bind(&loop); | 1526 __ Bind(&loop); |
1526 __ Mov(temp2, Operand(isolate()->factory()->one_pointer_filler_map())); | 1527 __ Mov(temp2, Operand(isolate()->factory()->one_pointer_filler_map())); |
1527 __ Str(temp2, MemOperand(result, temp1)); | 1528 __ Str(temp2, MemOperand(result, temp1)); |
1528 __ Subs(temp1, temp1, kPointerSize); | 1529 __ Subs(temp1, temp1, kPointerSize); |
1529 __ B(ge, &loop); | 1530 __ B(ge, &loop); |
1530 | 1531 |
1531 __ Add(result, result, kHeapObjectTag); | 1532 __ Add(result, result, kHeapObjectTag); |
1532 } | 1533 } |
1533 } | 1534 } |
1534 | 1535 |
1535 | 1536 |
1536 void LCodeGen::DoDeferredAllocate(LAllocate* instr) { | 1537 void LCodeGen::DoDeferredAllocate(LAllocate* instr) { |
1537 // TODO(3095996): Get rid of this. For now, we need to make the | 1538 // TODO(3095996): Get rid of this. For now, we need to make the |
1538 // result register contain a valid pointer because it is already | 1539 // result register contain a valid pointer because it is already |
1539 // contained in the register pointer map. | 1540 // contained in the register pointer map. |
1540 __ Mov(ToRegister(instr->result()), Operand(Smi::FromInt(0))); | 1541 __ Mov(ToRegister(instr->result()), Operand(Smi::FromInt(0))); |
1541 | 1542 |
1542 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters); | 1543 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters); |
1543 // We're in a SafepointRegistersScope so we can use any scratch registers. | 1544 // We're in a SafepointRegistersScope so we can use any scratch registers. |
1544 Register size = x0; | 1545 Register size = x0; |
1545 if (instr->size()->IsConstantOperand()) { | 1546 if (instr->size()->IsConstantOperand()) { |
1546 __ Mov(size, Operand(ToSmi(LConstantOperand::cast(instr->size())))); | 1547 __ Mov(size, Operand(ToSmi(LConstantOperand::cast(instr->size())))); |
1547 } else { | 1548 } else { |
1548 __ SmiTag(size, ToRegister(instr->size())); | 1549 __ SmiTag(size, ToRegister32(instr->size()).X()); |
1549 } | 1550 } |
1550 int flags = AllocateDoubleAlignFlag::encode( | 1551 int flags = AllocateDoubleAlignFlag::encode( |
1551 instr->hydrogen()->MustAllocateDoubleAligned()); | 1552 instr->hydrogen()->MustAllocateDoubleAligned()); |
1552 if (instr->hydrogen()->IsOldPointerSpaceAllocation()) { | 1553 if (instr->hydrogen()->IsOldPointerSpaceAllocation()) { |
1553 ASSERT(!instr->hydrogen()->IsOldDataSpaceAllocation()); | 1554 ASSERT(!instr->hydrogen()->IsOldDataSpaceAllocation()); |
1554 ASSERT(!instr->hydrogen()->IsNewSpaceAllocation()); | 1555 ASSERT(!instr->hydrogen()->IsNewSpaceAllocation()); |
1555 flags = AllocateTargetSpace::update(flags, OLD_POINTER_SPACE); | 1556 flags = AllocateTargetSpace::update(flags, OLD_POINTER_SPACE); |
1556 } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) { | 1557 } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) { |
1557 ASSERT(!instr->hydrogen()->IsNewSpaceAllocation()); | 1558 ASSERT(!instr->hydrogen()->IsNewSpaceAllocation()); |
1558 flags = AllocateTargetSpace::update(flags, OLD_DATA_SPACE); | 1559 flags = AllocateTargetSpace::update(flags, OLD_DATA_SPACE); |
(...skipping 4158 matching lines...)
5717 __ Bind(&out_of_object); | 5718 __ Bind(&out_of_object); |
5718 __ Ldr(result, FieldMemOperand(object, JSObject::kPropertiesOffset)); | 5719 __ Ldr(result, FieldMemOperand(object, JSObject::kPropertiesOffset)); |
5719 // Index is equal to negated out of object property index plus 1. | 5720 // Index is equal to negated out of object property index plus 1. |
5720 __ Sub(result, result, Operand::UntagSmiAndScale(index, kPointerSizeLog2)); | 5721 __ Sub(result, result, Operand::UntagSmiAndScale(index, kPointerSizeLog2)); |
5721 __ Ldr(result, FieldMemOperand(result, | 5722 __ Ldr(result, FieldMemOperand(result, |
5722 FixedArray::kHeaderSize - kPointerSize)); | 5723 FixedArray::kHeaderSize - kPointerSize)); |
5723 __ Bind(&done); | 5724 __ Bind(&done); |
5724 } | 5725 } |
5725 | 5726 |
5726 } } // namespace v8::internal | 5727 } } // namespace v8::internal |
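
A note on the register-width change in DoAllocate above (new lines 1508-1510): the allocation size is now read through ToRegister32 as a 32-bit (W) value and sign-extended with Sxtw before the X view is passed to Allocate, presumably so that a signed 32-bit size widens to the matching 64-bit value rather than being reinterpreted together with whatever occupies the upper half of the register. The standalone C++ sketch below only illustrates that widening step; it is not V8 code, and the helper name SignExtendWToX and the sample bit pattern are invented for the example.

// Illustration only (not V8 code): the effect of sign-extending a W view
// into an X register, as the Sxtw above does for the allocation size.
#include <cstdint>
#include <cstdio>

static int64_t SignExtendWToX(uint64_t x_reg_bits) {
  // Keep only the low 32 bits (the W view), then widen them as a signed value.
  int32_t w_view = static_cast<int32_t>(x_reg_bits & 0xFFFFFFFFu);
  return static_cast<int64_t>(w_view);
}

int main() {
  // Hypothetical register contents: the W view holds -32 as an int32.
  uint64_t x_reg_bits = 0x00000000FFFFFFE0ull;
  printf("raw 64-bit view:    %llu\n", (unsigned long long)x_reg_bits);
  printf("sign-extended size: %lld\n", (long long)SignExtendWToX(x_reg_bits));
  return 0;
}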
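
In the out-of-object path of the second hunk above, the comment notes that the index is the negated out-of-object property index plus one; the Sub/Ldr pair then subtracts the untagged, scaled index from the properties pointer and loads at FixedArray::kHeaderSize - kPointerSize, which resolves to slot -index - 1 of the properties array. The standalone sketch below reproduces only that arithmetic with plain ints instead of Smis; it is not V8 code, and the helper names and constants are assumptions for illustration.

// Illustration only (not V8 code): the index arithmetic behind the
// out-of-object property load above. Constants and helper names are
// assumptions for the example; V8's real values live in its own headers.
#include <cassert>

static const int kPtrSize = 8;      // pointer size assumed for arm64
static const int kHeaderSize = 16;  // illustrative FixedArray header size

// The encoded index is -(property_index + 1), so the slot is -index - 1.
static int PropertySlot(int index) { return -index - 1; }

// Mirrors the Sub + Ldr: subtract index * kPtrSize from the properties
// pointer, then load at kHeaderSize - kPtrSize past it.
static int SlotByteOffset(int index) {
  return (kHeaderSize - kPtrSize) + (-index) * kPtrSize;
}

int main() {
  for (int slot = 0; slot < 4; ++slot) {
    int index = -(slot + 1);  // encoded form of this out-of-object slot
    assert(PropertySlot(index) == slot);
    assert(SlotByteOffset(index) == kHeaderSize + slot * kPtrSize);
  }
  return 0;
}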