OLD | NEW |
1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include <assert.h> // For assert | 5 #include <assert.h> // For assert |
6 #include <limits.h> // For LONG_MIN, LONG_MAX. | 6 #include <limits.h> // For LONG_MIN, LONG_MAX. |
7 | 7 |
8 #include "src/v8.h" | 8 #include "src/v8.h" |
9 | 9 |
10 #if V8_TARGET_ARCH_PPC | 10 #if V8_TARGET_ARCH_PPC |
(...skipping 1366 matching lines...)
1377 cmp(result, ip); | 1377 cmp(result, ip); |
1378 Check(eq, kUnexpectedAllocationTop); | 1378 Check(eq, kUnexpectedAllocationTop); |
1379 } | 1379 } |
1380 // Load allocation limit into ip. Result already contains allocation top. | 1380 // Load allocation limit into ip. Result already contains allocation top. |
1381 LoadP(ip, MemOperand(topaddr, limit - top), r0); | 1381 LoadP(ip, MemOperand(topaddr, limit - top), r0); |
1382 } | 1382 } |
1383 | 1383 |
1384 if ((flags & DOUBLE_ALIGNMENT) != 0) { | 1384 if ((flags & DOUBLE_ALIGNMENT) != 0) { |
1385 // Align the next allocation. Storing the filler map without checking top is | 1385 // Align the next allocation. Storing the filler map without checking top is |
1386 // safe in new-space because the limit of the heap is aligned there. | 1386 // safe in new-space because the limit of the heap is aligned there. |
1387 DCHECK((flags & PRETENURE_OLD_POINTER_SPACE) == 0); | 1387 DCHECK((flags & PRETENURE_OLD_SPACE) == 0); |
1388 #if V8_TARGET_ARCH_PPC64 | 1388 #if V8_TARGET_ARCH_PPC64 |
1389 STATIC_ASSERT(kPointerAlignment == kDoubleAlignment); | 1389 STATIC_ASSERT(kPointerAlignment == kDoubleAlignment); |
1390 #else | 1390 #else |
1391 STATIC_ASSERT(kPointerAlignment * 2 == kDoubleAlignment); | 1391 STATIC_ASSERT(kPointerAlignment * 2 == kDoubleAlignment); |
1392 andi(scratch2, result, Operand(kDoubleAlignmentMask)); | 1392 andi(scratch2, result, Operand(kDoubleAlignmentMask)); |
1393 Label aligned; | 1393 Label aligned; |
1394 beq(&aligned, cr0); | 1394 beq(&aligned, cr0); |
1395 if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) { | 1395 if ((flags & PRETENURE) != 0) { |
1396 cmpl(result, ip); | 1396 cmpl(result, ip); |
1397 bge(gc_required); | 1397 bge(gc_required); |
1398 } | 1398 } |
1399 mov(scratch2, Operand(isolate()->factory()->one_pointer_filler_map())); | 1399 mov(scratch2, Operand(isolate()->factory()->one_pointer_filler_map())); |
1400 stw(scratch2, MemOperand(result)); | 1400 stw(scratch2, MemOperand(result)); |
1401 addi(result, result, Operand(kDoubleSize / 2)); | 1401 addi(result, result, Operand(kDoubleSize / 2)); |
1402 bind(&aligned); | 1402 bind(&aligned); |
1403 #endif | 1403 #endif |
1404 } | 1404 } |
1405 | 1405 |
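
The DOUBLE_ALIGNMENT path above is the 32-bit case: when the bump pointer is only pointer-aligned, a one-word filler is stored at the current allocation top and the pointer is advanced by kDoubleSize / 2 so the real object starts on an 8-byte boundary. The limit check before the store is only emitted when pretenuring, because a new-space limit is itself double-aligned. Below is a minimal C++ sketch of that technique, not V8's actual implementation; BumpAllocator, AlignForDouble and the filler value are illustrative assumptions.

#include <cstddef>
#include <cstdint>

constexpr std::size_t kPointerSize = 4;  // 32-bit target, as in the #else branch
constexpr std::size_t kDoubleSize = 8;
constexpr std::uintptr_t kDoubleAlignmentMask = kDoubleSize - 1;
constexpr std::uint32_t kOnePointerFillerMap = 0xF1F1F1F1;  // stand-in marker value
static_assert(kPointerSize * 2 == kDoubleSize, "mirrors the STATIC_ASSERT in the diff");

struct BumpAllocator {
  std::uintptr_t top;    // next free address (allocation top)
  std::uintptr_t limit;  // end of the space (allocation limit)
};

// Returns the double-aligned start address, or 0 when a GC would be required.
// Checking the limit before storing the filler is only needed when pretenuring;
// in new space the limit is double-aligned, so the one-word store cannot
// overrun it (the "safe in new-space" comment in the diff).
inline std::uintptr_t AlignForDouble(BumpAllocator* space, bool pretenure) {
  if ((space->top & kDoubleAlignmentMask) != 0) {
    if (pretenure && space->top >= space->limit) return 0;  // gc_required
    *reinterpret_cast<std::uint32_t*>(space->top) = kOnePointerFillerMap;
    space->top += kDoubleSize / 2;  // skip one pointer-sized filler word
  }
  return space->top;
}

In the emitted sequence, `result` plays the role of `top`, `ip` holds the limit, and the conditional branch to `gc_required` corresponds to the early return.
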
(...skipping 70 matching lines...)
1476 cmp(result, ip); | 1476 cmp(result, ip); |
1477 Check(eq, kUnexpectedAllocationTop); | 1477 Check(eq, kUnexpectedAllocationTop); |
1478 } | 1478 } |
1479 // Load allocation limit into ip. Result already contains allocation top. | 1479 // Load allocation limit into ip. Result already contains allocation top. |
1480 LoadP(ip, MemOperand(topaddr, limit - top)); | 1480 LoadP(ip, MemOperand(topaddr, limit - top)); |
1481 } | 1481 } |
1482 | 1482 |
1483 if ((flags & DOUBLE_ALIGNMENT) != 0) { | 1483 if ((flags & DOUBLE_ALIGNMENT) != 0) { |
1484 // Align the next allocation. Storing the filler map without checking top is | 1484 // Align the next allocation. Storing the filler map without checking top is |
1485 // safe in new-space because the limit of the heap is aligned there. | 1485 // safe in new-space because the limit of the heap is aligned there. |
1486 DCHECK((flags & PRETENURE_OLD_POINTER_SPACE) == 0); | 1486 DCHECK((flags & PRETENURE_OLD_SPACE) == 0); |
1487 #if V8_TARGET_ARCH_PPC64 | 1487 #if V8_TARGET_ARCH_PPC64 |
1488 STATIC_ASSERT(kPointerAlignment == kDoubleAlignment); | 1488 STATIC_ASSERT(kPointerAlignment == kDoubleAlignment); |
1489 #else | 1489 #else |
1490 STATIC_ASSERT(kPointerAlignment * 2 == kDoubleAlignment); | 1490 STATIC_ASSERT(kPointerAlignment * 2 == kDoubleAlignment); |
1491 andi(scratch2, result, Operand(kDoubleAlignmentMask)); | 1491 andi(scratch2, result, Operand(kDoubleAlignmentMask)); |
1492 Label aligned; | 1492 Label aligned; |
1493 beq(&aligned, cr0); | 1493 beq(&aligned, cr0); |
1494 if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) { | 1494 if ((flags & PRETENURE) != 0) { |
1495 cmpl(result, ip); | 1495 cmpl(result, ip); |
1496 bge(gc_required); | 1496 bge(gc_required); |
1497 } | 1497 } |
1498 mov(scratch2, Operand(isolate()->factory()->one_pointer_filler_map())); | 1498 mov(scratch2, Operand(isolate()->factory()->one_pointer_filler_map())); |
1499 stw(scratch2, MemOperand(result)); | 1499 stw(scratch2, MemOperand(result)); |
1500 addi(result, result, Operand(kDoubleSize / 2)); | 1500 addi(result, result, Operand(kDoubleSize / 2)); |
1501 bind(&aligned); | 1501 bind(&aligned); |
1502 #endif | 1502 #endif |
1503 } | 1503 } |
1504 | 1504 |
(...skipping 3037 matching lines...)
4542 } | 4542 } |
4543 if (mag.shift > 0) srawi(result, result, mag.shift); | 4543 if (mag.shift > 0) srawi(result, result, mag.shift); |
4544 ExtractBit(r0, dividend, 31); | 4544 ExtractBit(r0, dividend, 31); |
4545 add(result, result, r0); | 4545 add(result, result, r0); |
4546 } | 4546 } |
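
The three instructions above (conditional srawi, ExtractBit of bit 31, add) are the standard tail of truncating signed division by a constant via a precomputed magic multiplier: shift the high half of the product right by mag.shift, then add 1 when the dividend is negative. A minimal sketch for an assumed divisor of 7, whose well-known multiplier/shift pair is 0x92492493 / 2; the function name and the inlined add-back of the dividend are illustrative, not V8's API (V8 derives `mag` at runtime, and the add-back is presumably handled in the lines skipped above).

#include <cstdint>

// Truncating signed division by 7 without a divide instruction.
inline std::int32_t DivideBy7(std::int32_t dividend) {
  const std::int32_t kMagic = static_cast<std::int32_t>(0x92492493u);
  const int kShift = 2;
  // High 32 bits of the signed 64-bit product (what a mulhw-style
  // high-word multiply yields).
  std::int32_t result = static_cast<std::int32_t>(
      (static_cast<std::int64_t>(kMagic) * dividend) >> 32);
  // The magic constant for 7 is negative, so the dividend is added back.
  result += dividend;
  if (kShift > 0) result >>= kShift;  // srawi(result, result, mag.shift)
  // Add 1 for negative dividends:
  // ExtractBit(r0, dividend, 31); add(result, result, r0)
  result += static_cast<std::int32_t>(static_cast<std::uint32_t>(dividend) >> 31);
  return result;
}

For example, DivideBy7(20) == 2 and DivideBy7(-20) == -2, matching C++'s truncating '/'.
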
4547 | 4547 |
4548 } // namespace internal | 4548 } // namespace internal |
4549 } // namespace v8 | 4549 } // namespace v8 |
4550 | 4550 |
4551 #endif // V8_TARGET_ARCH_PPC | 4551 #endif // V8_TARGET_ARCH_PPC |