Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/crankshaft/ppc/lithium-codegen-ppc.h" | 5 #include "src/crankshaft/ppc/lithium-codegen-ppc.h" |
| 6 | 6 |
| 7 #include "src/base/bits.h" | 7 #include "src/base/bits.h" |
| 8 #include "src/code-factory.h" | 8 #include "src/code-factory.h" |
| 9 #include "src/code-stubs.h" | 9 #include "src/code-stubs.h" |
| 10 #include "src/crankshaft/hydrogen-osr.h" | 10 #include "src/crankshaft/hydrogen-osr.h" |
| (...skipping 5327 matching lines...) | |
| 5338 #else | 5338 #else |
| 5339 __ MovInt64ToDouble(result_reg, hi_reg, lo_reg); | 5339 __ MovInt64ToDouble(result_reg, hi_reg, lo_reg); |
| 5340 #endif | 5340 #endif |
| 5341 } | 5341 } |
| 5342 | 5342 |
| 5343 | 5343 |
| 5344 void LCodeGen::DoAllocate(LAllocate* instr) { | 5344 void LCodeGen::DoAllocate(LAllocate* instr) { |
| 5345 class DeferredAllocate final : public LDeferredCode { | 5345 class DeferredAllocate final : public LDeferredCode { |
| 5346 public: | 5346 public: |
| 5347 DeferredAllocate(LCodeGen* codegen, LAllocate* instr) | 5347 DeferredAllocate(LCodeGen* codegen, LAllocate* instr) |
| 5348 : LDeferredCode(codegen), instr_(instr) {} | 5348 : LDeferredCode(codegen), instr_(instr) { } |
| 5349 void Generate() override { codegen()->DoDeferredAllocate(instr_); } | 5349 void Generate() override { codegen()->DoDeferredAllocate(instr_); } |
| 5350 LInstruction* instr() override { return instr_; } | 5350 LInstruction* instr() override { return instr_; } |
| 5351 | 5351 |
| 5352 private: | 5352 private: |
| 5353 LAllocate* instr_; | 5353 LAllocate* instr_; |
| 5354 }; | 5354 }; |
| 5355 | 5355 |
| 5356 DeferredAllocate* deferred = new (zone()) DeferredAllocate(this, instr); | 5356 DeferredAllocate* deferred = |
| 5357 new(zone()) DeferredAllocate(this, instr); | |
| 5357 | 5358 |
| 5358 Register result = ToRegister(instr->result()); | 5359 Register result = ToRegister(instr->result()); |
| 5359 Register scratch = ToRegister(instr->temp1()); | 5360 Register scratch = ToRegister(instr->temp1()); |
| 5360 Register scratch2 = ToRegister(instr->temp2()); | 5361 Register scratch2 = ToRegister(instr->temp2()); |
| 5361 | 5362 |
| 5362 // Allocate memory for the object. | 5363 // Allocate memory for the object. |
| 5363 AllocationFlags flags = NO_ALLOCATION_FLAGS; | 5364 AllocationFlags flags = NO_ALLOCATION_FLAGS; |
| 5364 if (instr->hydrogen()->MustAllocateDoubleAligned()) { | 5365 if (instr->hydrogen()->MustAllocateDoubleAligned()) { |
| 5365 flags = static_cast<AllocationFlags>(flags | DOUBLE_ALIGNMENT); | 5366 flags = static_cast<AllocationFlags>(flags | DOUBLE_ALIGNMENT); |
| 5366 } | 5367 } |
| 5367 if (instr->hydrogen()->IsOldSpaceAllocation()) { | 5368 if (instr->hydrogen()->IsOldSpaceAllocation()) { |
| 5368 DCHECK(!instr->hydrogen()->IsNewSpaceAllocation()); | 5369 DCHECK(!instr->hydrogen()->IsNewSpaceAllocation()); |
| 5369 flags = static_cast<AllocationFlags>(flags | PRETENURE); | 5370 flags = static_cast<AllocationFlags>(flags | PRETENURE); |
| 5370 } | 5371 } |
| 5371 | 5372 |
| 5373 if (instr->hydrogen()->IsAllocationFoldingDominator()) { | |
| 5374 flags = static_cast<AllocationFlags>(flags | ALLOCATION_FOLDING_DOMINATOR); | |
| 5375 } | |
| 5376 | |
| 5377 if (instr->hydrogen()->IsAllocationFolded()) { | |
| 5378 flags = static_cast<AllocationFlags>(flags | ALLOCATION_FOLDED); | |
| 5379 } | |
| 5380 | |
| 5372 if (instr->size()->IsConstantOperand()) { | 5381 if (instr->size()->IsConstantOperand()) { |
| 5373 int32_t size = ToInteger32(LConstantOperand::cast(instr->size())); | 5382 int32_t size = ToInteger32(LConstantOperand::cast(instr->size())); |
| 5374 CHECK(size <= Page::kMaxRegularHeapObjectSize); | 5383 CHECK(size <= Page::kMaxRegularHeapObjectSize); |
| 5375 __ Allocate(size, result, scratch, scratch2, deferred->entry(), flags); | 5384 __ Allocate(size, result, scratch, scratch2, deferred->entry(), flags); |
| 5376 } else { | 5385 } else { |
| 5377 Register size = ToRegister(instr->size()); | 5386 Register size = ToRegister(instr->size()); |
| 5378 __ Allocate(size, result, scratch, scratch2, deferred->entry(), flags); | 5387 __ Allocate(size, result, scratch, scratch2, deferred->entry(), flags); |
| 5379 } | 5388 } |
| 5380 | 5389 |
| 5381 __ bind(deferred->exit()); | 5390 __ bind(deferred->exit()); |
| (...skipping 51 matching lines...) | |
| 5433 DCHECK(!instr->hydrogen()->IsNewSpaceAllocation()); | 5442 DCHECK(!instr->hydrogen()->IsNewSpaceAllocation()); |
| 5434 flags = AllocateTargetSpace::update(flags, OLD_SPACE); | 5443 flags = AllocateTargetSpace::update(flags, OLD_SPACE); |
| 5435 } else { | 5444 } else { |
| 5436 flags = AllocateTargetSpace::update(flags, NEW_SPACE); | 5445 flags = AllocateTargetSpace::update(flags, NEW_SPACE); |
| 5437 } | 5446 } |
| 5438 __ Push(Smi::FromInt(flags)); | 5447 __ Push(Smi::FromInt(flags)); |
| 5439 | 5448 |
| 5440 CallRuntimeFromDeferred(Runtime::kAllocateInTargetSpace, 2, instr, | 5449 CallRuntimeFromDeferred(Runtime::kAllocateInTargetSpace, 2, instr, |
| 5441 instr->context()); | 5450 instr->context()); |
| 5442 __ StoreToSafepointRegisterSlot(r3, result); | 5451 __ StoreToSafepointRegisterSlot(r3, result); |
| 5452 | |
| 5453 if (instr->hydrogen()->IsAllocationFoldingDominator()) { | |
| 5454 AllocationFlags allocation_flags = NO_ALLOCATION_FLAGS; | |
| 5455 if (instr->hydrogen()->IsOldSpaceAllocation()) { | |
| 5456 DCHECK(!instr->hydrogen()->IsNewSpaceAllocation()); | |
| 5457 allocation_flags = static_cast<AllocationFlags>(flags | PRETENURE); | |
| 5458 } | |
| 5459 // If the allocation folding dominator allocate triggered a GC, allocation | |
| 5460 // happened in the runtime. We have to reset the top pointer to virtually | |
| 5461 // undo the allocation. | |
| 5462 ExternalReference allocation_top = | |
| 5463 AllocationUtils::GetAllocationTopReference(isolate(), allocation_flags); | |
| 5464 Register top_address = scratch0(); | |
| 5465 __ subi(r3, r3, Operand(kHeapObjectTag)); | |
| 5466 __ mov(top_address, Operand(allocation_top)); | |
| 5467 __ StoreP(r3, MemOperand(top_address)); | |
| 5468 __ addi(r3, r3, Operand(kHeapObjectTag)); | |
| 5469 } | |
| 5470 } | |
| 5471 | |
| 5472 void LCodeGen::DoFastAllocate(LFastAllocate* instr) { | |
| 5473 DCHECK(instr->hydrogen()->IsAllocationFolded()); | |
| 5474 Register result = ToRegister(instr->result()); | |
| 5475 Register scratch1 = ToRegister(instr->temp1()); | |
| 5476 Register scratch2 = ToRegister(instr->temp2()); | |
| 5477 | |
| 5478 AllocationFlags flags = NO_ALLOCATION_FLAGS; | |
| 5479 if (instr->hydrogen()->MustAllocateDoubleAligned()) { | |
| 5480 flags = static_cast<AllocationFlags>(flags | DOUBLE_ALIGNMENT); | |
| 5481 } | |
| 5482 if (instr->hydrogen()->IsOldSpaceAllocation()) { | |
| 5483 DCHECK(!instr->hydrogen()->IsNewSpaceAllocation()); | |
| 5484 flags = static_cast<AllocationFlags>(flags | PRETENURE); | |
| 5485 } | |
| 5486 if (!instr->hydrogen()->IsAllocationFoldingDominator()) { | |
| 5487 if (instr->size()->IsConstantOperand()) { | |
| 5488 int32_t size = ToInteger32(LConstantOperand::cast(instr->size())); | |
| 5489 #if !V8_TARGET_ARCH_PPC64 | |
| 5490 CHECK(size <= Page::kMaxRegularHeapObjectSize); | |
| MTBrandyberry 2016/05/11 15:18:22: Why omit this check for 64-bit? | |
| 5491 #endif | |
| 5492 __ FastAllocate(size, result, scratch1, scratch2, flags); | |
| 5493 } else { | |
| 5494 Register size = ToRegister(instr->size()); | |
| 5495 __ FastAllocate(size, result, scratch1, scratch2, flags); | |
| 5496 } | |
| 5497 } | |
| 5443 } | 5498 } |
| 5444 | 5499 |
| 5445 | 5500 |
| 5446 void LCodeGen::DoTypeof(LTypeof* instr) { | 5501 void LCodeGen::DoTypeof(LTypeof* instr) { |
| 5447 DCHECK(ToRegister(instr->value()).is(r6)); | 5502 DCHECK(ToRegister(instr->value()).is(r6)); |
| 5448 DCHECK(ToRegister(instr->result()).is(r3)); | 5503 DCHECK(ToRegister(instr->result()).is(r3)); |
| 5449 Label end, do_call; | 5504 Label end, do_call; |
| 5450 Register value_register = ToRegister(instr->value()); | 5505 Register value_register = ToRegister(instr->value()); |
| 5451 __ JumpIfNotSmi(value_register, &do_call); | 5506 __ JumpIfNotSmi(value_register, &do_call); |
| 5452 __ mov(r3, Operand(isolate()->factory()->number_string())); | 5507 __ mov(r3, Operand(isolate()->factory()->number_string())); |
| (...skipping 333 matching lines...) | |
| 5786 __ LoadP(result, | 5841 __ LoadP(result, |
| 5787 FieldMemOperand(scratch, FixedArray::kHeaderSize - kPointerSize)); | 5842 FieldMemOperand(scratch, FixedArray::kHeaderSize - kPointerSize)); |
| 5788 __ bind(deferred->exit()); | 5843 __ bind(deferred->exit()); |
| 5789 __ bind(&done); | 5844 __ bind(&done); |
| 5790 } | 5845 } |
| 5791 | 5846 |
| 5792 #undef __ | 5847 #undef __ |
| 5793 | 5848 |
| 5794 } // namespace internal | 5849 } // namespace internal |
| 5795 } // namespace v8 | 5850 } // namespace v8 |
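The comments in the deferred-allocation path of this patch describe the core trick being ported to PPC: allocations are fast-path bumps of a space's top pointer, folded allocations skip the limit check because a dominating allocation has already reserved room for them, and when the dominator falls back to the runtime (for example after a GC) the speculative bump is undone by resetting the top pointer. The sketch below is a minimal model of that idea in plain C++; `BumpSpace`, `Allocate`, `FastAllocate`, and `ResetTop` are illustrative names invented for this note, not V8's MacroAssembler API, and the division of work between dominator and folded allocations is deliberately simplified.

```cpp
// Minimal sketch (not V8 code) of bump-pointer allocation with a checked
// fast path, an unchecked "fast allocate" for pre-reserved space, and an
// undo that resets the top pointer after a runtime fallback.
#include <cstddef>
#include <cstdint>
#include <cstdio>
#include <vector>

class BumpSpace {
 public:
  explicit BumpSpace(std::size_t capacity) : buffer_(capacity) {}

  // Checked fast path: bump top_ if the request fits, otherwise signal
  // failure (where V8 would branch to deferred code that calls the runtime).
  std::intptr_t Allocate(std::size_t size) {
    if (top_ + size > buffer_.size()) return -1;
    std::intptr_t result = static_cast<std::intptr_t>(top_);
    top_ += size;
    return result;
  }

  // Unchecked bump, analogous in spirit to FastAllocate: only valid when a
  // dominating allocation has already reserved enough room.
  std::intptr_t FastAllocate(std::size_t size) {
    std::intptr_t result = static_cast<std::intptr_t>(top_);
    top_ += size;
    return result;
  }

  // Restore a previously saved top, mirroring the "reset the top pointer to
  // virtually undo the allocation" comment in the deferred path above.
  void ResetTop(std::size_t saved_top) { top_ = saved_top; }

  std::size_t top() const { return top_; }

 private:
  std::vector<std::uint8_t> buffer_;
  std::size_t top_ = 0;
};

int main() {
  BumpSpace space(64);

  // Checked allocation on the fast path.
  std::size_t saved_top = space.top();
  std::intptr_t first = space.Allocate(32);
  std::printf("first at %lld, top now %zu\n",
              static_cast<long long>(first), space.top());

  // Pretend the object was instead allocated by the runtime after a GC:
  // roll the speculative bump back, as the deferred path does.
  space.ResetTop(saved_top);
  std::printf("top after reset: %zu\n", space.top());

  // Unchecked bump once enough space is known to be reserved.
  std::intptr_t second = space.FastAllocate(16);
  std::printf("second at %lld, top now %zu\n",
              static_cast<long long>(second), space.top());
  return 0;
}
```

The sketch only illustrates the pointer arithmetic; it omits alignment, heap-object tagging, and the new-space versus old-space distinction that the real `AllocationFlags` in the patch encode.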