| OLD | NEW |
| 1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include <assert.h> // For assert | 5 #include <assert.h> // For assert |
| 6 #include <limits.h> // For LONG_MIN, LONG_MAX. | 6 #include <limits.h> // For LONG_MIN, LONG_MAX. |
| 7 | 7 |
| 8 #if V8_TARGET_ARCH_PPC | 8 #if V8_TARGET_ARCH_PPC |
| 9 | 9 |
| 10 #include "src/base/bits.h" | 10 #include "src/base/bits.h" |
| (...skipping 2052 matching lines...) | |
| 2063 cmpi(type_reg, Operand(type)); | 2063 cmpi(type_reg, Operand(type)); |
| 2064 } | 2064 } |
| 2065 | 2065 |
| 2066 | 2066 |
| 2067 void MacroAssembler::CompareRoot(Register obj, Heap::RootListIndex index) { | 2067 void MacroAssembler::CompareRoot(Register obj, Heap::RootListIndex index) { |
| 2068 DCHECK(!obj.is(r0)); | 2068 DCHECK(!obj.is(r0)); |
| 2069 LoadRoot(r0, index); | 2069 LoadRoot(r0, index); |
| 2070 cmp(obj, r0); | 2070 cmp(obj, r0); |
| 2071 } | 2071 } |
| 2072 | 2072 |
| 2073 void MacroAssembler::CheckFastObjectElements(Register map, Register scratch, | |
| 2074 Label* fail) { | |
| 2075 STATIC_ASSERT(FAST_SMI_ELEMENTS == 0); | |
| 2076 STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1); | |
| 2077 STATIC_ASSERT(FAST_ELEMENTS == 2); | |
| 2078 STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3); | |
| 2079 lbz(scratch, FieldMemOperand(map, Map::kBitField2Offset)); | |
| 2080 cmpli(scratch, Operand(Map::kMaximumBitField2FastHoleySmiElementValue)); | |
| 2081 ble(fail); | |
| 2082 cmpli(scratch, Operand(Map::kMaximumBitField2FastHoleyElementValue)); | |
| 2083 bgt(fail); | |
| 2084 } | |
| 2085 | |
| 2086 | |
| 2087 void MacroAssembler::CheckFastSmiElements(Register map, Register scratch, | |
| 2088 Label* fail) { | |
| 2089 STATIC_ASSERT(FAST_SMI_ELEMENTS == 0); | |
| 2090 STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1); | |
| 2091 lbz(scratch, FieldMemOperand(map, Map::kBitField2Offset)); | |
| 2092 cmpli(scratch, Operand(Map::kMaximumBitField2FastHoleySmiElementValue)); | |
| 2093 bgt(fail); | |
| 2094 } | |
| 2095 | |
| 2096 | |
| 2097 void MacroAssembler::StoreNumberToDoubleElements( | |
| 2098 Register value_reg, Register key_reg, Register elements_reg, | |
| 2099 Register scratch1, DoubleRegister double_scratch, Label* fail, | |
| 2100 int elements_offset) { | |
| 2101 DCHECK(!AreAliased(value_reg, key_reg, elements_reg, scratch1)); | |
| 2102 Label smi_value, store; | |
| 2103 | |
| 2104 // Handle smi values specially. | |
| 2105 JumpIfSmi(value_reg, &smi_value); | |
| 2106 | |
| 2107 // Ensure that the object is a heap number | |
| 2108 CheckMap(value_reg, scratch1, isolate()->factory()->heap_number_map(), fail, | |
| 2109 DONT_DO_SMI_CHECK); | |
| 2110 | |
| 2111 lfd(double_scratch, FieldMemOperand(value_reg, HeapNumber::kValueOffset)); | |
| 2112 // Double value, turn potential sNaN into qNaN. | |
| 2113 CanonicalizeNaN(double_scratch); | |
| 2114 b(&store); | |
| 2115 | |
| 2116 bind(&smi_value); | |
| 2117 SmiToDouble(double_scratch, value_reg); | |
| 2118 | |
| 2119 bind(&store); | |
| 2120 SmiToDoubleArrayOffset(scratch1, key_reg); | |
| 2121 add(scratch1, elements_reg, scratch1); | |
| 2122 stfd(double_scratch, FieldMemOperand(scratch1, FixedDoubleArray::kHeaderSize - | |
| 2123 elements_offset)); | |
| 2124 } | |
| 2125 | |
| 2126 | |
| 2127 void MacroAssembler::AddAndCheckForOverflow(Register dst, Register left, | 2073 void MacroAssembler::AddAndCheckForOverflow(Register dst, Register left, |
| 2128 Register right, | 2074 Register right, |
| 2129 Register overflow_dst, | 2075 Register overflow_dst, |
| 2130 Register scratch) { | 2076 Register scratch) { |
| 2131 DCHECK(!dst.is(overflow_dst)); | 2077 DCHECK(!dst.is(overflow_dst)); |
| 2132 DCHECK(!dst.is(scratch)); | 2078 DCHECK(!dst.is(scratch)); |
| 2133 DCHECK(!overflow_dst.is(scratch)); | 2079 DCHECK(!overflow_dst.is(scratch)); |
| 2134 DCHECK(!overflow_dst.is(left)); | 2080 DCHECK(!overflow_dst.is(left)); |
| 2135 DCHECK(!overflow_dst.is(right)); | 2081 DCHECK(!overflow_dst.is(right)); |
| 2136 | 2082 |
| (...skipping 593 matching lines...) | |
| 2730 LoadP(dst, MemOperand(dst, Context::SlotOffset(Context::PREVIOUS_INDEX))); | 2676 LoadP(dst, MemOperand(dst, Context::SlotOffset(Context::PREVIOUS_INDEX))); |
| 2731 } | 2677 } |
| 2732 } else { | 2678 } else { |
| 2733 // Slot is in the current function context. Move it into the | 2679 // Slot is in the current function context. Move it into the |
| 2734 // destination register in case we store into it (the write barrier | 2680 // destination register in case we store into it (the write barrier |
| 2735 // cannot be allowed to destroy the context in esi). | 2681 // cannot be allowed to destroy the context in esi). |
| 2736 mr(dst, cp); | 2682 mr(dst, cp); |
| 2737 } | 2683 } |
| 2738 } | 2684 } |
| 2739 | 2685 |
| 2740 | |
| 2741 void MacroAssembler::LoadTransitionedArrayMapConditional( | |
| 2742 ElementsKind expected_kind, ElementsKind transitioned_kind, | |
| 2743 Register map_in_out, Register scratch, Label* no_map_match) { | |
| 2744 DCHECK(IsFastElementsKind(expected_kind)); | |
| 2745 DCHECK(IsFastElementsKind(transitioned_kind)); | |
| 2746 | |
| 2747 // Check that the function's map is the same as the expected cached map. | |
| 2748 LoadP(scratch, NativeContextMemOperand()); | |
| 2749 LoadP(ip, ContextMemOperand(scratch, Context::ArrayMapIndex(expected_kind))); | |
| 2750 cmp(map_in_out, ip); | |
| 2751 bne(no_map_match); | |
| 2752 | |
| 2753 // Use the transitioned cached map. | |
| 2754 LoadP(map_in_out, | |
| 2755 ContextMemOperand(scratch, Context::ArrayMapIndex(transitioned_kind))); | |
| 2756 } | |
| 2757 | |
| 2758 | |
| 2759 void MacroAssembler::LoadNativeContextSlot(int index, Register dst) { | 2686 void MacroAssembler::LoadNativeContextSlot(int index, Register dst) { |
| 2760 LoadP(dst, NativeContextMemOperand()); | 2687 LoadP(dst, NativeContextMemOperand()); |
| 2761 LoadP(dst, ContextMemOperand(dst, index)); | 2688 LoadP(dst, ContextMemOperand(dst, index)); |
| 2762 } | 2689 } |
| 2763 | 2690 |
| 2764 | 2691 |
| 2765 void MacroAssembler::LoadGlobalFunctionInitialMap(Register function, | 2692 void MacroAssembler::LoadGlobalFunctionInitialMap(Register function, |
| 2766 Register map, | 2693 Register map, |
| 2767 Register scratch) { | 2694 Register scratch) { |
| 2768 // Load the initial map. The global functions all have initial maps. | 2695 // Load the initial map. The global functions all have initial maps. |
| (...skipping 64 matching lines...) | |
| 2833 | 2760 |
| 2834 | 2761 |
| 2835 void MacroAssembler::UntagAndJumpIfSmi(Register dst, Register src, | 2762 void MacroAssembler::UntagAndJumpIfSmi(Register dst, Register src, |
| 2836 Label* smi_case) { | 2763 Label* smi_case) { |
| 2837 STATIC_ASSERT(kSmiTag == 0); | 2764 STATIC_ASSERT(kSmiTag == 0); |
| 2838 TestBitRange(src, kSmiTagSize - 1, 0, r0); | 2765 TestBitRange(src, kSmiTagSize - 1, 0, r0); |
| 2839 SmiUntag(dst, src); | 2766 SmiUntag(dst, src); |
| 2840 beq(smi_case, cr0); | 2767 beq(smi_case, cr0); |
| 2841 } | 2768 } |
| 2842 | 2769 |
| 2843 | |
| 2844 void MacroAssembler::UntagAndJumpIfNotSmi(Register dst, Register src, | |
| 2845 Label* non_smi_case) { | |
| 2846 STATIC_ASSERT(kSmiTag == 0); | |
| 2847 TestBitRange(src, kSmiTagSize - 1, 0, r0); | |
| 2848 SmiUntag(dst, src); | |
| 2849 bne(non_smi_case, cr0); | |
| 2850 } | |
| 2851 | |
| 2852 | |
| 2853 void MacroAssembler::JumpIfEitherSmi(Register reg1, Register reg2, | 2770 void MacroAssembler::JumpIfEitherSmi(Register reg1, Register reg2, |
| 2854 Label* on_either_smi) { | 2771 Label* on_either_smi) { |
| 2855 STATIC_ASSERT(kSmiTag == 0); | 2772 STATIC_ASSERT(kSmiTag == 0); |
| 2856 JumpIfSmi(reg1, on_either_smi); | 2773 JumpIfSmi(reg1, on_either_smi); |
| 2857 JumpIfSmi(reg2, on_either_smi); | 2774 JumpIfSmi(reg2, on_either_smi); |
| 2858 } | 2775 } |
| 2859 | 2776 |
| 2860 void MacroAssembler::AssertNotNumber(Register object) { | 2777 void MacroAssembler::AssertNotNumber(Register object) { |
| 2861 if (emit_debug_code()) { | 2778 if (emit_debug_code()) { |
| 2862 STATIC_ASSERT(kSmiTag == 0); | 2779 STATIC_ASSERT(kSmiTag == 0); |
| (...skipping 1622 matching lines...) | |
| 4485 for (int i = 0; i < config->num_allocatable_general_registers(); ++i) { | 4402 for (int i = 0; i < config->num_allocatable_general_registers(); ++i) { |
| 4486 int code = config->GetAllocatableGeneralCode(i); | 4403 int code = config->GetAllocatableGeneralCode(i); |
| 4487 Register candidate = Register::from_code(code); | 4404 Register candidate = Register::from_code(code); |
| 4488 if (regs & candidate.bit()) continue; | 4405 if (regs & candidate.bit()) continue; |
| 4489 return candidate; | 4406 return candidate; |
| 4490 } | 4407 } |
| 4491 UNREACHABLE(); | 4408 UNREACHABLE(); |
| 4492 return no_reg; | 4409 return no_reg; |
| 4493 } | 4410 } |
| 4494 | 4411 |
| 4495 | |
| 4496 void MacroAssembler::JumpIfDictionaryInPrototypeChain(Register object, | |
| 4497 Register scratch0, | |
| 4498 Register scratch1, | |
| 4499 Label* found) { | |
| 4500 DCHECK(!scratch1.is(scratch0)); | |
| 4501 Register current = scratch0; | |
| 4502 Label loop_again, end; | |
| 4503 | |
| 4504 // scratch contained elements pointer. | |
| 4505 mr(current, object); | |
| 4506 LoadP(current, FieldMemOperand(current, HeapObject::kMapOffset)); | |
| 4507 LoadP(current, FieldMemOperand(current, Map::kPrototypeOffset)); | |
| 4508 CompareRoot(current, Heap::kNullValueRootIndex); | |
| 4509 beq(&end); | |
| 4510 | |
| 4511 // Loop based on the map going up the prototype chain. | |
| 4512 bind(&loop_again); | |
| 4513 LoadP(current, FieldMemOperand(current, HeapObject::kMapOffset)); | |
| 4514 | |
| 4515 STATIC_ASSERT(JS_PROXY_TYPE < JS_OBJECT_TYPE); | |
| 4516 STATIC_ASSERT(JS_VALUE_TYPE < JS_OBJECT_TYPE); | |
| 4517 lbz(scratch1, FieldMemOperand(current, Map::kInstanceTypeOffset)); | |
| 4518 cmpi(scratch1, Operand(JS_OBJECT_TYPE)); | |
| 4519 blt(found); | |
| 4520 | |
| 4521 lbz(scratch1, FieldMemOperand(current, Map::kBitField2Offset)); | |
| 4522 DecodeField<Map::ElementsKindBits>(scratch1); | |
| 4523 cmpi(scratch1, Operand(DICTIONARY_ELEMENTS)); | |
| 4524 beq(found); | |
| 4525 LoadP(current, FieldMemOperand(current, Map::kPrototypeOffset)); | |
| 4526 CompareRoot(current, Heap::kNullValueRootIndex); | |
| 4527 bne(&loop_again); | |
| 4528 | |
| 4529 bind(&end); | |
| 4530 } | |
| 4531 | |
| 4532 | |
| 4533 #ifdef DEBUG | 4412 #ifdef DEBUG |
| 4534 bool AreAliased(Register reg1, Register reg2, Register reg3, Register reg4, | 4413 bool AreAliased(Register reg1, Register reg2, Register reg3, Register reg4, |
| 4535 Register reg5, Register reg6, Register reg7, Register reg8, | 4414 Register reg5, Register reg6, Register reg7, Register reg8, |
| 4536 Register reg9, Register reg10) { | 4415 Register reg9, Register reg10) { |
| 4537 int n_of_valid_regs = reg1.is_valid() + reg2.is_valid() + reg3.is_valid() + | 4416 int n_of_valid_regs = reg1.is_valid() + reg2.is_valid() + reg3.is_valid() + |
| 4538 reg4.is_valid() + reg5.is_valid() + reg6.is_valid() + | 4417 reg4.is_valid() + reg5.is_valid() + reg6.is_valid() + |
| 4539 reg7.is_valid() + reg8.is_valid() + reg9.is_valid() + | 4418 reg7.is_valid() + reg8.is_valid() + reg9.is_valid() + |
| 4540 reg10.is_valid(); | 4419 reg10.is_valid(); |
| 4541 | 4420 |
| 4542 RegList regs = 0; | 4421 RegList regs = 0; |
| (...skipping 76 matching lines...) | |
| 4619 } | 4498 } |
| 4620 if (mag.shift > 0) srawi(result, result, mag.shift); | 4499 if (mag.shift > 0) srawi(result, result, mag.shift); |
| 4621 ExtractBit(r0, dividend, 31); | 4500 ExtractBit(r0, dividend, 31); |
| 4622 add(result, result, r0); | 4501 add(result, result, r0); |
| 4623 } | 4502 } |
| 4624 | 4503 |
| 4625 } // namespace internal | 4504 } // namespace internal |
| 4626 } // namespace v8 | 4505 } // namespace v8 |
| 4627 | 4506 |
| 4628 #endif // V8_TARGET_ARCH_PPC | 4507 #endif // V8_TARGET_ARCH_PPC |