OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include <limits.h> // For LONG_MIN, LONG_MAX. | 5 #include <limits.h> // For LONG_MIN, LONG_MAX. |
6 | 6 |
7 #if V8_TARGET_ARCH_MIPS | 7 #if V8_TARGET_ARCH_MIPS |
8 | 8 |
9 #include "src/base/bits.h" | 9 #include "src/base/bits.h" |
10 #include "src/base/division-by-constant.h" | 10 #include "src/base/division-by-constant.h" |
(...skipping 4500 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
4511 Register filler) { | 4511 Register filler) { |
4512 Label loop, entry; | 4512 Label loop, entry; |
4513 Branch(&entry); | 4513 Branch(&entry); |
4514 bind(&loop); | 4514 bind(&loop); |
4515 sw(filler, MemOperand(current_address)); | 4515 sw(filler, MemOperand(current_address)); |
4516 Addu(current_address, current_address, kPointerSize); | 4516 Addu(current_address, current_address, kPointerSize); |
4517 bind(&entry); | 4517 bind(&entry); |
4518 Branch(&loop, ult, current_address, Operand(end_address)); | 4518 Branch(&loop, ult, current_address, Operand(end_address)); |
4519 } | 4519 } |
4520 | 4520 |
// Branches to |fail| unless |map|'s elements kind, read from bit field 2,
// is FAST_ELEMENTS or FAST_HOLEY_ELEMENTS. Clobbers |scratch|.
void MacroAssembler::CheckFastObjectElements(Register map,
                                             Register scratch,
                                             Label* fail) {
  // The fast kinds are required to occupy the contiguous range [0, 3] so a
  // pair of unsigned comparisons on the bit field 2 byte suffices.
  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
  STATIC_ASSERT(FAST_ELEMENTS == 2);
  STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
  lbu(scratch, FieldMemOperand(map, Map::kBitField2Offset));
  // Fail if the value lies in the smi-elements range (unsigned <=
  // holey-smi maximum) ...
  Branch(fail, ls, scratch,
         Operand(Map::kMaximumBitField2FastHoleySmiElementValue));
  // ... or above the fast-holey-elements maximum.
  Branch(fail, hi, scratch,
         Operand(Map::kMaximumBitField2FastHoleyElementValue));
}
4534 | |
4535 | |
// Branches to |fail| unless |map|'s elements kind is FAST_SMI_ELEMENTS or
// FAST_HOLEY_SMI_ELEMENTS. Clobbers |scratch|.
void MacroAssembler::CheckFastSmiElements(Register map,
                                          Register scratch,
                                          Label* fail) {
  // The smi-only kinds occupy the lowest values (0 and 1), so a single
  // unsigned upper-bound check on the bit field 2 byte suffices.
  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
  lbu(scratch, FieldMemOperand(map, Map::kBitField2Offset));
  Branch(fail, hi, scratch,
         Operand(Map::kMaximumBitField2FastHoleySmiElementValue));
}
4545 | |
4546 | |
// Stores the number in |value_reg| into the FixedDoubleArray pointed to by
// |elements_reg| at index |key_reg| (a smi). Smis are untagged and converted
// to double; heap numbers are loaded and their NaNs canonicalized. Branches
// to |fail| if |value_reg| holds a non-smi that is not a heap number.
// |elements_offset| is subtracted from the element base address.
// Clobbers scratch1, scratch2 and FPU registers f0 and f2; scratch3 is only
// used for the aliasing DCHECK.
void MacroAssembler::StoreNumberToDoubleElements(Register value_reg,
                                                 Register key_reg,
                                                 Register elements_reg,
                                                 Register scratch1,
                                                 Register scratch2,
                                                 Register scratch3,
                                                 Label* fail,
                                                 int elements_offset) {
  DCHECK(!AreAliased(value_reg, key_reg, elements_reg, scratch1, scratch2,
                     scratch3));
  Label smi_value, done;

  // Handle smi values specially.
  JumpIfSmi(value_reg, &smi_value);

  // Ensure that the object is a heap number; otherwise bail out to |fail|.
  CheckMap(value_reg,
           scratch1,
           Heap::kHeapNumberMapRootIndex,
           fail,
           DONT_DO_SMI_CHECK);

  // Double value, turn potential sNaN into qNan.
  DoubleRegister double_result = f0;
  DoubleRegister double_scratch = f2;

  ldc1(double_result, FieldMemOperand(value_reg, HeapNumber::kValueOffset));
  // The canonicalization is a single instruction, so it fits in the branch
  // delay slot of the jump to |done| and still executes.
  Branch(USE_DELAY_SLOT, &done);  // Canonicalization is one instruction.
  FPUCanonicalizeNaN(double_result, double_result);

  // Smi path: untag, then convert the 32-bit integer to a double.
  bind(&smi_value);
  Register untagged_value = scratch2;
  SmiUntag(untagged_value, value_reg);
  mtc1(untagged_value, double_scratch);
  cvt_d_w(double_result, double_scratch);

  bind(&done);
  // Compute the effective address of the element: untagged base (minus
  // |elements_offset|) plus key scaled from smi to a kDoubleSize index.
  Addu(scratch1, elements_reg,
       Operand(FixedDoubleArray::kHeaderSize - kHeapObjectTag -
               elements_offset));
  Lsa(scratch1, scratch1, key_reg, kDoubleSizeLog2 - kSmiTagSize);
  // scratch1 is now effective address of the double element
  sdc1(double_result, MemOperand(scratch1, 0));
}
4591 | |
// Convenience overload: loads |obj|'s map into |scratch| and forwards to
// the overload that compares a map register against the handle |map|,
// branching to |branch_to| under |cond|. The meaning of |early_success| is
// defined by that overload.
void MacroAssembler::CompareMapAndBranch(Register obj,
                                         Register scratch,
                                         Handle<Map> map,
                                         Label* early_success,
                                         Condition cond,
                                         Label* branch_to) {
  lw(scratch, FieldMemOperand(obj, HeapObject::kMapOffset));
  CompareMapAndBranch(scratch, map, early_success, cond, branch_to);
}
4601 | 4530 |
(...skipping 994 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
5596 lw(dst, MemOperand(dst, Context::SlotOffset(Context::PREVIOUS_INDEX))); | 5525 lw(dst, MemOperand(dst, Context::SlotOffset(Context::PREVIOUS_INDEX))); |
5597 } | 5526 } |
5598 } else { | 5527 } else { |
5599 // Slot is in the current function context. Move it into the | 5528 // Slot is in the current function context. Move it into the |
5600 // destination register in case we store into it (the write barrier | 5529 // destination register in case we store into it (the write barrier |
5601 // cannot be allowed to destroy the context in esi). | 5530 // cannot be allowed to destroy the context in esi). |
5602 Move(dst, cp); | 5531 Move(dst, cp); |
5603 } | 5532 } |
5604 } | 5533 } |
5605 | 5534 |
5606 | |
// If |map_in_out| equals the native context's cached array map for
// |expected_kind|, replaces it in-place with the cached array map for
// |transitioned_kind|; otherwise branches to |no_map_match| with
// |map_in_out| unchanged. Clobbers |scratch| and at.
void MacroAssembler::LoadTransitionedArrayMapConditional(
    ElementsKind expected_kind,
    ElementsKind transitioned_kind,
    Register map_in_out,
    Register scratch,
    Label* no_map_match) {
  // Only fast elements kinds have cached array maps in the native context.
  DCHECK(IsFastElementsKind(expected_kind));
  DCHECK(IsFastElementsKind(transitioned_kind));

  // Check that the function's map is the same as the expected cached map.
  lw(scratch, NativeContextMemOperand());
  lw(at, ContextMemOperand(scratch, Context::ArrayMapIndex(expected_kind)));
  Branch(no_map_match, ne, map_in_out, Operand(at));

  // Use the transitioned cached map.
  lw(map_in_out,
     ContextMemOperand(scratch, Context::ArrayMapIndex(transitioned_kind)));
}
5625 | |
5626 | |
// Loads slot |index| of the native context into |dst| (two dependent loads:
// first the native context itself, then the requested slot).
void MacroAssembler::LoadNativeContextSlot(int index, Register dst) {
  lw(dst, NativeContextMemOperand());
  lw(dst, ContextMemOperand(dst, index));
}
5631 | 5539 |
5632 | 5540 |
5633 void MacroAssembler::LoadGlobalFunctionInitialMap(Register function, | 5541 void MacroAssembler::LoadGlobalFunctionInitialMap(Register function, |
5634 Register map, | 5542 Register map, |
5635 Register scratch) { | 5543 Register scratch) { |
5636 // Load the initial map. The global functions all have initial maps. | 5544 // Load the initial map. The global functions all have initial maps. |
(...skipping 311 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
5948 } | 5856 } |
5949 | 5857 |
5950 | 5858 |
// Jumps to |smi_case| if |src| is a smi. The untag sits in the branch delay
// slot, so |dst| receives the untagged value unconditionally — even when
// |src| is not a smi and the branch falls through.
void MacroAssembler::UntagAndJumpIfSmi(Register dst,
                                       Register src,
                                       Label* smi_case) {
  JumpIfSmi(src, smi_case, at, USE_DELAY_SLOT);
  SmiUntag(dst, src);  // Executes in the delay slot.
}
5957 | 5865 |
5958 | |
// Jumps to |non_smi_case| if |src| is not a smi. The untag sits in the
// branch delay slot, so |dst| is written unconditionally; its value is only
// a meaningful untagged smi on the fall-through (smi) path.
void MacroAssembler::UntagAndJumpIfNotSmi(Register dst,
                                          Register src,
                                          Label* non_smi_case) {
  JumpIfNotSmi(src, non_smi_case, at, USE_DELAY_SLOT);
  SmiUntag(dst, src);  // Executes in the delay slot.
}
5965 | |
// Branches to |smi_label| if |value| is a smi (its tag bit is clear).
// |scratch| receives value & kSmiTagMask; |bd| controls delay-slot use.
void MacroAssembler::JumpIfSmi(Register value,
                               Label* smi_label,
                               Register scratch,
                               BranchDelaySlot bd) {
  // The check below relies on the smi tag being the zero bit pattern.
  DCHECK_EQ(0, kSmiTag);
  andi(scratch, value, kSmiTagMask);
  Branch(bd, smi_label, eq, scratch, Operand(zero_reg));
}
5974 | 5874 |
5975 void MacroAssembler::JumpIfNotSmi(Register value, | 5875 void MacroAssembler::JumpIfNotSmi(Register value, |
(...skipping 639 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
6615 for (int i = 0; i < config->num_allocatable_general_registers(); ++i) { | 6515 for (int i = 0; i < config->num_allocatable_general_registers(); ++i) { |
6616 int code = config->GetAllocatableGeneralCode(i); | 6516 int code = config->GetAllocatableGeneralCode(i); |
6617 Register candidate = Register::from_code(code); | 6517 Register candidate = Register::from_code(code); |
6618 if (regs & candidate.bit()) continue; | 6518 if (regs & candidate.bit()) continue; |
6619 return candidate; | 6519 return candidate; |
6620 } | 6520 } |
6621 UNREACHABLE(); | 6521 UNREACHABLE(); |
6622 return no_reg; | 6522 return no_reg; |
6623 } | 6523 } |
6624 | 6524 |
6625 | |
// Walks |object|'s prototype chain (starting at its first prototype, i.e.
// |object| itself is not checked) and branches to |found| if any object on
// the chain has an instance type below JS_OBJECT_TYPE (special receivers
// such as values/proxies — treated conservatively as a hit) or has
// DICTIONARY_ELEMENTS. Falls through when the chain ends at null.
// Clobbers scratch0 and scratch1.
void MacroAssembler::JumpIfDictionaryInPrototypeChain(
    Register object,
    Register scratch0,
    Register scratch1,
    Label* found) {
  DCHECK(!scratch1.is(scratch0));
  Factory* factory = isolate()->factory();
  Register current = scratch0;
  Label loop_again, end;

  // Scratch contained elements pointer.
  Move(current, object);
  lw(current, FieldMemOperand(current, HeapObject::kMapOffset));
  lw(current, FieldMemOperand(current, Map::kPrototypeOffset));
  // Empty prototype chain: nothing to inspect.
  Branch(&end, eq, current, Operand(factory->null_value()));

  // Loop based on the map going up the prototype chain.
  bind(&loop_again);
  lw(current, FieldMemOperand(current, HeapObject::kMapOffset));
  lbu(scratch1, FieldMemOperand(current, Map::kInstanceTypeOffset));
  // The instance-type ordering below is what makes the single unsigned
  // "lo" comparison cover both special receiver types.
  STATIC_ASSERT(JS_VALUE_TYPE < JS_OBJECT_TYPE);
  STATIC_ASSERT(JS_PROXY_TYPE < JS_OBJECT_TYPE);
  Branch(found, lo, scratch1, Operand(JS_OBJECT_TYPE));
  // Extract the elements kind from bit field 2 and test for dictionary mode.
  lb(scratch1, FieldMemOperand(current, Map::kBitField2Offset));
  DecodeField<Map::ElementsKindBits>(scratch1);
  Branch(found, eq, scratch1, Operand(DICTIONARY_ELEMENTS));
  // Advance to the next prototype; stop when the chain ends at null.
  lw(current, FieldMemOperand(current, Map::kPrototypeOffset));
  Branch(&loop_again, ne, current, Operand(factory->null_value()));

  bind(&end);
}
6657 | |
6658 | |
6659 bool AreAliased(Register reg1, Register reg2, Register reg3, Register reg4, | 6525 bool AreAliased(Register reg1, Register reg2, Register reg3, Register reg4, |
6660 Register reg5, Register reg6, Register reg7, Register reg8, | 6526 Register reg5, Register reg6, Register reg7, Register reg8, |
6661 Register reg9, Register reg10) { | 6527 Register reg9, Register reg10) { |
6662 int n_of_valid_regs = reg1.is_valid() + reg2.is_valid() + reg3.is_valid() + | 6528 int n_of_valid_regs = reg1.is_valid() + reg2.is_valid() + reg3.is_valid() + |
6663 reg4.is_valid() + reg5.is_valid() + reg6.is_valid() + | 6529 reg4.is_valid() + reg5.is_valid() + reg6.is_valid() + |
6664 reg7.is_valid() + reg8.is_valid() + reg9.is_valid() + | 6530 reg7.is_valid() + reg8.is_valid() + reg9.is_valid() + |
6665 reg10.is_valid(); | 6531 reg10.is_valid(); |
6666 | 6532 |
6667 RegList regs = 0; | 6533 RegList regs = 0; |
6668 if (reg1.is_valid()) regs |= reg1.bit(); | 6534 if (reg1.is_valid()) regs |= reg1.bit(); |
(...skipping 74 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
6743 if (mag.shift > 0) sra(result, result, mag.shift); | 6609 if (mag.shift > 0) sra(result, result, mag.shift); |
6744 srl(at, dividend, 31); | 6610 srl(at, dividend, 31); |
6745 Addu(result, result, Operand(at)); | 6611 Addu(result, result, Operand(at)); |
6746 } | 6612 } |
6747 | 6613 |
6748 | 6614 |
6749 } // namespace internal | 6615 } // namespace internal |
6750 } // namespace v8 | 6616 } // namespace v8 |
6751 | 6617 |
6752 #endif // V8_TARGET_ARCH_MIPS | 6618 #endif // V8_TARGET_ARCH_MIPS |
OLD | NEW |