| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 4567 matching lines...) |
| 4578 LoadRoot(at, Heap::kUndefinedValueRootIndex); | 4578 LoadRoot(at, Heap::kUndefinedValueRootIndex); |
| 4579 Branch(not_int32, ne, object, Operand(at)); | 4579 Branch(not_int32, ne, object, Operand(at)); |
| 4580 // |undefined| is truncated to 0. | 4580 // |undefined| is truncated to 0. |
| 4581 li(dst, Operand(Smi::FromInt(0))); | 4581 li(dst, Operand(Smi::FromInt(0))); |
| 4582 // Fall through. | 4582 // Fall through. |
| 4583 | 4583 |
| 4584 bind(&done); | 4584 bind(&done); |
| 4585 } | 4585 } |
| 4586 | 4586 |
| 4587 | 4587 |
| 4588 void MacroAssembler::Prologue(PrologueFrameMode frame_mode) { |
| 4589 if (frame_mode == BUILD_STUB_FRAME) { |
| 4590 Push(ra, fp, cp); |
| 4591 Push(Smi::FromInt(StackFrame::STUB)); |
| 4592 // Adjust FP to point to saved FP. |
| 4593 Addu(fp, sp, Operand(2 * kPointerSize)); |
| 4594 } else { |
| 4595 PredictableCodeSizeScope predictible_code_size_scope( |
| 4596 this, kNoCodeAgeSequenceLength * Assembler::kInstrSize); |
| 4597 // The following three instructions must remain together and unmodified |
| 4598 // for code aging to work properly. |
| 4599 if (FLAG_optimize_for_size && FLAG_age_code) { |
| 4600 // Pre-age the code. |
| 4601 Code* stub = Code::GetPreAgedCodeAgeStub(isolate()); |
| 4602 nop(Assembler::CODE_AGE_MARKER_NOP); |
| 4603 // Save the function's original return address |
| 4604 // (it will be clobbered by Call(t9)) |
| 4605 mov(at, ra); |
| 4606 // Load the stub address to t9 and call it |
| 4607 li(t9, |
| 4608 Operand(reinterpret_cast<uint32_t>(stub->instruction_start()))); |
| 4609 Call(t9); |
| 4610 // Record the stub address in the empty space for GetCodeAgeAndParity() |
| 4611 dd(reinterpret_cast<uint32_t>(stub->instruction_start())); |
| 4612 } else { |
| 4613 Push(ra, fp, cp, a1); |
| 4614 nop(Assembler::CODE_AGE_SEQUENCE_NOP); |
| 4615 // Adjust fp to point to caller's fp. |
| 4616 Addu(fp, sp, Operand(2 * kPointerSize)); |
| 4617 } |
| 4618 } |
| 4619 } |
| 4620 |
| 4621 |
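The new `Prologue()` above emits one of two frame set-ups: a stub frame (ra, fp, cp plus a `StackFrame::STUB` marker) or a JS function frame whose leading instructions form the fixed-length code-aging sequence that the pre-aged branch replaces with a call to the pre-aged code stub. The sketch below is only an illustration of how a caller might use it and of the stack layout implied by the pushes shown above; `BUILD_FUNCTION_FRAME` is assumed to be the non-stub `PrologueFrameMode` value (it does not appear in this chunk), and the snippet relies on V8's MIPS macro-assembler headers.

```cpp
// Sketch only, not part of this CL.  BUILD_FUNCTION_FRAME is an assumed enum
// value; register roles are read off the pushes in Prologue() above.
void EmitFramePrologue(MacroAssembler* masm, bool building_stub) {
  masm->Prologue(building_stub ? BUILD_STUB_FRAME : BUILD_FUNCTION_FRAME);
  // Resulting layout, offsets from the new sp in pointers (higher addresses
  // first), as implied by Push(ra, fp, cp[, a1]) and Addu(fp, sp, 2 * kPointerSize):
  //   stub frame                      function frame (young code)
  //   +3: saved ra                    +3: saved ra
  //   +2: saved fp   <-- new fp       +2: saved fp   <-- new fp
  //   +1: cp                          +1: cp
  //   +0: Smi(StackFrame::STUB)       +0: a1 (assumed: the incoming function)
}
```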
| 4588 void MacroAssembler::EnterFrame(StackFrame::Type type) { | 4622 void MacroAssembler::EnterFrame(StackFrame::Type type) { |
| 4589 addiu(sp, sp, -5 * kPointerSize); | 4623 addiu(sp, sp, -5 * kPointerSize); |
| 4590 li(t8, Operand(Smi::FromInt(type))); | 4624 li(t8, Operand(Smi::FromInt(type))); |
| 4591 li(t9, Operand(CodeObject()), CONSTANT_SIZE); | 4625 li(t9, Operand(CodeObject()), CONSTANT_SIZE); |
| 4592 sw(ra, MemOperand(sp, 4 * kPointerSize)); | 4626 sw(ra, MemOperand(sp, 4 * kPointerSize)); |
| 4593 sw(fp, MemOperand(sp, 3 * kPointerSize)); | 4627 sw(fp, MemOperand(sp, 3 * kPointerSize)); |
| 4594 sw(cp, MemOperand(sp, 2 * kPointerSize)); | 4628 sw(cp, MemOperand(sp, 2 * kPointerSize)); |
| 4595 sw(t8, MemOperand(sp, 1 * kPointerSize)); | 4629 sw(t8, MemOperand(sp, 1 * kPointerSize)); |
| 4596 sw(t9, MemOperand(sp, 0 * kPointerSize)); | 4630 sw(t9, MemOperand(sp, 0 * kPointerSize)); |
| 4597 addiu(fp, sp, 3 * kPointerSize); | 4631 addiu(fp, sp, 3 * kPointerSize); |
| (...skipping 965 matching lines...) |
| 5563 bind(&in_bounds); | 5597 bind(&in_bounds); |
| 5564 cvt_w_d(temp_double_reg, input_reg); | 5598 cvt_w_d(temp_double_reg, input_reg); |
| 5565 mfc1(result_reg, temp_double_reg); | 5599 mfc1(result_reg, temp_double_reg); |
| 5566 bind(&done); | 5600 bind(&done); |
| 5567 } | 5601 } |
| 5568 | 5602 |
| 5569 | 5603 |
| 5570 void MacroAssembler::TestJSArrayForAllocationMemento( | 5604 void MacroAssembler::TestJSArrayForAllocationMemento( |
| 5571 Register receiver_reg, | 5605 Register receiver_reg, |
| 5572 Register scratch_reg, | 5606 Register scratch_reg, |
| 5607 Label* no_memento_found, |
| 5573 Condition cond, | 5608 Condition cond, |
| 5574 Label* allocation_memento_present) { | 5609 Label* allocation_memento_present) { |
| 5575 Label no_memento_available; | |
| 5576 ExternalReference new_space_start = | 5610 ExternalReference new_space_start = |
| 5577 ExternalReference::new_space_start(isolate()); | 5611 ExternalReference::new_space_start(isolate()); |
| 5578 ExternalReference new_space_allocation_top = | 5612 ExternalReference new_space_allocation_top = |
| 5579 ExternalReference::new_space_allocation_top_address(isolate()); | 5613 ExternalReference::new_space_allocation_top_address(isolate()); |
| 5580 Addu(scratch_reg, receiver_reg, | 5614 Addu(scratch_reg, receiver_reg, |
| 5581 Operand(JSArray::kSize + AllocationMemento::kSize - kHeapObjectTag)); | 5615 Operand(JSArray::kSize + AllocationMemento::kSize - kHeapObjectTag)); |
| 5582 Branch(&no_memento_available, lt, scratch_reg, Operand(new_space_start)); | 5616 Branch(no_memento_found, lt, scratch_reg, Operand(new_space_start)); |
| 5583 li(at, Operand(new_space_allocation_top)); | 5617 li(at, Operand(new_space_allocation_top)); |
| 5584 lw(at, MemOperand(at)); | 5618 lw(at, MemOperand(at)); |
| 5585 Branch(&no_memento_available, gt, scratch_reg, Operand(at)); | 5619 Branch(no_memento_found, gt, scratch_reg, Operand(at)); |
| 5586 lw(scratch_reg, MemOperand(scratch_reg, -AllocationMemento::kSize)); | 5620 lw(scratch_reg, MemOperand(scratch_reg, -AllocationMemento::kSize)); |
| 5587 Branch(allocation_memento_present, cond, scratch_reg, | 5621 if (allocation_memento_present) { |
| 5588 Operand(isolate()->factory()->allocation_memento_map())); | 5622 Branch(allocation_memento_present, cond, scratch_reg, |
| 5589 bind(&no_memento_available); | 5623 Operand(isolate()->factory()->allocation_memento_map())); |
| 5624 } |
| 5590 } | 5625 } |
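The signature change above replaces the old function-local `no_memento_available` label with a caller-supplied `no_memento_found` out-label, and makes `allocation_memento_present` optional (the final branch is now guarded by a null check). A possible caller is sketched below; the register and label names are invented for illustration, and the snippet assumes V8's MIPS macro-assembler with the modified signature shown above.

```cpp
// Illustration only; names are assumptions, not taken from this CL.
void BranchOnMemento(MacroAssembler* masm, Register receiver, Register scratch,
                     Label* found, Label* not_found) {
  // Branches to |not_found| when the word after the JSArray lies outside the
  // current new-space allocation window, and to |found| when that word is the
  // allocation-memento map.  If neither branch is taken, execution falls
  // through with the loaded map still in |scratch|.
  masm->TestJSArrayForAllocationMemento(receiver, scratch, not_found,
                                        eq, found);
}
```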
| 5591 | 5626 |
| 5592 | 5627 |
| 5593 Register GetRegisterThatIsNotOneOf(Register reg1, | 5628 Register GetRegisterThatIsNotOneOf(Register reg1, |
| 5594 Register reg2, | 5629 Register reg2, |
| 5595 Register reg3, | 5630 Register reg3, |
| 5596 Register reg4, | 5631 Register reg4, |
| 5597 Register reg5, | 5632 Register reg5, |
| 5598 Register reg6) { | 5633 Register reg6) { |
| 5599 RegList regs = 0; | 5634 RegList regs = 0; |
| (...skipping 74 matching lines...) |
| 5674 opcode == BGTZL); | 5709 opcode == BGTZL); |
| 5675 opcode = (cond == eq) ? BEQ : BNE; | 5710 opcode = (cond == eq) ? BEQ : BNE; |
| 5676 instr = (instr & ~kOpcodeMask) | opcode; | 5711 instr = (instr & ~kOpcodeMask) | opcode; |
| 5677 masm_.emit(instr); | 5712 masm_.emit(instr); |
| 5678 } | 5713 } |
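The patcher chunk above retargets an already-emitted branch by swapping only the major-opcode field of the instruction word and re-emitting it. Below is a standalone sketch of the same bit manipulation; the constants are the architectural MIPS values (major opcode in bits 31..26), written out here rather than taken from V8's constants-mips.h.

```cpp
// Standalone sketch of the opcode rewrite; not V8's implementation.
#include <cstdint>

constexpr uint32_t kOpcodeShift = 26;
constexpr uint32_t kOpcodeMask = 0x3Fu << kOpcodeShift;  // bits 31..26
constexpr uint32_t kBeq = 0x04u << kOpcodeShift;         // BEQ rs, rt, offset
constexpr uint32_t kBne = 0x05u << kOpcodeShift;         // BNE rs, rt, offset

// Retarget a branch to BEQ or BNE while keeping its register fields and
// 16-bit offset untouched.
uint32_t RewriteBranchCondition(uint32_t instr, bool want_equal) {
  return (instr & ~kOpcodeMask) | (want_equal ? kBeq : kBne);
}
```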
| 5679 | 5714 |
| 5680 | 5715 |
| 5681 } } // namespace v8::internal | 5716 } } // namespace v8::internal |
| 5682 | 5717 |
| 5683 #endif // V8_TARGET_ARCH_MIPS | 5718 #endif // V8_TARGET_ARCH_MIPS |