OLD | NEW |
---|---|
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 741 matching lines...) | |
752 } else { | 752 } else { |
753 srl(at, rs, rt.imm32_); | 753 srl(at, rs, rt.imm32_); |
754 sll(rd, rs, (0x20 - rt.imm32_) & 0x1f); | 754 sll(rd, rs, (0x20 - rt.imm32_) & 0x1f); |
755 or_(rd, rd, at); | 755 or_(rd, rd, at); |
756 } | 756 } |
757 } | 757 } |
758 } | 758 } |
759 } | 759 } |
760 | 760 |
761 | 761 |
762 static const int kInvalidRootIndex = -1; | |
763 | |
764 int MacroAssembler::FindRootIndex(Object* heap_object) { | |
765 Heap* heap = HEAP; | |
766 if (heap->InNewSpace(heap_object)) return kInvalidRootIndex; | |
767 for (int i = 0; i < Heap::kRootListLength; i++) { | |
768 Object* root = heap->roots_array_start()[i]; | |
769 if (!root->IsSmi() && root == heap_object) return i; | |
770 } | |
771 return kInvalidRootIndex; | |
772 } | |
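For context on why this lookup pays off: when a constant turns out to be a root-array entry, it can be loaded relative to the root-array register instead of as a lui/ori pair. On MIPS at this revision, LoadRoot is a single lw off s6; the sketch below is recalled from the surrounding macro assembler rather than shown in this hunk, so treat the exact body as an assumption.

```cpp
// Sketch of the existing LoadRoot helper the new path relies on (already in
// this file, not part of the patch): one lw relative to the MIPS root-array
// register s6, with no relocation info attached.
void MacroAssembler::LoadRoot(Register destination,
                              Heap::RootListIndex index) {
  lw(destination, MemOperand(s6, index << kPointerSizeLog2));
}
```

That single instruction with no reloc info is the size win the OPTIMIZE_SIZE path in li() below is chasing, compared with two relocatable instructions.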
773 | |
774 | |
762 //------------Pseudo-instructions------------- | 775 //------------Pseudo-instructions------------- |
763 | 776 |
764 void MacroAssembler::li(Register rd, Operand j, bool gen2instr) { | 777 void MacroAssembler::li(Register rd, Operand j, LiFlags mode) { |
765 ASSERT(!j.is_reg()); | 778 ASSERT(!j.is_reg()); |
766 BlockTrampolinePoolScope block_trampoline_pool(this); | 779 BlockTrampolinePoolScope block_trampoline_pool(this); |
767 if (!MustUseReg(j.rmode_) && !gen2instr) { | 780 if (!MustUseReg(j.rmode_) && mode == OPTIMIZE_SIZE) { |
768 // Normal load of an immediate value which does not need Relocation Info. | 781 // Normal load of an immediate value which does not need Relocation Info. |
769 if (is_int16(j.imm32_)) { | 782 if (is_int16(j.imm32_)) { |
770 addiu(rd, zero_reg, j.imm32_); | 783 addiu(rd, zero_reg, j.imm32_); |
771 } else if (!(j.imm32_ & kHiMask)) { | 784 } else if (!(j.imm32_ & kHiMask)) { |
772 ori(rd, zero_reg, j.imm32_); | 785 ori(rd, zero_reg, j.imm32_); |
773 } else if (!(j.imm32_ & kImm16Mask)) { | 786 } else if (!(j.imm32_ & kImm16Mask)) { |
774 lui(rd, (j.imm32_ >> kLuiShift) & kImm16Mask); | 787 lui(rd, (j.imm32_ >> kLuiShift) & kImm16Mask); |
775 } else { | 788 } else { |
776 lui(rd, (j.imm32_ >> kLuiShift) & kImm16Mask); | 789 lui(rd, (j.imm32_ >> kLuiShift) & kImm16Mask); |
777 ori(rd, rd, (j.imm32_ & kImm16Mask)); | 790 ori(rd, rd, (j.imm32_ & kImm16Mask)); |
778 } | 791 } |
779 } else if (MustUseReg(j.rmode_) || gen2instr) { | 792 } else if (can_use_relative_load(j.rmode_) && mode == OPTIMIZE_SIZE) { |
793 int32_t index = FindRootIndex(*(reinterpret_cast<Object**>(j.imm32_))); | |
Erik Corry 2012/02/29 14:38:13: This will be too slow. I suggest you make it into …
kalmard 2012/03/12 12:40:00: The FindRootIndex() lookup only happens at seriali…
Erik Corry 2012/03/17 02:49:18: In that case you should assert that the Serializer …
794 if (index != kInvalidRootIndex) { | |
795 // Replace lui/ori pair for references that are found in root array with | |
796 // relative load using LoadRoot with no relocation info. | |
797 LoadRoot(rd, static_cast<Heap::RootListIndex>(index)); | |
798 } else { | |
799 if (MustUseReg(j.rmode_)) { | |
800 RecordRelocInfo(j.rmode_, j.imm32_); | |
801 } | |
802 lui(rd, (j.imm32_ >> kLuiShift) & kImm16Mask); | |
803 ori(rd, rd, (j.imm32_ & kImm16Mask)); | |
804 } | |
805 } else { | |
780 if (MustUseReg(j.rmode_)) { | 806 if (MustUseReg(j.rmode_)) { |
781 RecordRelocInfo(j.rmode_, j.imm32_); | 807 RecordRelocInfo(j.rmode_, j.imm32_); |
782 } | 808 } |
783 // We always need the same number of instructions as we may need to patch | 809 // We always need the same number of instructions as we may need to patch |
784 // this code to load another value which may need 2 instructions to load. | 810 // this code to load another value which may need 2 instructions to load. |
785 lui(rd, (j.imm32_ >> kLuiShift) & kImm16Mask); | 811 lui(rd, (j.imm32_ >> kLuiShift) & kImm16Mask); |
786 ori(rd, rd, (j.imm32_ & kImm16Mask)); | 812 ori(rd, rd, (j.imm32_ & kImm16Mask)); |
787 } | 813 } |
788 } | 814 } |
789 | 815 |
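A note on the signature change above: the old bool gen2instr argument becomes a LiFlags mode. The enum itself is declared in macro-assembler-mips.h rather than in this hunk; the following is a minimal sketch of what the call sites in this patch assume, not a quote of the header.

```cpp
// Minimal sketch of the LiFlags enum assumed by the li() call sites in this
// patch (declared in macro-assembler-mips.h, not shown in this hunk).
enum LiFlags {
  // Emit whatever is shortest: one instruction when the immediate fits,
  // otherwise the lui/ori pair.
  OPTIMIZE_SIZE = 0,
  // Always emit the full two-instruction lui/ori sequence so the code size is
  // fixed and the loaded constant can be patched later.
  CONSTANT_SIZE = 1
};
```

Call targets and the CodeObject() loads in the frame-setup code further down pass CONSTANT_SIZE, since those constants may be patched after the code is emitted and the sequence must keep its fixed size.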
(...skipping 1587 matching lines...) | |
2377 Register rs, | 2403 Register rs, |
2378 const Operand& rt, | 2404 const Operand& rt, |
2379 BranchDelaySlot bd) { | 2405 BranchDelaySlot bd) { |
2380 BlockTrampolinePoolScope block_trampoline_pool(this); | 2406 BlockTrampolinePoolScope block_trampoline_pool(this); |
2381 Label start; | 2407 Label start; |
2382 bind(&start); | 2408 bind(&start); |
2383 int32_t target_int = reinterpret_cast<int32_t>(target); | 2409 int32_t target_int = reinterpret_cast<int32_t>(target); |
2384 // Must record previous source positions before the | 2410 // Must record previous source positions before the |
2385 // li() generates a new code target. | 2411 // li() generates a new code target. |
2386 positions_recorder()->WriteRecordedPositions(); | 2412 positions_recorder()->WriteRecordedPositions(); |
2387 li(t9, Operand(target_int, rmode), true); | 2413 li(t9, Operand(target_int, rmode), CONSTANT_SIZE); |
2388 Call(t9, cond, rs, rt, bd); | 2414 Call(t9, cond, rs, rt, bd); |
2389 ASSERT_EQ(CallSize(target, rmode, cond, rs, rt, bd), | 2415 ASSERT_EQ(CallSize(target, rmode, cond, rs, rt, bd), |
2390 SizeOfCodeGeneratedSince(&start)); | 2416 SizeOfCodeGeneratedSince(&start)); |
2391 } | 2417 } |
2392 | 2418 |
2393 | 2419 |
2394 int MacroAssembler::CallSize(Handle<Code> code, | 2420 int MacroAssembler::CallSize(Handle<Code> code, |
2395 RelocInfo::Mode rmode, | 2421 RelocInfo::Mode rmode, |
2396 unsigned ast_id, | 2422 unsigned ast_id, |
2397 Condition cond, | 2423 Condition cond, |
(...skipping 188 matching lines...) | |
2586 STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize); | 2612 STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize); |
2587 STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize); | 2613 STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize); |
2588 | 2614 |
2589 // For the JSEntry handler, we must preserve a0-a3 and s0. | 2615 // For the JSEntry handler, we must preserve a0-a3 and s0. |
2590 // t1-t3 are available. We will build up the handler from the bottom by | 2616 // t1-t3 are available. We will build up the handler from the bottom by |
2591 // pushing on the stack. | 2617 // pushing on the stack. |
2592 // Set up the code object (t1) and the state (t2) for pushing. | 2618 // Set up the code object (t1) and the state (t2) for pushing. |
2593 unsigned state = | 2619 unsigned state = |
2594 StackHandler::IndexField::encode(handler_index) | | 2620 StackHandler::IndexField::encode(handler_index) | |
2595 StackHandler::KindField::encode(kind); | 2621 StackHandler::KindField::encode(kind); |
2596 li(t1, Operand(CodeObject())); | 2622 li(t1, Operand(CodeObject()), CONSTANT_SIZE); |
2597 li(t2, Operand(state)); | 2623 li(t2, Operand(state)); |
2598 | 2624 |
2599 // Push the frame pointer, context, state, and code object. | 2625 // Push the frame pointer, context, state, and code object. |
2600 if (kind == StackHandler::JS_ENTRY) { | 2626 if (kind == StackHandler::JS_ENTRY) { |
2601 ASSERT_EQ(Smi::FromInt(0), 0); | 2627 ASSERT_EQ(Smi::FromInt(0), 0); |
2602 // The second zero_reg indicates no context. | 2628 // The second zero_reg indicates no context. |
2603 // The first zero_reg is the NULL frame pointer. | 2629 // The first zero_reg is the NULL frame pointer. |
2604 // The operands are reversed to match the order of MultiPush/Pop. | 2630 // The operands are reversed to match the order of MultiPush/Pop. |
2605 Push(zero_reg, zero_reg, t2, t1); | 2631 Push(zero_reg, zero_reg, t2, t1); |
2606 } else { | 2632 } else { |
(...skipping 1711 matching lines...) | |
4318 bind(&fail); | 4344 bind(&fail); |
4319 Abort("Global functions must have initial map"); | 4345 Abort("Global functions must have initial map"); |
4320 bind(&ok); | 4346 bind(&ok); |
4321 } | 4347 } |
4322 } | 4348 } |
4323 | 4349 |
4324 | 4350 |
4325 void MacroAssembler::EnterFrame(StackFrame::Type type) { | 4351 void MacroAssembler::EnterFrame(StackFrame::Type type) { |
4326 addiu(sp, sp, -5 * kPointerSize); | 4352 addiu(sp, sp, -5 * kPointerSize); |
4327 li(t8, Operand(Smi::FromInt(type))); | 4353 li(t8, Operand(Smi::FromInt(type))); |
4328 li(t9, Operand(CodeObject())); | 4354 li(t9, Operand(CodeObject()), CONSTANT_SIZE); |
4329 sw(ra, MemOperand(sp, 4 * kPointerSize)); | 4355 sw(ra, MemOperand(sp, 4 * kPointerSize)); |
4330 sw(fp, MemOperand(sp, 3 * kPointerSize)); | 4356 sw(fp, MemOperand(sp, 3 * kPointerSize)); |
4331 sw(cp, MemOperand(sp, 2 * kPointerSize)); | 4357 sw(cp, MemOperand(sp, 2 * kPointerSize)); |
4332 sw(t8, MemOperand(sp, 1 * kPointerSize)); | 4358 sw(t8, MemOperand(sp, 1 * kPointerSize)); |
4333 sw(t9, MemOperand(sp, 0 * kPointerSize)); | 4359 sw(t9, MemOperand(sp, 0 * kPointerSize)); |
4334 addiu(fp, sp, 3 * kPointerSize); | 4360 addiu(fp, sp, 3 * kPointerSize); |
4335 } | 4361 } |
4336 | 4362 |
4337 | 4363 |
4338 void MacroAssembler::LeaveFrame(StackFrame::Type type) { | 4364 void MacroAssembler::LeaveFrame(StackFrame::Type type) { |
(...skipping 23 matching lines...) | |
4362 // Save registers. | 4388 // Save registers. |
4363 addiu(sp, sp, -4 * kPointerSize); | 4389 addiu(sp, sp, -4 * kPointerSize); |
4364 sw(ra, MemOperand(sp, 3 * kPointerSize)); | 4390 sw(ra, MemOperand(sp, 3 * kPointerSize)); |
4365 sw(fp, MemOperand(sp, 2 * kPointerSize)); | 4391 sw(fp, MemOperand(sp, 2 * kPointerSize)); |
4366 addiu(fp, sp, 2 * kPointerSize); // Set up new frame pointer. | 4392 addiu(fp, sp, 2 * kPointerSize); // Set up new frame pointer. |
4367 | 4393 |
4368 if (emit_debug_code()) { | 4394 if (emit_debug_code()) { |
4369 sw(zero_reg, MemOperand(fp, ExitFrameConstants::kSPOffset)); | 4395 sw(zero_reg, MemOperand(fp, ExitFrameConstants::kSPOffset)); |
4370 } | 4396 } |
4371 | 4397 |
4372 li(t8, Operand(CodeObject())); // Accessed from ExitFrame::code_slot. | 4398 // Accessed from ExitFrame::code_slot. |
4399 li(t8, Operand(CodeObject()), CONSTANT_SIZE); | |
4373 sw(t8, MemOperand(fp, ExitFrameConstants::kCodeOffset)); | 4400 sw(t8, MemOperand(fp, ExitFrameConstants::kCodeOffset)); |
4374 | 4401 |
4375 // Save the frame pointer and the context in top. | 4402 // Save the frame pointer and the context in top. |
4376 li(t8, Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate()))); | 4403 li(t8, Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate()))); |
4377 sw(fp, MemOperand(t8)); | 4404 sw(fp, MemOperand(t8)); |
4378 li(t8, Operand(ExternalReference(Isolate::kContextAddress, isolate()))); | 4405 li(t8, Operand(ExternalReference(Isolate::kContextAddress, isolate()))); |
4379 sw(cp, MemOperand(t8)); | 4406 sw(cp, MemOperand(t8)); |
4380 | 4407 |
4381 const int frame_alignment = MacroAssembler::ActivationFrameAlignment(); | 4408 const int frame_alignment = MacroAssembler::ActivationFrameAlignment(); |
4382 if (save_doubles) { | 4409 if (save_doubles) { |
(...skipping 814 matching lines...) | |
5197 opcode == BGTZL); | 5224 opcode == BGTZL); |
5198 opcode = (cond == eq) ? BEQ : BNE; | 5225 opcode = (cond == eq) ? BEQ : BNE; |
5199 instr = (instr & ~kOpcodeMask) | opcode; | 5226 instr = (instr & ~kOpcodeMask) | opcode; |
5200 masm_.emit(instr); | 5227 masm_.emit(instr); |
5201 } | 5228 } |
5202 | 5229 |
5203 | 5230 |
5204 } } // namespace v8::internal | 5231 } } // namespace v8::internal |
5205 | 5232 |
5206 #endif // V8_TARGET_ARCH_MIPS | 5233 #endif // V8_TARGET_ARCH_MIPS |