| OLD | NEW | 
|     1 // Copyright 2012 the V8 project authors. All rights reserved. |     1 // Copyright 2012 the V8 project authors. All rights reserved. | 
|     2 // Redistribution and use in source and binary forms, with or without |     2 // Redistribution and use in source and binary forms, with or without | 
|     3 // modification, are permitted provided that the following conditions are |     3 // modification, are permitted provided that the following conditions are | 
|     4 // met: |     4 // met: | 
|     5 // |     5 // | 
|     6 //     * Redistributions of source code must retain the above copyright |     6 //     * Redistributions of source code must retain the above copyright | 
|     7 //       notice, this list of conditions and the following disclaimer. |     7 //       notice, this list of conditions and the following disclaimer. | 
|     8 //     * Redistributions in binary form must reproduce the above |     8 //     * Redistributions in binary form must reproduce the above | 
|     9 //       copyright notice, this list of conditions and the following |     9 //       copyright notice, this list of conditions and the following | 
|    10 //       disclaimer in the documentation and/or other materials provided |    10 //       disclaimer in the documentation and/or other materials provided | 
| (...skipping 65 matching lines...) | 
|    76  |    76  | 
|    77 void MacroAssembler::StoreRoot(Register source, |    77 void MacroAssembler::StoreRoot(Register source, | 
|    78                                Heap::RootListIndex index, |    78                                Heap::RootListIndex index, | 
|    79                                Condition cond, |    79                                Condition cond, | 
|    80                                Register src1, const Operand& src2) { |    80                                Register src1, const Operand& src2) { | 
|    81   Branch(2, NegateCondition(cond), src1, src2); |    81   Branch(2, NegateCondition(cond), src1, src2); | 
|    82   sw(source, MemOperand(s6, index << kPointerSizeLog2)); |    82   sw(source, MemOperand(s6, index << kPointerSizeLog2)); | 
|    83 } |    83 } | 
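
Note: StoreRoot uses a skip-branch idiom: `Branch(2, NegateCondition(cond), src1, src2)` jumps forward past the `sw` when the condition fails, so the root is stored only when `cond` holds for `src1` vs. `src2`. A minimal standalone sketch of that control flow (illustrative C++, not the real macro-assembler API):

```cpp
// Sketch of StoreRoot's control flow, assuming Branch(2, ...) skips the
// following store instruction; an illustration, not V8 code.
void StoreRootSketch(bool cond_holds, int source, int* root_array, int index) {
  if (!cond_holds) return;     // Branch(2, NegateCondition(cond), src1, src2)
  root_array[index] = source;  // sw source, [s6 + (index << kPointerSizeLog2)]
}
```
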
|    84  |    84  | 
|    85  |    85  | 
|    86 void MacroAssembler::LoadHeapObject(Register result, |  | 
|    87                                     Handle<HeapObject> object) { |  | 
|    88   AllowDeferredHandleDereference using_raw_address; |  | 
|    89   if (isolate()->heap()->InNewSpace(*object)) { |  | 
|    90     Handle<Cell> cell = isolate()->factory()->NewCell(object); |  | 
|    91     li(result, Operand(cell)); |  | 
|    92     lw(result, FieldMemOperand(result, Cell::kValueOffset)); |  | 
|    93   } else { |  | 
|    94     li(result, Operand(object)); |  | 
|    95   } |  | 
|    96 } |  | 
|    97  |  | 
|    98  |  | 
|    99 // Push and pop all registers that can hold pointers. |    86 // Push and pop all registers that can hold pointers. | 
|   100 void MacroAssembler::PushSafepointRegisters() { |    87 void MacroAssembler::PushSafepointRegisters() { | 
|   101   // Safepoints expect a block of kNumSafepointRegisters values on the |    88   // Safepoints expect a block of kNumSafepointRegisters values on the | 
|   102   // stack, so adjust the stack for unsaved registers. |    89   // stack, so adjust the stack for unsaved registers. | 
|   103   const int num_unsaved = kNumSafepointRegisters - kNumSafepointSavedRegisters; |    90   const int num_unsaved = kNumSafepointRegisters - kNumSafepointSavedRegisters; | 
|   104   ASSERT(num_unsaved >= 0); |    91   ASSERT(num_unsaved >= 0); | 
|   105   if (num_unsaved > 0) { |    92   if (num_unsaved > 0) { | 
|   106     Subu(sp, sp, Operand(num_unsaved * kPointerSize)); |    93     Subu(sp, sp, Operand(num_unsaved * kPointerSize)); | 
|   107   } |    94   } | 
|   108   MultiPush(kSafepointSavedRegisters); |    95   MultiPush(kSafepointSavedRegisters); | 
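
The stack adjustment above exists because safepoint records assume a fixed block of kNumSafepointRegisters slots on the stack, while MultiPush only stores the saved subset; the unsaved slots are padding. A worked sketch of the arithmetic, using hypothetical register counts rather than the real MIPS constants:

```cpp
#include <cassert>

// Bytes of padding pushed before the saved registers so the on-stack
// block spans exactly num_registers slots. Counts here are made up.
int UnsavedSlotBytes(int num_registers, int num_saved, int pointer_size) {
  int num_unsaved = num_registers - num_saved;
  assert(num_unsaved >= 0);           // mirrors the ASSERT above
  return num_unsaved * pointer_size;  // 0 when every register is pushed
}

// Example: with 24 safepoint slots, 14 saved registers, and 4-byte
// pointers, sp is lowered by UnsavedSlotBytes(24, 14, 4) == 40 bytes
// before MultiPush stores the 14 saved registers.
```
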
| (...skipping 652 matching lines...) | 
|   761         sll(rd, rs, (0x20 - rt.imm32_) & 0x1f); |   748         sll(rd, rs, (0x20 - rt.imm32_) & 0x1f); | 
|   762         or_(rd, rd, at); |   749         or_(rd, rd, at); | 
|   763       } |   750       } | 
|   764     } |   751     } | 
|   765   } |   752   } | 
|   766 } |   753 } | 
|   767  |   754  | 
|   768  |   755  | 
|   769 //------------Pseudo-instructions------------- |   756 //------------Pseudo-instructions------------- | 
|   770  |   757  | 
 |   758 void MacroAssembler::li(Register dst, Handle<Object> value, LiFlags mode) { | 
 |   759   AllowDeferredHandleDereference smi_check; | 
 |   760   if (value->IsSmi()) { | 
 |   761     li(dst, Operand(value), mode); | 
 |   762   } else { | 
 |   763     ASSERT(value->IsHeapObject()); | 
 |   764     if (isolate()->heap()->InNewSpace(*value)) { | 
 |   765       Handle<Cell> cell = isolate()->factory()->NewCell(value); | 
 |   766       li(dst, Operand(cell)); | 
 |   767       lw(dst, FieldMemOperand(dst, Cell::kValueOffset)); | 
 |   768     } else { | 
 |   769       li(dst, Operand(value)); | 
 |   770     } | 
 |   771   } | 
 |   772 } | 
 |   773  | 
 |   774  | 
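
The new li overload folds the removed LoadHeapObject logic into a single entry point: smis load as plain immediates, heap objects in new space are reached through a Cell (the GC can move new-space objects, and the Cell gives it a stable slot to update, whereas an address embedded in the instruction stream would go stale), and old-space objects are embedded directly with relocation info. A standalone sketch of that dispatch (illustrative C++, not the V8 types):

```cpp
#include <cstdio>

enum class ValueKind { kSmi, kNewSpaceObject, kOldSpaceObject };

// Models the three paths in the new li(Register, Handle<Object>, LiFlags)
// overload; the emitted "instructions" are printed for illustration.
void LiSketch(ValueKind kind) {
  switch (kind) {
    case ValueKind::kSmi:
      std::puts("li dst, <smi immediate>");
      break;
    case ValueKind::kNewSpaceObject:
      // The Cell lives in old space; its value slot always holds the
      // object's current address, so the load stays valid across GCs.
      std::puts("li dst, <cell>");
      std::puts("lw dst, [dst + Cell::kValueOffset]");
      break;
    case ValueKind::kOldSpaceObject:
      std::puts("li dst, <object>  # immediate with relocation info");
      break;
  }
}
```
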
|   771 void MacroAssembler::li(Register rd, Operand j, LiFlags mode) { |   775 void MacroAssembler::li(Register rd, Operand j, LiFlags mode) { | 
|   772   ASSERT(!j.is_reg()); |   776   ASSERT(!j.is_reg()); | 
|   773   BlockTrampolinePoolScope block_trampoline_pool(this); |   777   BlockTrampolinePoolScope block_trampoline_pool(this); | 
|   774   if (!MustUseReg(j.rmode_) && mode == OPTIMIZE_SIZE) { |   778   if (!MustUseReg(j.rmode_) && mode == OPTIMIZE_SIZE) { | 
|   775     // Normal load of an immediate value which does not need Relocation Info. |   779     // Normal load of an immediate value which does not need Relocation Info. | 
|   776     if (is_int16(j.imm32_)) { |   780     if (is_int16(j.imm32_)) { | 
|   777       addiu(rd, zero_reg, j.imm32_); |   781       addiu(rd, zero_reg, j.imm32_); | 
|   778     } else if (!(j.imm32_ & kHiMask)) { |   782     } else if (!(j.imm32_ & kHiMask)) { | 
|   779       ori(rd, zero_reg, j.imm32_); |   783       ori(rd, zero_reg, j.imm32_); | 
|   780     } else if (!(j.imm32_ & kImm16Mask)) { |   784     } else if (!(j.imm32_ & kImm16Mask)) { | 
| (...skipping 2909 matching lines...) | 
|  3690 void MacroAssembler::InvokeFunction(Handle<JSFunction> function, |  3694 void MacroAssembler::InvokeFunction(Handle<JSFunction> function, | 
|  3691                                     const ParameterCount& expected, |  3695                                     const ParameterCount& expected, | 
|  3692                                     const ParameterCount& actual, |  3696                                     const ParameterCount& actual, | 
|  3693                                     InvokeFlag flag, |  3697                                     InvokeFlag flag, | 
|  3694                                     const CallWrapper& call_wrapper, |  3698                                     const CallWrapper& call_wrapper, | 
|  3695                                     CallKind call_kind) { |  3699                                     CallKind call_kind) { | 
|  3696   // You can't call a function without a valid frame. |  3700   // You can't call a function without a valid frame. | 
|  3697   ASSERT(flag == JUMP_FUNCTION || has_frame()); |  3701   ASSERT(flag == JUMP_FUNCTION || has_frame()); | 
|  3698  |  3702  | 
|  3699   // Get the function and setup the context. |  3703   // Get the function and setup the context. | 
|  3700   LoadHeapObject(a1, function); |  3704   li(a1, function); | 
|  3701   lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset)); |  3705   lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset)); | 
|  3702  |  3706  | 
|  3703   // We call indirectly through the code field in the function to |  3707   // We call indirectly through the code field in the function to | 
|  3704   // allow recompilation to take effect without changing any of the |  3708   // allow recompilation to take effect without changing any of the | 
|  3705   // call sites. |  3709   // call sites. | 
|  3706   lw(a3, FieldMemOperand(a1, JSFunction::kCodeEntryOffset)); |  3710   lw(a3, FieldMemOperand(a1, JSFunction::kCodeEntryOffset)); | 
|  3707   InvokeCode(a3, expected, actual, flag, call_wrapper, call_kind); |  3711   InvokeCode(a3, expected, actual, flag, call_wrapper, call_kind); | 
|  3708 } |  3712 } | 
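
The comment above states the key invariant: the call site loads the code entry out of the JSFunction on every invocation, so recompilation only has to swap that field. A standalone illustration of the indirection (not V8 code):

```cpp
#include <cstdio>

struct Function {
  void (*code_entry)();  // stands in for JSFunction::kCodeEntryOffset
};

void Unoptimized() { std::puts("unoptimized"); }
void Optimized()   { std::puts("optimized"); }

int main() {
  Function f{&Unoptimized};
  f.code_entry();             // call site reads the field, then calls
  f.code_entry = &Optimized;  // recompilation retargets the field...
  f.code_entry();             // ...and the unchanged call site runs new code
  return 0;
}
```
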
|  3709  |  3713  | 
|  3710  |  3714  | 
| (...skipping 1999 matching lines...) | 
|  5710        opcode == BGTZL); |  5714        opcode == BGTZL); | 
|  5711   opcode = (cond == eq) ? BEQ : BNE; |  5715   opcode = (cond == eq) ? BEQ : BNE; | 
|  5712   instr = (instr & ~kOpcodeMask) | opcode; |  5716   instr = (instr & ~kOpcodeMask) | opcode; | 
|  5713   masm_.emit(instr); |  5717   masm_.emit(instr); | 
|  5714 } |  5718 } | 
|  5715  |  5719  | 
|  5716  |  5720  | 
|  5717 } }  // namespace v8::internal |  5721 } }  // namespace v8::internal | 
|  5718  |  5722  | 
|  5719 #endif  // V8_TARGET_ARCH_MIPS |  5723 #endif  // V8_TARGET_ARCH_MIPS | 