| OLD | NEW |
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 49 matching lines...) |
| 60 | 60 |
| 61 | 61 |
| 62 class StoreBufferOverflowStub: public PlatformCodeStub { | 62 class StoreBufferOverflowStub: public PlatformCodeStub { |
| 63 public: | 63 public: |
| 64 explicit StoreBufferOverflowStub(SaveFPRegsMode save_fp) | 64 explicit StoreBufferOverflowStub(SaveFPRegsMode save_fp) |
| 65 : save_doubles_(save_fp) { } | 65 : save_doubles_(save_fp) { } |
| 66 | 66 |
| 67 void Generate(MacroAssembler* masm); | 67 void Generate(MacroAssembler* masm); |
| 68 | 68 |
| 69 virtual bool IsPregenerated(); | 69 virtual bool IsPregenerated(); |
| 70 static void GenerateFixedRegStubsAheadOfTime(); | 70 static void GenerateFixedRegStubsAheadOfTime(Isolate* isolate); |
| 71 virtual bool SometimesSetsUpAFrame() { return false; } | 71 virtual bool SometimesSetsUpAFrame() { return false; } |
| 72 | 72 |
| 73 private: | 73 private: |
| 74 SaveFPRegsMode save_doubles_; | 74 SaveFPRegsMode save_doubles_; |
| 75 | 75 |
| 76 Major MajorKey() { return StoreBufferOverflow; } | 76 Major MajorKey() { return StoreBufferOverflow; } |
| 77 int MinorKey() { return (save_doubles_ == kSaveFPRegs) ? 1 : 0; } | 77 int MinorKey() { return (save_doubles_ == kSaveFPRegs) ? 1 : 0; } |
| 78 }; | 78 }; |
| 79 | 79 |
| 80 | 80 |
| (...skipping 225 matching lines...) |
| 306 the_heap_number_(the_heap_number), | 306 the_heap_number_(the_heap_number), |
| 307 scratch_(scratch), | 307 scratch_(scratch), |
| 308 sign_(scratch2) { | 308 sign_(scratch2) { |
| 309 ASSERT(IntRegisterBits::is_valid(the_int_.code())); | 309 ASSERT(IntRegisterBits::is_valid(the_int_.code())); |
| 310 ASSERT(HeapNumberRegisterBits::is_valid(the_heap_number_.code())); | 310 ASSERT(HeapNumberRegisterBits::is_valid(the_heap_number_.code())); |
| 311 ASSERT(ScratchRegisterBits::is_valid(scratch_.code())); | 311 ASSERT(ScratchRegisterBits::is_valid(scratch_.code())); |
| 312 ASSERT(SignRegisterBits::is_valid(sign_.code())); | 312 ASSERT(SignRegisterBits::is_valid(sign_.code())); |
| 313 } | 313 } |
| 314 | 314 |
| 315 bool IsPregenerated(); | 315 bool IsPregenerated(); |
| 316 static void GenerateFixedRegStubsAheadOfTime(); | 316 static void GenerateFixedRegStubsAheadOfTime(Isolate* isolate); |
| 317 | 317 |
| 318 private: | 318 private: |
| 319 Register the_int_; | 319 Register the_int_; |
| 320 Register the_heap_number_; | 320 Register the_heap_number_; |
| 321 Register scratch_; | 321 Register scratch_; |
| 322 Register sign_; | 322 Register sign_; |
| 323 | 323 |
| 324 // Minor key encoding in 16 bits. | 324 // Minor key encoding in 16 bits. |
| 325 class IntRegisterBits: public BitField<int, 0, 4> {}; | 325 class IntRegisterBits: public BitField<int, 0, 4> {}; |
| 326 class HeapNumberRegisterBits: public BitField<int, 4, 4> {}; | 326 class HeapNumberRegisterBits: public BitField<int, 4, 4> {}; |
| (...skipping 56 matching lines...) |
| 383 value) { // One scratch reg. | 383 value) { // One scratch reg. |
| 384 } | 384 } |
| 385 | 385 |
| 386 enum Mode { | 386 enum Mode { |
| 387 STORE_BUFFER_ONLY, | 387 STORE_BUFFER_ONLY, |
| 388 INCREMENTAL, | 388 INCREMENTAL, |
| 389 INCREMENTAL_COMPACTION | 389 INCREMENTAL_COMPACTION |
| 390 }; | 390 }; |
| 391 | 391 |
| 392 virtual bool IsPregenerated(); | 392 virtual bool IsPregenerated(); |
| 393 static void GenerateFixedRegStubsAheadOfTime(); | 393 static void GenerateFixedRegStubsAheadOfTime(Isolate* isolate); |
| 394 virtual bool SometimesSetsUpAFrame() { return false; } | 394 virtual bool SometimesSetsUpAFrame() { return false; } |
| 395 | 395 |
| 396 static void PatchBranchIntoNop(MacroAssembler* masm, int pos) { | 396 static void PatchBranchIntoNop(MacroAssembler* masm, int pos) { |
| 397 const unsigned offset = masm->instr_at(pos) & kImm16Mask; | 397 const unsigned offset = masm->instr_at(pos) & kImm16Mask; |
| 398 masm->instr_at_put(pos, BNE | (zero_reg.code() << kRsShift) | | 398 masm->instr_at_put(pos, BNE | (zero_reg.code() << kRsShift) | |
| 399 (zero_reg.code() << kRtShift) | (offset & kImm16Mask)); | 399 (zero_reg.code() << kRtShift) | (offset & kImm16Mask)); |
| 400 ASSERT(Assembler::IsBne(masm->instr_at(pos))); | 400 ASSERT(Assembler::IsBne(masm->instr_at(pos))); |
| 401 } | 401 } |
| 402 | 402 |
| 403 static void PatchNopIntoBranch(MacroAssembler* masm, int pos) { | 403 static void PatchNopIntoBranch(MacroAssembler* masm, int pos) { |
| (...skipping 381 matching lines...) |
| 785 | 785 |
| 786 class LookupModeBits: public BitField<LookupMode, 0, 1> {}; | 786 class LookupModeBits: public BitField<LookupMode, 0, 1> {}; |
| 787 | 787 |
| 788 LookupMode mode_; | 788 LookupMode mode_; |
| 789 }; | 789 }; |
| 790 | 790 |
| 791 | 791 |
| 792 } } // namespace v8::internal | 792 } } // namespace v8::internal |
| 793 | 793 |
| 794 #endif // V8_MIPS_CODE_STUBS_ARM_H_ | 794 #endif // V8_MIPS_CODE_STUBS_ARM_H_ |