OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 51 matching lines...)
62 }; | 62 }; |
63 | 63 |
64 | 64 |
65 class StoreBufferOverflowStub: public PlatformCodeStub { | 65 class StoreBufferOverflowStub: public PlatformCodeStub { |
66 public: | 66 public: |
67 explicit StoreBufferOverflowStub(SaveFPRegsMode save_fp) | 67 explicit StoreBufferOverflowStub(SaveFPRegsMode save_fp) |
68 : save_doubles_(save_fp) {} | 68 : save_doubles_(save_fp) {} |
69 | 69 |
70 void Generate(MacroAssembler* masm); | 70 void Generate(MacroAssembler* masm); |
71 | 71 |
72 virtual bool IsPregenerated() { return true; } | 72 virtual bool IsPregenerated(Isolate* isolate) V8_OVERRIDE { return true; } |
73 static void GenerateFixedRegStubsAheadOfTime(Isolate* isolate); | 73 static void GenerateFixedRegStubsAheadOfTime(Isolate* isolate); |
74 virtual bool SometimesSetsUpAFrame() { return false; } | 74 virtual bool SometimesSetsUpAFrame() { return false; } |
75 | 75 |
76 private: | 76 private: |
77 SaveFPRegsMode save_doubles_; | 77 SaveFPRegsMode save_doubles_; |
78 | 78 |
79 Major MajorKey() { return StoreBufferOverflow; } | 79 Major MajorKey() { return StoreBufferOverflow; } |
80 int MinorKey() { return (save_doubles_ == kSaveFPRegs) ? 1 : 0; } | 80 int MinorKey() { return (save_doubles_ == kSaveFPRegs) ? 1 : 0; } |
81 }; | 81 }; |
82 | 82 |
(...skipping 150 matching lines...)
233 : the_int_(the_int), | 233 : the_int_(the_int), |
234 the_heap_number_(the_heap_number), | 234 the_heap_number_(the_heap_number), |
235 scratch_(scratch), | 235 scratch_(scratch), |
236 sign_(scratch2) { | 236 sign_(scratch2) { |
237 ASSERT(IntRegisterBits::is_valid(the_int_.code())); | 237 ASSERT(IntRegisterBits::is_valid(the_int_.code())); |
238 ASSERT(HeapNumberRegisterBits::is_valid(the_heap_number_.code())); | 238 ASSERT(HeapNumberRegisterBits::is_valid(the_heap_number_.code())); |
239 ASSERT(ScratchRegisterBits::is_valid(scratch_.code())); | 239 ASSERT(ScratchRegisterBits::is_valid(scratch_.code())); |
240 ASSERT(SignRegisterBits::is_valid(sign_.code())); | 240 ASSERT(SignRegisterBits::is_valid(sign_.code())); |
241 } | 241 } |
242 | 242 |
243 bool IsPregenerated(); | 243 virtual bool IsPregenerated(Isolate* isolate) V8_OVERRIDE; |
244 static void GenerateFixedRegStubsAheadOfTime(Isolate* isolate); | 244 static void GenerateFixedRegStubsAheadOfTime(Isolate* isolate); |
245 | 245 |
246 private: | 246 private: |
247 Register the_int_; | 247 Register the_int_; |
248 Register the_heap_number_; | 248 Register the_heap_number_; |
249 Register scratch_; | 249 Register scratch_; |
250 Register sign_; | 250 Register sign_; |
251 | 251 |
252 // Minor key encoding in 16 bits. | 252 // Minor key encoding in 16 bits. |
253 class IntRegisterBits: public BitField<int, 0, 4> {}; | 253 class IntRegisterBits: public BitField<int, 0, 4> {}; |
(...skipping 55 matching lines...)
309 address, // An input reg. | 309 address, // An input reg. |
310 value) { // One scratch reg. | 310 value) { // One scratch reg. |
311 } | 311 } |
312 | 312 |
313 enum Mode { | 313 enum Mode { |
314 STORE_BUFFER_ONLY, | 314 STORE_BUFFER_ONLY, |
315 INCREMENTAL, | 315 INCREMENTAL, |
316 INCREMENTAL_COMPACTION | 316 INCREMENTAL_COMPACTION |
317 }; | 317 }; |
318 | 318 |
319 virtual bool IsPregenerated(); | 319 virtual bool IsPregenerated(Isolate* isolate) V8_OVERRIDE; |
320 static void GenerateFixedRegStubsAheadOfTime(Isolate* isolate); | 320 static void GenerateFixedRegStubsAheadOfTime(Isolate* isolate); |
321 virtual bool SometimesSetsUpAFrame() { return false; } | 321 virtual bool SometimesSetsUpAFrame() { return false; } |
322 | 322 |
323 static void PatchBranchIntoNop(MacroAssembler* masm, int pos) { | 323 static void PatchBranchIntoNop(MacroAssembler* masm, int pos) { |
324 const unsigned offset = masm->instr_at(pos) & kImm16Mask; | 324 const unsigned offset = masm->instr_at(pos) & kImm16Mask; |
325 masm->instr_at_put(pos, BNE | (zero_reg.code() << kRsShift) | | 325 masm->instr_at_put(pos, BNE | (zero_reg.code() << kRsShift) | |
326 (zero_reg.code() << kRtShift) | (offset & kImm16Mask)); | 326 (zero_reg.code() << kRtShift) | (offset & kImm16Mask)); |
327 ASSERT(Assembler::IsBne(masm->instr_at(pos))); | 327 ASSERT(Assembler::IsBne(masm->instr_at(pos))); |
328 } | 328 } |
329 | 329 |
(...skipping 231 matching lines...)
561 | 561 |
562 class LookupModeBits: public BitField<LookupMode, 0, 1> {}; | 562 class LookupModeBits: public BitField<LookupMode, 0, 1> {}; |
563 | 563 |
564 LookupMode mode_; | 564 LookupMode mode_; |
565 }; | 565 }; |
566 | 566 |
567 | 567 |
568 } } // namespace v8::internal | 568 } } // namespace v8::internal |
569 | 569 |
570 #endif // V8_MIPS_CODE_STUBS_ARM_H_ | 570 #endif // V8_MIPS_CODE_STUBS_ARM_H_ |
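
The recurring change in this diff is IsPregenerated() gaining an Isolate* parameter and a V8_OVERRIDE annotation (which expands to C++11 'override' on toolchains that support it). A minimal sketch of why the annotation earns its keep during a signature change like this one, using hypothetical types rather than the real V8 class hierarchy:

#define V8_OVERRIDE override  // What the macro expands to under C++11.

struct Isolate;  // Stand-in for v8::internal::Isolate.

struct CodeStub {
  virtual ~CodeStub() {}
  // The base method now takes the isolate.
  virtual bool IsPregenerated(Isolate* isolate) { return false; }
};

struct UpdatedStub : public CodeStub {
  // Matches the new base signature, so the compiler accepts the override.
  virtual bool IsPregenerated(Isolate* isolate) V8_OVERRIDE { return true; }
};

struct StaleStub : public CodeStub {
  // A stub left on the old signature would silently declare a new,
  // unrelated virtual that the stub machinery never calls. With the
  // annotation, the mistake becomes a compile error instead:
  //   virtual bool IsPregenerated() V8_OVERRIDE;  // error: does not override
};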
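
The WriteInt32ToHeapNumberStub context also shows how the 16-bit minor key is assembled: each of the four registers gets a 4-bit field via the BitField template. An illustrative mirror of that pattern follows; the real template lives in V8's utils.h, and the shifts for the last three fields are an assumption (the diff only shows IntRegisterBits), inferred from the "Minor key encoding in 16 bits" comment.

#include <cstdint>

// Illustrative mirror of V8's BitField template: kShift/kSize carve a
// field out of a 32-bit key.
template <class T, int kShift, int kSize>
struct BitField {
  static const uint32_t kMask = ((1u << kSize) - 1) << kShift;
  static bool is_valid(T value) {
    return (static_cast<uint32_t>(value) & ~((1u << kSize) - 1)) == 0;
  }
  static uint32_t encode(T value) {
    return static_cast<uint32_t>(value) << kShift;
  }
  static T decode(uint32_t key) {
    return static_cast<T>((key & kMask) >> kShift);
  }
};

// Four 4-bit fields: each holds a register code 0..15, which the
// constructor's ASSERT(...is_valid(...)) lines enforce. The shifts for
// the last three are assumed, not shown in this diff.
typedef BitField<int, 0, 4>  IntRegisterBits;
typedef BitField<int, 4, 4>  HeapNumberRegisterBits;
typedef BitField<int, 8, 4>  ScratchRegisterBits;
typedef BitField<int, 12, 4> SignRegisterBits;

// Packing register codes 2, 3, 4, 5 yields minor key 0x5432.
uint32_t ExampleMinorKey() {
  return IntRegisterBits::encode(2) |
         HeapNumberRegisterBits::encode(3) |
         ScratchRegisterBits::encode(4) |
         SignRegisterBits::encode(5);
}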
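
PatchBranchIntoNop, unchanged in this diff, relies on a MIPS encoding trick: BNE with rs = rt = $zero compares the zero register against itself, so the branch is never taken and the instruction behaves as a nop while preserving the original 16-bit offset for later re-patching. A standalone sketch of the same bit surgery, written against the standard MIPS I-type layout (opcode:6 rs:5 rt:5 imm:16) rather than V8's Assembler:

#include <cstdint>

const uint32_t BNE = 5u << 26;       // MIPS I-type opcode 0b000101.
const int      kRsShift = 21;
const int      kRtShift = 16;
const uint32_t kImm16Mask = 0xFFFF;

uint32_t PatchBranchIntoNop(uint32_t instr) {
  uint32_t offset = instr & kImm16Mask;  // Keep the old branch offset.
  return BNE | (0u << kRsShift)          // rs = $zero
             | (0u << kRtShift)          // rt = $zero: never not-equal
             | (offset & kImm16Mask);    // offset survives for re-patching
}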