| OLD | NEW |
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #ifndef V8_MIPS_CODE_STUBS_ARM_H_ | 5 #ifndef V8_MIPS_CODE_STUBS_ARM_H_ |
| 6 #define V8_MIPS_CODE_STUBS_ARM_H_ | 6 #define V8_MIPS_CODE_STUBS_ARM_H_ |
| 7 | 7 |
| 8 namespace v8 { | 8 namespace v8 { |
| 9 namespace internal { | 9 namespace internal { |
| 10 | 10 |
| (...skipping 47 matching lines...) |
| 58 | 58 |
| 59 | 59 |
| 60 class StoreRegistersStateStub: public PlatformCodeStub { | 60 class StoreRegistersStateStub: public PlatformCodeStub { |
| 61 public: | 61 public: |
| 62 explicit StoreRegistersStateStub(Isolate* isolate) | 62 explicit StoreRegistersStateStub(Isolate* isolate) |
| 63 : PlatformCodeStub(isolate) {} | 63 : PlatformCodeStub(isolate) {} |
| 64 | 64 |
| 65 static void GenerateAheadOfTime(Isolate* isolate); | 65 static void GenerateAheadOfTime(Isolate* isolate); |
| 66 | 66 |
| 67 private: | 67 private: |
| 68 void Generate(MacroAssembler* masm); | 68 DEFINE_PLATFORM_CODE_STUB(StoreRegistersState, PlatformCodeStub); |
| 69 | |
| 70 DEFINE_CODE_STUB(StoreRegistersState, PlatformCodeStub); | |
| 71 }; | 69 }; |
| 72 | 70 |
| 73 | 71 |
| 74 class RestoreRegistersStateStub: public PlatformCodeStub { | 72 class RestoreRegistersStateStub: public PlatformCodeStub { |
| 75 public: | 73 public: |
| 76 explicit RestoreRegistersStateStub(Isolate* isolate) | 74 explicit RestoreRegistersStateStub(Isolate* isolate) |
| 77 : PlatformCodeStub(isolate) {} | 75 : PlatformCodeStub(isolate) {} |
| 78 | 76 |
| 79 static void GenerateAheadOfTime(Isolate* isolate); | 77 static void GenerateAheadOfTime(Isolate* isolate); |
| 80 | 78 |
| 81 private: | 79 private: |
| 82 void Generate(MacroAssembler* masm); | 80 DEFINE_PLATFORM_CODE_STUB(RestoreRegistersState, PlatformCodeStub); |
| 83 | |
| 84 DEFINE_CODE_STUB(RestoreRegistersState, PlatformCodeStub); | |
| 85 }; | 81 }; |
| 86 | 82 |
| 87 // This stub can convert a signed int32 to a heap number (double). It does | 83 // This stub can convert a signed int32 to a heap number (double). It does |
| 88 // not work for int32s that are in Smi range! No GC occurs during this stub | 84 // not work for int32s that are in Smi range! No GC occurs during this stub |
| 89 // so you don't have to set up the frame. | 85 // so you don't have to set up the frame. |
| 90 class WriteInt32ToHeapNumberStub : public PlatformCodeStub { | 86 class WriteInt32ToHeapNumberStub : public PlatformCodeStub { |
| 91 public: | 87 public: |
| 92 WriteInt32ToHeapNumberStub(Isolate* isolate, Register the_int, | 88 WriteInt32ToHeapNumberStub(Isolate* isolate, Register the_int, |
| 93 Register the_heap_number, Register scratch, | 89 Register the_heap_number, Register scratch, |
| 94 Register scratch2) | 90 Register scratch2) |
| (...skipping 187 matching lines...) |
| 282 friend class RecordWriteStub; | 278 friend class RecordWriteStub; |
| 283 }; | 279 }; |
| 284 | 280 |
| 285 enum OnNoNeedToInformIncrementalMarker { | 281 enum OnNoNeedToInformIncrementalMarker { |
| 286 kReturnOnNoNeedToInformIncrementalMarker, | 282 kReturnOnNoNeedToInformIncrementalMarker, |
| 287 kUpdateRememberedSetOnNoNeedToInformIncrementalMarker | 283 kUpdateRememberedSetOnNoNeedToInformIncrementalMarker |
| 288 }; | 284 }; |
| 289 | 285 |
| 290 virtual inline Major MajorKey() const FINAL OVERRIDE { return RecordWrite; } | 286 virtual inline Major MajorKey() const FINAL OVERRIDE { return RecordWrite; } |
| 291 | 287 |
| 292 void Generate(MacroAssembler* masm); | 288 virtual void Generate(MacroAssembler* masm) OVERRIDE; |
| 293 void GenerateIncremental(MacroAssembler* masm, Mode mode); | 289 void GenerateIncremental(MacroAssembler* masm, Mode mode); |
| 294 void CheckNeedsToInformIncrementalMarker( | 290 void CheckNeedsToInformIncrementalMarker( |
| 295 MacroAssembler* masm, | 291 MacroAssembler* masm, |
| 296 OnNoNeedToInformIncrementalMarker on_no_need, | 292 OnNoNeedToInformIncrementalMarker on_no_need, |
| 297 Mode mode); | 293 Mode mode); |
| 298 void InformIncrementalMarker(MacroAssembler* masm); | 294 void InformIncrementalMarker(MacroAssembler* masm); |
| 299 | 295 |
| 300 void Activate(Code* code) { | 296 void Activate(Code* code) { |
| 301 code->GetHeap()->incremental_marking()->ActivateGeneratedStub(code); | 297 code->GetHeap()->incremental_marking()->ActivateGeneratedStub(code); |
| 302 } | 298 } |
| (...skipping 32 matching lines...) |
| 335 | 331 |
| 336 | 332 |
| 337 // Trampoline stub to call into native code. To call safely into native code | 333 // Trampoline stub to call into native code. To call safely into native code |
| 338 // in the presence of compacting GC (which can move code objects) we need to | 334 // in the presence of compacting GC (which can move code objects) we need to |
| 339 // keep the code which called into native pinned in the memory. Currently the | 335 // keep the code which called into native pinned in the memory. Currently the |
| 340 // simplest approach is to generate such stub early enough so it can never be | 336 // simplest approach is to generate such stub early enough so it can never be |
| 341 // moved by GC | 337 // moved by GC |
| 342 class DirectCEntryStub: public PlatformCodeStub { | 338 class DirectCEntryStub: public PlatformCodeStub { |
| 343 public: | 339 public: |
| 344 explicit DirectCEntryStub(Isolate* isolate) : PlatformCodeStub(isolate) {} | 340 explicit DirectCEntryStub(Isolate* isolate) : PlatformCodeStub(isolate) {} |
| 345 void Generate(MacroAssembler* masm); | |
| 346 void GenerateCall(MacroAssembler* masm, Register target); | 341 void GenerateCall(MacroAssembler* masm, Register target); |
| 347 | 342 |
| 348 private: | 343 private: |
| 349 bool NeedsImmovableCode() { return true; } | 344 bool NeedsImmovableCode() { return true; } |
| 350 | 345 |
| 351 DEFINE_CODE_STUB(DirectCEntry, PlatformCodeStub); | 346 DEFINE_PLATFORM_CODE_STUB(DirectCEntry, PlatformCodeStub); |
| 352 }; | 347 }; |
| 353 | 348 |
| 354 | 349 |
| 355 class NameDictionaryLookupStub: public PlatformCodeStub { | 350 class NameDictionaryLookupStub: public PlatformCodeStub { |
| 356 public: | 351 public: |
| 357 enum LookupMode { POSITIVE_LOOKUP, NEGATIVE_LOOKUP }; | 352 enum LookupMode { POSITIVE_LOOKUP, NEGATIVE_LOOKUP }; |
| 358 | 353 |
| 359 NameDictionaryLookupStub(Isolate* isolate, LookupMode mode) | 354 NameDictionaryLookupStub(Isolate* isolate, LookupMode mode) |
| 360 : PlatformCodeStub(isolate) { | 355 : PlatformCodeStub(isolate) { |
| 361 minor_key_ = LookupModeBits::encode(mode); | 356 minor_key_ = LookupModeBits::encode(mode); |
| 362 } | 357 } |
| 363 | 358 |
| 364 void Generate(MacroAssembler* masm); | |
| 365 | |
| 366 static void GenerateNegativeLookup(MacroAssembler* masm, | 359 static void GenerateNegativeLookup(MacroAssembler* masm, |
| 367 Label* miss, | 360 Label* miss, |
| 368 Label* done, | 361 Label* done, |
| 369 Register receiver, | 362 Register receiver, |
| 370 Register properties, | 363 Register properties, |
| 371 Handle<Name> name, | 364 Handle<Name> name, |
| 372 Register scratch0); | 365 Register scratch0); |
| 373 | 366 |
| 374 static void GeneratePositiveLookup(MacroAssembler* masm, | 367 static void GeneratePositiveLookup(MacroAssembler* masm, |
| 375 Label* miss, | 368 Label* miss, |
| (...skipping 14 matching lines...) |
| 390 NameDictionary::kCapacityIndex * kPointerSize; | 383 NameDictionary::kCapacityIndex * kPointerSize; |
| 391 | 384 |
| 392 static const int kElementsStartOffset = | 385 static const int kElementsStartOffset = |
| 393 NameDictionary::kHeaderSize + | 386 NameDictionary::kHeaderSize + |
| 394 NameDictionary::kElementsStartIndex * kPointerSize; | 387 NameDictionary::kElementsStartIndex * kPointerSize; |
| 395 | 388 |
| 396 LookupMode mode() const { return LookupModeBits::decode(minor_key_); } | 389 LookupMode mode() const { return LookupModeBits::decode(minor_key_); } |
| 397 | 390 |
| 398 class LookupModeBits: public BitField<LookupMode, 0, 1> {}; | 391 class LookupModeBits: public BitField<LookupMode, 0, 1> {}; |
| 399 | 392 |
| 400 DEFINE_CODE_STUB(NameDictionaryLookup, PlatformCodeStub); | 393 DEFINE_PLATFORM_CODE_STUB(NameDictionaryLookup, PlatformCodeStub); |
| 401 }; | 394 }; |
| 402 | 395 |
| 403 | 396 |
| 404 } } // namespace v8::internal | 397 } } // namespace v8::internal |
| 405 | 398 |
| 406 #endif // V8_MIPS_CODE_STUBS_ARM_H_ | 399 #endif // V8_MIPS_CODE_STUBS_ARM_H_ |
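
The recurring edit in this patch is mechanical: each platform stub drops its hand-written "void Generate(MacroAssembler* masm);" declaration and switches from DEFINE_CODE_STUB to DEFINE_PLATFORM_CODE_STUB, which is assumed to declare Generate() itself. The standalone C++ sketch below mimics that macro-injection idiom with invented names (MiniStub, DEFINE_MINI_STUB, FakeAssembler); it illustrates the pattern under that assumption and is not V8's actual code-stubs.h definition.

#include <iostream>
#include <string>

// Stand-in for V8's MacroAssembler; purely illustrative.
struct FakeAssembler {
  void Emit(const std::string& s) { std::cout << s << "\n"; }
};

// Base class playing the role of PlatformCodeStub.
class MiniStub {
 public:
  virtual ~MiniStub() = default;
  void GetCode(FakeAssembler* masm) { Generate(masm); }

 protected:
  // Each concrete stub provides its own code generator.
  virtual void Generate(FakeAssembler* masm) = 0;
};

// Hypothetical analogue of DEFINE_PLATFORM_CODE_STUB: it injects the
// Generate() override so subclasses no longer declare it by hand.
#define DEFINE_MINI_STUB(NAME)                  \
 public:                                        \
  const char* Name() const { return #NAME; }    \
                                                \
 protected:                                     \
  void Generate(FakeAssembler* masm) override

class DirectCEntryLikeStub : public MiniStub {
 public:
  DirectCEntryLikeStub() = default;

 private:
  DEFINE_MINI_STUB(DirectCEntryLike);
};

// The Generate() body is still defined separately, like the real stubs'
// implementations in the per-architecture .cc files.
void DirectCEntryLikeStub::Generate(FakeAssembler* masm) {
  masm->Emit("jalr target  ; illustrative only");
}

int main() {
  DirectCEntryLikeStub stub;
  FakeAssembler masm;
  stub.GetCode(&masm);
  std::cout << stub.Name() << "\n";
  return 0;
}

The point of the idiom is that the per-stub boilerplate lives in one macro, so a new platform stub only needs its constructor in the header and its Generate() body in the architecture-specific .cc file.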