| OLD | NEW |
| 1 // Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file |
| 2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
| 3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
| 4 | 4 |
| 5 #ifndef VM_ASSEMBLER_H_ | 5 #ifndef VM_ASSEMBLER_H_ |
| 6 #define VM_ASSEMBLER_H_ | 6 #define VM_ASSEMBLER_H_ |
| 7 | 7 |
| 8 #include "platform/assert.h" | 8 #include "platform/assert.h" |
| 9 #include "vm/allocation.h" | 9 #include "vm/allocation.h" |
| 10 #include "vm/globals.h" | 10 #include "vm/globals.h" |
| (...skipping 33 matching lines...) |
| 44 const char* name_; | 44 const char* name_; |
| 45 const uword address_; | 45 const uword address_; |
| 46 }; | 46 }; |
| 47 | 47 |
| 48 | 48 |
| 49 // Assembler fixups are positions in generated code that hold relocation | 49 // Assembler fixups are positions in generated code that hold relocation |
| 50 // information that needs to be processed before finalizing the code | 50 // information that needs to be processed before finalizing the code |
| 51 // into executable memory. | 51 // into executable memory. |
| 52 class AssemblerFixup : public ZoneAllocated { | 52 class AssemblerFixup : public ZoneAllocated { |
| 53 public: | 53 public: |
| 54 virtual void Process(const MemoryRegion& region, int position) = 0; | 54 virtual void Process(const MemoryRegion& region, intptr_t position) = 0; |
| 55 | 55 |
| 56 // It would be ideal if the destructor method could be made private, | 56 // It would be ideal if the destructor method could be made private, |
| 57 // but the g++ compiler complains when this is subclassed. | 57 // but the g++ compiler complains when this is subclassed. |
| 58 virtual ~AssemblerFixup() { UNREACHABLE(); } | 58 virtual ~AssemblerFixup() { UNREACHABLE(); } |
| 59 | 59 |
| 60 private: | 60 private: |
| 61 AssemblerFixup* previous_; | 61 AssemblerFixup* previous_; |
| 62 int position_; | 62 intptr_t position_; |
| 63 | 63 |
| 64 AssemblerFixup* previous() const { return previous_; } | 64 AssemblerFixup* previous() const { return previous_; } |
| 65 void set_previous(AssemblerFixup* previous) { previous_ = previous; } | 65 void set_previous(AssemblerFixup* previous) { previous_ = previous; } |
| 66 | 66 |
| 67 int position() const { return position_; } | 67 intptr_t position() const { return position_; } |
| 68 void set_position(int position) { position_ = position; } | 68 void set_position(intptr_t position) { position_ = position; } |
| 69 | 69 |
| 70 friend class AssemblerBuffer; | 70 friend class AssemblerBuffer; |
| 71 }; | 71 }; |
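A concrete fixup subclass supplies Process() for each kind of relocation; as a rough sketch of the pattern (the AddressFixup name and the templated MemoryRegion::Store call are assumptions for illustration, not code from this CL), Process() rewrites the bytes recorded at `position` once the buffer's final location is known:

// Illustrative sketch only, not part of this change.
class AddressFixup : public AssemblerFixup {
 public:
  explicit AddressFixup(uword target) : target_(target) { }

  virtual void Process(const MemoryRegion& region, intptr_t position) {
    // Overwrite the placeholder word emitted at `position` with the
    // resolved target (assumes MemoryRegion exposes a templated Store).
    region.Store<uword>(position, target_);
  }

 private:
  const uword target_;
};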
| 72 | 72 |
| 73 | 73 |
| 74 // Assembler buffers are used to emit binary code. They grow on demand. | 74 // Assembler buffers are used to emit binary code. They grow on demand. |
| 75 class AssemblerBuffer : public ValueObject { | 75 class AssemblerBuffer : public ValueObject { |
| 76 public: | 76 public: |
| 77 AssemblerBuffer(); | 77 AssemblerBuffer(); |
| 78 ~AssemblerBuffer(); | 78 ~AssemblerBuffer(); |
| 79 | 79 |
| 80 // Basic support for emitting, loading, and storing. | 80 // Basic support for emitting, loading, and storing. |
| 81 template<typename T> void Emit(T value) { | 81 template<typename T> void Emit(T value) { |
| 82 ASSERT(HasEnsuredCapacity()); | 82 ASSERT(HasEnsuredCapacity()); |
| 83 *reinterpret_cast<T*>(cursor_) = value; | 83 *reinterpret_cast<T*>(cursor_) = value; |
| 84 cursor_ += sizeof(T); | 84 cursor_ += sizeof(T); |
| 85 } | 85 } |
| 86 | 86 |
| 87 template<typename T> void Remit() { | 87 template<typename T> void Remit() { |
| 88 ASSERT(Size() >= static_cast<intptr_t>(sizeof(T))); | 88 ASSERT(Size() >= static_cast<intptr_t>(sizeof(T))); |
| 89 cursor_ -= sizeof(T); | 89 cursor_ -= sizeof(T); |
| 90 } | 90 } |
| 91 | 91 |
| 92 template<typename T> T Load(int position) { | 92 template<typename T> T Load(intptr_t position) { |
| 93 ASSERT(position >= 0 && position <= (Size() - static_cast<int>(sizeof(T)))); | 93 ASSERT(position >= 0 && |
| | 94 position <= (Size() - static_cast<intptr_t>(sizeof(T)))); |
| 94 return *reinterpret_cast<T*>(contents_ + position); | 95 return *reinterpret_cast<T*>(contents_ + position); |
| 95 } | 96 } |
| 96 | 97 |
| 97 template<typename T> void Store(int position, T value) { | 98 template<typename T> void Store(intptr_t position, T value) { |
| 98 ASSERT(position >= 0 && position <= (Size() - static_cast<int>(sizeof(T)))); | 99 ASSERT(position >= 0 && |
| | 100 position <= (Size() - static_cast<intptr_t>(sizeof(T)))); |
| 99 *reinterpret_cast<T*>(contents_ + position) = value; | 101 *reinterpret_cast<T*>(contents_ + position) = value; |
| 100 } | 102 } |
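The typed Emit/Load/Store accessors support the usual emit-then-patch pattern; a minimal usage sketch (the EnsureCapacity guard and Size() are assumed to be declared in the elided lines of this header, and the values are made up):

// Illustrative sketch only.
void EmitPatchableWord(AssemblerBuffer* buffer) {
  AssemblerBuffer::EnsureCapacity ensured(buffer);   // keep HasEnsuredCapacity() true
  buffer->Emit<int32_t>(0);                          // placeholder, e.g. a branch offset
  intptr_t patch_position = buffer->Size() - sizeof(int32_t);
  // ... emit more code, then resolve and patch the placeholder in place:
  buffer->Store<int32_t>(patch_position, 42);
  ASSERT(buffer->Load<int32_t>(patch_position) == 42);
}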
| 101 | 103 |
| 102 const ZoneGrowableArray<int>& pointer_offsets() const { | 104 const ZoneGrowableArray<intptr_t>& pointer_offsets() const { |
| 103 #if defined(DEBUG) | 105 #if defined(DEBUG) |
| 104 ASSERT(fixups_processed_); | 106 ASSERT(fixups_processed_); |
| 105 #endif | 107 #endif |
| 106 return *pointer_offsets_; | 108 return *pointer_offsets_; |
| 107 } | 109 } |
| 108 | 110 |
| 109 // Emit an object pointer directly in the code. | 111 // Emit an object pointer directly in the code. |
| 110 void EmitObject(const Object& object); | 112 void EmitObject(const Object& object); |
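EmitObject is only declared here; one plausible shape for the out-of-line definition (a guess for illustration, not the actual assembler.cc code) is to remember the offset of the embedded pointer in pointer_offsets_ and then emit a word for it:

// Hypothetical sketch only.
void AssemblerBuffer::EmitObject(const Object& object) {
  ASSERT(HasEnsuredCapacity());
  pointer_offsets_->Add(Size());  // offset of the embedded object pointer
  Emit<uword>(reinterpret_cast<uword>(object.raw()));
}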
| 111 | 113 |
| 112 // Emit a fixup at the current location. | 114 // Emit a fixup at the current location. |
| (...skipping 54 matching lines...) |
| 167 private: | 169 private: |
| 168 // The limit is set to kMinimumGap bytes before the end of the data area. | 170 // The limit is set to kMinimumGap bytes before the end of the data area. |
| 169 // This leaves enough space for the longest possible instruction and allows | 171 // This leaves enough space for the longest possible instruction and allows |
| 170 // for a single, fast space check per instruction. | 172 // for a single, fast space check per instruction. |
| 171 static const intptr_t kMinimumGap = 32; | 173 static const intptr_t kMinimumGap = 32; |
| 172 | 174 |
| 173 uword contents_; | 175 uword contents_; |
| 174 uword cursor_; | 176 uword cursor_; |
| 175 uword limit_; | 177 uword limit_; |
| 176 AssemblerFixup* fixup_; | 178 AssemblerFixup* fixup_; |
| 177 ZoneGrowableArray<int>* pointer_offsets_; | 179 ZoneGrowableArray<intptr_t>* pointer_offsets_; |
| 178 #if defined(DEBUG) | 180 #if defined(DEBUG) |
| 179 bool fixups_processed_; | 181 bool fixups_processed_; |
| 180 #endif | 182 #endif |
| 181 | 183 |
| 182 uword cursor() const { return cursor_; } | 184 uword cursor() const { return cursor_; } |
| 183 uword limit() const { return limit_; } | 185 uword limit() const { return limit_; } |
| 184 intptr_t Capacity() const { | 186 intptr_t Capacity() const { |
| 185 ASSERT(limit_ >= contents_); | 187 ASSERT(limit_ >= contents_); |
| 186 return (limit_ - contents_) + kMinimumGap; | 188 return (limit_ - contents_) + kMinimumGap; |
| 187 } | 189 } |
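The kMinimumGap bookkeeping is what makes the single per-instruction space check cheap: limit_ is kept kMinimumGap bytes short of the real end of the allocation, so a cursor_ at or below limit_ still has room for the longest instruction, and Capacity() adds the gap back when reporting the allocation size. A worked example with assumed numbers:

// Illustrative numbers only: a 256-byte allocation starting at `base`.
//   contents_ = base
//   limit_    = base + 256 - kMinimumGap       // base + 224
//   Capacity()                                 // (limit_ - contents_) + 32 == 256
// Any cursor_ <= limit_ therefore still has at least 32 bytes of headroom,
// which is why Emit() only needs the one HasEnsuredCapacity() check.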
| (...skipping 21 matching lines...) |
| 209 #include "vm/assembler_x64.h" | 211 #include "vm/assembler_x64.h" |
| 210 #elif defined(TARGET_ARCH_ARM) | 212 #elif defined(TARGET_ARCH_ARM) |
| 211 #include "vm/assembler_arm.h" | 213 #include "vm/assembler_arm.h" |
| 212 #elif defined(TARGET_ARCH_MIPS) | 214 #elif defined(TARGET_ARCH_MIPS) |
| 213 #include "vm/assembler_mips.h" | 215 #include "vm/assembler_mips.h" |
| 214 #else | 216 #else |
| 215 #error Unknown architecture. | 217 #error Unknown architecture. |
| 216 #endif | 218 #endif |
| 217 | 219 |
| 218 #endif // VM_ASSEMBLER_H_ | 220 #endif // VM_ASSEMBLER_H_ |