OLD | NEW |
---|---|
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 61 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
72 // generates ARM machine code, together with a portable ARM simulator | 72 // generates ARM machine code, together with a portable ARM simulator |
73 // compiled for the host architecture in question. | 73 // compiled for the host architecture in question. |
74 // | 74 // |
75 // Since Native Client is ILP-32 on all architectures we use | 75 // Since Native Client is ILP-32 on all architectures we use |
76 // V8_HOST_ARCH_IA32 on both 32- and 64-bit x86. | 76 // V8_HOST_ARCH_IA32 on both 32- and 64-bit x86. |
77 #define V8_HOST_ARCH_IA32 1 | 77 #define V8_HOST_ARCH_IA32 1 |
78 #define V8_HOST_ARCH_32_BIT 1 | 78 #define V8_HOST_ARCH_32_BIT 1 |
79 #define V8_HOST_CAN_READ_UNALIGNED 1 | 79 #define V8_HOST_CAN_READ_UNALIGNED 1 |
80 #else | 80 #else |
81 #define V8_HOST_ARCH_X64 1 | 81 #define V8_HOST_ARCH_X64 1 |
82 #if defined(__x86_64__) && !defined(__LP64__) | |
83 #define V8_HOST_ARCH_32_BIT 1 | |
84 #else | |
82 #define V8_HOST_ARCH_64_BIT 1 | 85 #define V8_HOST_ARCH_64_BIT 1 |
86 #endif // defined(__x86_64__) && !defined(__LP64__) | |
83 #define V8_HOST_CAN_READ_UNALIGNED 1 | 87 #define V8_HOST_CAN_READ_UNALIGNED 1 |
84 #endif // __native_client__ | 88 #endif // __native_client__ |
85 #elif defined(_M_IX86) || defined(__i386__) | 89 #elif defined(_M_IX86) || defined(__i386__) |
86 #define V8_HOST_ARCH_IA32 1 | 90 #define V8_HOST_ARCH_IA32 1 |
87 #define V8_HOST_ARCH_32_BIT 1 | 91 #define V8_HOST_ARCH_32_BIT 1 |
88 #define V8_HOST_CAN_READ_UNALIGNED 1 | 92 #define V8_HOST_CAN_READ_UNALIGNED 1 |
89 #elif defined(__ARMEL__) | 93 #elif defined(__ARMEL__) |
90 #define V8_HOST_ARCH_ARM 1 | 94 #define V8_HOST_ARCH_ARM 1 |
91 #define V8_HOST_ARCH_32_BIT 1 | 95 #define V8_HOST_ARCH_32_BIT 1 |
92 // Some CPU-OS combinations allow unaligned access on ARM. We assume | 96 // Some CPU-OS combinations allow unaligned access on ARM. We assume |
93 // that unaligned accesses are not allowed unless the build system | 97 // that unaligned accesses are not allowed unless the build system |
94 // defines the CAN_USE_UNALIGNED_ACCESSES macro to be non-zero. | 98 // defines the CAN_USE_UNALIGNED_ACCESSES macro to be non-zero. |
95 #if CAN_USE_UNALIGNED_ACCESSES | 99 #if CAN_USE_UNALIGNED_ACCESSES |
96 #define V8_HOST_CAN_READ_UNALIGNED 1 | 100 #define V8_HOST_CAN_READ_UNALIGNED 1 |
97 #endif | 101 #endif |
98 #elif defined(__MIPSEL__) | 102 #elif defined(__MIPSEL__) |
99 #define V8_HOST_ARCH_MIPS 1 | 103 #define V8_HOST_ARCH_MIPS 1 |
100 #define V8_HOST_ARCH_32_BIT 1 | 104 #define V8_HOST_ARCH_32_BIT 1 |
101 #else | 105 #else |
102 #error Host architecture was not detected as supported by v8 | 106 #error Host architecture was not detected as supported by v8 |
103 #endif | 107 #endif |
104 | 108 |
105 // Target architecture detection. This may be set externally. If not, detect | 109 // Target architecture detection. This may be set externally. If not, detect |
106 // in the same way as the host architecture, that is, target the native | 110 // in the same way as the host architecture, that is, target the native |
107 // environment as presented by the compiler. | 111 // environment as presented by the compiler. |
108 #if !V8_TARGET_ARCH_X64 && !V8_TARGET_ARCH_IA32 && \ | 112 #if !V8_TARGET_ARCH_X64 && !V8_TARGET_ARCH_IA32 && !V8_TARGET_ARCH_ARM && \ |
109 !V8_TARGET_ARCH_ARM && !V8_TARGET_ARCH_MIPS | 113 !V8_TARGET_ARCH_MIPS && !V8_TARGET_ARCH_X32 |
110 #if defined(_M_X64) || defined(__x86_64__) | 114 #if defined(_M_X64) || defined(__x86_64__) |
111 #define V8_TARGET_ARCH_X64 1 | 115 #define V8_TARGET_ARCH_X64 1 |
112 #elif defined(_M_IX86) || defined(__i386__) | 116 #elif defined(_M_IX86) || defined(__i386__) |
113 #define V8_TARGET_ARCH_IA32 1 | 117 #define V8_TARGET_ARCH_IA32 1 |
114 #elif defined(__ARMEL__) | 118 #elif defined(__ARMEL__) |
115 #define V8_TARGET_ARCH_ARM 1 | 119 #define V8_TARGET_ARCH_ARM 1 |
116 #elif defined(__MIPSEL__) | 120 #elif defined(__MIPSEL__) |
117 #define V8_TARGET_ARCH_MIPS 1 | 121 #define V8_TARGET_ARCH_MIPS 1 |
118 #else | 122 #else |
119 #error Target architecture was not detected as supported by v8 | 123 #error Target architecture was not detected as supported by v8 |
120 #endif | 124 #endif |
121 #endif | 125 #endif |
122 | 126 |
123 // Check for supported combinations of host and target architectures. | 127 // Check for supported combinations of host and target architectures. |
124 #if V8_TARGET_ARCH_IA32 && !V8_HOST_ARCH_IA32 | 128 #if V8_TARGET_ARCH_IA32 && !V8_HOST_ARCH_IA32 |
125 #error Target architecture ia32 is only supported on ia32 host | 129 #error Target architecture ia32 is only supported on ia32 host |
126 #endif | 130 #endif |
127 #if V8_TARGET_ARCH_X64 && !V8_HOST_ARCH_X64 | 131 #if V8_TARGET_ARCH_X64 && !V8_HOST_ARCH_X64 |
128 #error Target architecture x64 is only supported on x64 host | 132 #error Target architecture x64 is only supported on x64 host |
129 #endif | 133 #endif |
134 #if (V8_TARGET_ARCH_X32 && !V8_HOST_ARCH_X64 && !V8_HOST_ARCH_32_BIT) | |
135 #error Target architecture x32 is only supported on x64 host with x32 support | |
136 #endif | |
130 #if (V8_TARGET_ARCH_ARM && !(V8_HOST_ARCH_IA32 || V8_HOST_ARCH_ARM)) | 137 #if (V8_TARGET_ARCH_ARM && !(V8_HOST_ARCH_IA32 || V8_HOST_ARCH_ARM)) |
131 #error Target architecture arm is only supported on arm and ia32 host | 138 #error Target architecture arm is only supported on arm and ia32 host |
132 #endif | 139 #endif |
133 #if (V8_TARGET_ARCH_MIPS && !(V8_HOST_ARCH_IA32 || V8_HOST_ARCH_MIPS)) | 140 #if (V8_TARGET_ARCH_MIPS && !(V8_HOST_ARCH_IA32 || V8_HOST_ARCH_MIPS)) |
134 #error Target architecture mips is only supported on mips and ia32 host | 141 #error Target architecture mips is only supported on mips and ia32 host |
135 #endif | 142 #endif |
136 | 143 |
137 // Determine whether we are running in a simulated environment. | 144 // Determine whether we are running in a simulated environment. |
138 // Setting USE_SIMULATOR explicitly from the build script will force | 145 // Setting USE_SIMULATOR explicitly from the build script will force |
139 // the use of a simulated environment. | 146 // the use of a simulated environment. |
140 #if !defined(USE_SIMULATOR) | 147 #if !defined(USE_SIMULATOR) |
141 #if (V8_TARGET_ARCH_ARM && !V8_HOST_ARCH_ARM) | 148 #if (V8_TARGET_ARCH_ARM && !V8_HOST_ARCH_ARM) |
142 #define USE_SIMULATOR 1 | 149 #define USE_SIMULATOR 1 |
143 #endif | 150 #endif |
144 #if (V8_TARGET_ARCH_MIPS && !V8_HOST_ARCH_MIPS) | 151 #if (V8_TARGET_ARCH_MIPS && !V8_HOST_ARCH_MIPS) |
145 #define USE_SIMULATOR 1 | 152 #define USE_SIMULATOR 1 |
146 #endif | 153 #endif |
147 #endif | 154 #endif |
148 | 155 |
149 // Determine architecture endianness (we only support little-endian). | 156 // Determine architecture endianness (we only support little-endian). |
150 #if V8_TARGET_ARCH_IA32 | 157 #if V8_TARGET_ARCH_IA32 |
151 #define V8_TARGET_LITTLE_ENDIAN 1 | 158 #define V8_TARGET_LITTLE_ENDIAN 1 |
152 #elif V8_TARGET_ARCH_X64 | 159 #elif V8_TARGET_ARCH_X64 |
153 #define V8_TARGET_LITTLE_ENDIAN 1 | 160 #define V8_TARGET_LITTLE_ENDIAN 1 |
161 #elif V8_TARGET_ARCH_X32 | |
162 #define V8_TARGET_LITTLE_ENDIAN 1 | |
154 #elif V8_TARGET_ARCH_ARM | 163 #elif V8_TARGET_ARCH_ARM |
155 #define V8_TARGET_LITTLE_ENDIAN 1 | 164 #define V8_TARGET_LITTLE_ENDIAN 1 |
156 #elif V8_TARGET_ARCH_MIPS | 165 #elif V8_TARGET_ARCH_MIPS |
157 #define V8_TARGET_LITTLE_ENDIAN 1 | 166 #define V8_TARGET_LITTLE_ENDIAN 1 |
158 #else | 167 #else |
159 #error Unknown target architecture endiannes | 168 #error Unknown target architecture endiannes |
160 #endif | 169 #endif |
161 | 170 |
162 // Support for alternative bool type. This is only enabled if the code is | 171 // Support for alternative bool type. This is only enabled if the code is |
163 // compiled with USE_MYBOOL defined. This catches some nasty type bugs. | 172 // compiled with USE_MYBOOL defined. This catches some nasty type bugs. |
(...skipping 29 matching lines...) Expand all Loading... | |
193 #define V8_INT64_C(x) (x ## LL) | 202 #define V8_INT64_C(x) (x ## LL) |
194 #define V8_INTPTR_C(x) (x ## LL) | 203 #define V8_INTPTR_C(x) (x ## LL) |
195 #define V8_PTR_PREFIX "I64" | 204 #define V8_PTR_PREFIX "I64" |
196 #else | 205 #else |
197 #define V8_UINT64_C(x) (x ## UL) | 206 #define V8_UINT64_C(x) (x ## UL) |
198 #define V8_INT64_C(x) (x ## L) | 207 #define V8_INT64_C(x) (x ## L) |
199 #define V8_INTPTR_C(x) (x ## L) | 208 #define V8_INTPTR_C(x) (x ## L) |
200 #define V8_PTR_PREFIX "l" | 209 #define V8_PTR_PREFIX "l" |
201 #endif | 210 #endif |
202 #else // V8_HOST_ARCH_64_BIT | 211 #else // V8_HOST_ARCH_64_BIT |
212 #if V8_TARGET_ARCH_X32 | |
213 #define V8_UINT64_C(x) (x ## ULL) | |
214 #define V8_INT64_C(x) (x ## LL) | |
215 #endif | |
203 #define V8_INTPTR_C(x) (x) | 216 #define V8_INTPTR_C(x) (x) |
204 #define V8_PTR_PREFIX "" | 217 #define V8_PTR_PREFIX "" |
205 #endif // V8_HOST_ARCH_64_BIT | 218 #endif // V8_HOST_ARCH_64_BIT |
206 | 219 |
207 // The following macro works on both 32 and 64-bit platforms. | 220 // The following macro works on both 32 and 64-bit platforms. |
208 // Usage: instead of writing 0x1234567890123456 | 221 // Usage: instead of writing 0x1234567890123456 |
209 // write V8_2PART_UINT64_C(0x12345678,90123456); | 222 // write V8_2PART_UINT64_C(0x12345678,90123456); |
210 #define V8_2PART_UINT64_C(a, b) (((static_cast<uint64_t>(a) << 32) + 0x##b##u)) | 223 #define V8_2PART_UINT64_C(a, b) (((static_cast<uint64_t>(a) << 32) + 0x##b##u)) |
211 | 224 |
212 #define V8PRIxPTR V8_PTR_PREFIX "x" | 225 #define V8PRIxPTR V8_PTR_PREFIX "x" |
(...skipping 21 matching lines...) Expand all Loading... | |
234 const int kMinInt = -kMaxInt - 1; | 247 const int kMinInt = -kMaxInt - 1; |
235 | 248 |
236 const uint32_t kMaxUInt32 = 0xFFFFFFFFu; | 249 const uint32_t kMaxUInt32 = 0xFFFFFFFFu; |
237 | 250 |
238 const int kCharSize = sizeof(char); // NOLINT | 251 const int kCharSize = sizeof(char); // NOLINT |
239 const int kShortSize = sizeof(short); // NOLINT | 252 const int kShortSize = sizeof(short); // NOLINT |
240 const int kIntSize = sizeof(int); // NOLINT | 253 const int kIntSize = sizeof(int); // NOLINT |
241 const int kDoubleSize = sizeof(double); // NOLINT | 254 const int kDoubleSize = sizeof(double); // NOLINT |
242 const int kIntptrSize = sizeof(intptr_t); // NOLINT | 255 const int kIntptrSize = sizeof(intptr_t); // NOLINT |
243 const int kPointerSize = sizeof(void*); // NOLINT | 256 const int kPointerSize = sizeof(void*); // NOLINT |
257 #if V8_TARGET_ARCH_X32 | |
258 const int kHWRegSize = kPointerSize + kPointerSize; | |
danno
2013/07/17 13:33:21
You should define the following for all platforms
| |
259 #endif | |
244 | 260 |
245 const int kDoubleSizeLog2 = 3; | 261 const int kDoubleSizeLog2 = 3; |
246 | 262 |
247 // Size of the state of the random number generator. | 263 // Size of the state of the random number generator. |
248 const int kRandomStateSize = 2 * kIntSize; | 264 const int kRandomStateSize = 2 * kIntSize; |
249 | 265 |
250 #if V8_HOST_ARCH_64_BIT | 266 #if V8_HOST_ARCH_64_BIT && !V8_TARGET_ARCH_X32 |
251 const int kPointerSizeLog2 = 3; | 267 const int kPointerSizeLog2 = 3; |
252 const intptr_t kIntptrSignBit = V8_INT64_C(0x8000000000000000); | 268 const intptr_t kIntptrSignBit = V8_INT64_C(0x8000000000000000); |
253 const uintptr_t kUintptrAllBitsSet = V8_UINT64_C(0xFFFFFFFFFFFFFFFF); | 269 const uintptr_t kUintptrAllBitsSet = V8_UINT64_C(0xFFFFFFFFFFFFFFFF); |
254 #else | 270 #else |
255 const int kPointerSizeLog2 = 2; | 271 const int kPointerSizeLog2 = 2; |
256 const intptr_t kIntptrSignBit = 0x80000000; | 272 const intptr_t kIntptrSignBit = 0x80000000; |
257 const uintptr_t kUintptrAllBitsSet = 0xFFFFFFFFu; | 273 const uintptr_t kUintptrAllBitsSet = 0xFFFFFFFFu; |
258 #endif | 274 #endif |
259 | 275 |
260 const int kBitsPerByte = 8; | 276 const int kBitsPerByte = 8; |
(...skipping 168 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
429 // the backend, so both modes are represented by the kStrictMode value. | 445 // the backend, so both modes are represented by the kStrictMode value. |
430 enum StrictModeFlag { | 446 enum StrictModeFlag { |
431 kNonStrictMode, | 447 kNonStrictMode, |
432 kStrictMode | 448 kStrictMode |
433 }; | 449 }; |
434 | 450 |
435 | 451 |
436 } } // namespace v8::internal | 452 } } // namespace v8::internal |
437 | 453 |
438 #endif // V8_GLOBALS_H_ | 454 #endif // V8_GLOBALS_H_ |
OLD | NEW |