Chromium Code Reviews

Unified Diff: third_party/lzma_sdk/CpuArch.h

Issue 1700453002: Update lzma_sdk sources to 15.14. (Closed)
Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: Chromium modifications (created 4 years, 10 months ago)

--- a/third_party/lzma_sdk/CpuArch.h
+++ b/third_party/lzma_sdk/CpuArch.h
@@ -1,156 +1,222 @@
 /* CpuArch.h -- CPU specific code
-2010-10-26: Igor Pavlov : Public domain */
+2015-12-01: Igor Pavlov : Public domain */
 
 #ifndef __CPU_ARCH_H
 #define __CPU_ARCH_H
 
-#include <stdlib.h>
-#include "Types.h"
+#include "7zTypes.h"
 
 EXTERN_C_BEGIN
 
 /*
 MY_CPU_LE means that CPU is LITTLE ENDIAN.
-If MY_CPU_LE is not defined, we don't know about that property of platform (it can be LITTLE ENDIAN).
+MY_CPU_BE means that CPU is BIG ENDIAN.
+If MY_CPU_LE and MY_CPU_BE are not defined, we don't know about ENDIANNESS of platform.
 
 MY_CPU_LE_UNALIGN means that CPU is LITTLE ENDIAN and CPU supports unaligned memory accesses.
-If MY_CPU_LE_UNALIGN is not defined, we don't know about these properties of platform.
 */
 
-#if defined(_M_X64) || defined(_M_AMD64) || defined(__x86_64__)
+#if defined(_M_X64) \
+    || defined(_M_AMD64) \
+    || defined(__x86_64__) \
+    || defined(__AMD64__) \
+    || defined(__amd64__)
 #define MY_CPU_AMD64
 #endif
 
-#if defined(MY_CPU_AMD64) || defined(_M_IA64)
+#if defined(MY_CPU_AMD64) \
+    || defined(_M_IA64) \
+    || defined(__AARCH64EL__) \
+    || defined(__AARCH64EB__)
 #define MY_CPU_64BIT
 #endif
 
 #if defined(_M_IX86) || defined(__i386__)
 #define MY_CPU_X86
 #endif
 
 #if defined(MY_CPU_X86) || defined(MY_CPU_AMD64)
 #define MY_CPU_X86_OR_AMD64
 #endif
 
-#if defined(MY_CPU_X86) || defined(_M_ARM)
+#if defined(MY_CPU_X86) \
+    || defined(_M_ARM) \
+    || defined(__ARMEL__) \
+    || defined(__THUMBEL__) \
+    || defined(__ARMEB__) \
+    || defined(__THUMBEB__)
 #define MY_CPU_32BIT
 #endif
 
 #if defined(_WIN32) && defined(_M_ARM)
 #define MY_CPU_ARM_LE
 #endif
 
 #if defined(_WIN32) && defined(_M_IA64)
 #define MY_CPU_IA64_LE
 #endif
 
-#if defined(MY_CPU_X86_OR_AMD64)
-#define MY_CPU_LE_UNALIGN
-#endif
-
-#if defined(MY_CPU_X86_OR_AMD64) || defined(MY_CPU_ARM_LE) || defined(MY_CPU_IA64_LE) || defined(__ARMEL__) || defined(__MIPSEL__) || defined(__LITTLE_ENDIAN__)
+#if defined(MY_CPU_X86_OR_AMD64) \
+    || defined(MY_CPU_ARM_LE) \
+    || defined(MY_CPU_IA64_LE) \
+    || defined(__LITTLE_ENDIAN__) \
+    || defined(__ARMEL__) \
+    || defined(__THUMBEL__) \
+    || defined(__AARCH64EL__) \
+    || defined(__MIPSEL__) \
+    || defined(__MIPSEL) \
+    || defined(_MIPSEL) \
+    || (defined(__BYTE_ORDER__) && (__BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__))
 #define MY_CPU_LE
 #endif
 
-#if defined(__BIG_ENDIAN__)
+#if defined(__BIG_ENDIAN__) \
+    || defined(__ARMEB__) \
+    || defined(__THUMBEB__) \
+    || defined(__AARCH64EB__) \
+    || defined(__MIPSEB__) \
+    || defined(__MIPSEB) \
+    || defined(_MIPSEB) \
+    || defined(__m68k__) \
+    || defined(__s390__) \
+    || defined(__s390x__) \
+    || defined(__zarch__) \
+    || (defined(__BYTE_ORDER__) && (__BYTE_ORDER__ == __ORDER_BIG_ENDIAN__))
 #define MY_CPU_BE
 #endif
 
 #if defined(MY_CPU_LE) && defined(MY_CPU_BE)
 Stop_Compiling_Bad_Endian
 #endif
 
+
+#ifdef MY_CPU_LE
+#if defined(MY_CPU_X86_OR_AMD64) \
+    /* || defined(__AARCH64EL__) */
+#define MY_CPU_LE_UNALIGN
+#endif
+#endif
+
+
 #ifdef MY_CPU_LE_UNALIGN
 
-#define GetUi16(p) (*(const UInt16 *)(p))
-#define GetUi32(p) (*(const UInt32 *)(p))
-#define GetUi64(p) (*(const UInt64 *)(p))
-#define SetUi16(p, d) *(UInt16 *)(p) = (d);
-#define SetUi32(p, d) *(UInt32 *)(p) = (d);
-#define SetUi64(p, d) *(UInt64 *)(p) = (d);
+#define GetUi16(p) (*(const UInt16 *)(const void *)(p))
+#define GetUi32(p) (*(const UInt32 *)(const void *)(p))
+#define GetUi64(p) (*(const UInt64 *)(const void *)(p))
+
+#define SetUi16(p, v) { *(UInt16 *)(p) = (v); }
+#define SetUi32(p, v) { *(UInt32 *)(p) = (v); }
+#define SetUi64(p, v) { *(UInt64 *)(p) = (v); }
 
 #else
 
-#define GetUi16(p) (((const Byte *)(p))[0] | ((UInt16)((const Byte *)(p))[1] << 8))
+#define GetUi16(p) ( (UInt16) ( \
+             ((const Byte *)(p))[0] | \
+    ((UInt16)((const Byte *)(p))[1] << 8) ))
 
 #define GetUi32(p) ( \
              ((const Byte *)(p))[0]        | \
    ((UInt32)((const Byte *)(p))[1] <<  8) | \
    ((UInt32)((const Byte *)(p))[2] << 16) | \
    ((UInt32)((const Byte *)(p))[3] << 24))
 
 #define GetUi64(p) (GetUi32(p) | ((UInt64)GetUi32(((const Byte *)(p)) + 4) << 32))
 
-#define SetUi16(p, d) { UInt32 _x_ = (d); \
-    ((Byte *)(p))[0] = (Byte)_x_; \
-    ((Byte *)(p))[1] = (Byte)(_x_ >> 8); }
+#define SetUi16(p, v) { Byte *_ppp_ = (Byte *)(p); UInt32 _vvv_ = (v); \
+    _ppp_[0] = (Byte)_vvv_; \
+    _ppp_[1] = (Byte)(_vvv_ >> 8); }
 
-#define SetUi32(p, d) { UInt32 _x_ = (d); \
-    ((Byte *)(p))[0] = (Byte)_x_; \
-    ((Byte *)(p))[1] = (Byte)(_x_ >> 8); \
-    ((Byte *)(p))[2] = (Byte)(_x_ >> 16); \
-    ((Byte *)(p))[3] = (Byte)(_x_ >> 24); }
+#define SetUi32(p, v) { Byte *_ppp_ = (Byte *)(p); UInt32 _vvv_ = (v); \
+    _ppp_[0] = (Byte)_vvv_; \
+    _ppp_[1] = (Byte)(_vvv_ >> 8); \
+    _ppp_[2] = (Byte)(_vvv_ >> 16); \
+    _ppp_[3] = (Byte)(_vvv_ >> 24); }
 
-#define SetUi64(p, d) { UInt64 _x64_ = (d); \
-    SetUi32(p, (UInt32)_x64_); \
-    SetUi32(((Byte *)(p)) + 4, (UInt32)(_x64_ >> 32)); }
+#define SetUi64(p, v) { Byte *_ppp2_ = (Byte *)(p); UInt64 _vvv2_ = (v); \
+    SetUi32(_ppp2_    , (UInt32)_vvv2_); \
+    SetUi32(_ppp2_ + 4, (UInt32)(_vvv2_ >> 32)); }
 
 #endif
 
-#if defined(MY_CPU_LE_UNALIGN) && defined(_WIN64) && (_MSC_VER >= 1300)
+
+#if defined(MY_CPU_LE_UNALIGN) && /* defined(_WIN64) && */ (_MSC_VER >= 1300)
+
+/* Note: we use bswap instruction, that is unsupported in 386 cpu */
+
+#include <stdlib.h>
 
 #pragma intrinsic(_byteswap_ulong)
 #pragma intrinsic(_byteswap_uint64)
 #define GetBe32(p) _byteswap_ulong(*(const UInt32 *)(const Byte *)(p))
 #define GetBe64(p) _byteswap_uint64(*(const UInt64 *)(const Byte *)(p))
 
+#define SetBe32(p, v) (*(UInt32 *)(void *)(p)) = _byteswap_ulong(v)
+
+#elif defined(MY_CPU_LE_UNALIGN) && defined (__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 3))
+
+#define GetBe32(p) __builtin_bswap32(*(const UInt32 *)(const Byte *)(p))
+#define GetBe64(p) __builtin_bswap64(*(const UInt64 *)(const Byte *)(p))
+
+#define SetBe32(p, v) (*(UInt32 *)(void *)(p)) = __builtin_bswap32(v)
+
 #else
 
 #define GetBe32(p) ( \
    ((UInt32)((const Byte *)(p))[0] << 24) | \
    ((UInt32)((const Byte *)(p))[1] << 16) | \
    ((UInt32)((const Byte *)(p))[2] <<  8) | \
             ((const Byte *)(p))[3] )
 
 #define GetBe64(p) (((UInt64)GetBe32(p) << 32) | GetBe32(((const Byte *)(p)) + 4))
 
+#define SetBe32(p, v) { Byte *_ppp_ = (Byte *)(p); UInt32 _vvv_ = (v); \
+    _ppp_[0] = (Byte)(_vvv_ >> 24); \
+    _ppp_[1] = (Byte)(_vvv_ >> 16); \
+    _ppp_[2] = (Byte)(_vvv_ >> 8); \
+    _ppp_[3] = (Byte)_vvv_; }
+
 #endif
 
-#define GetBe16(p) (((UInt16)((const Byte *)(p))[0] << 8) | ((const Byte *)(p))[1])
+
+#define GetBe16(p) ( (UInt16) ( \
+    ((UInt16)((const Byte *)(p))[0] << 8) | \
+             ((const Byte *)(p))[1] ))
+
 
 
 #ifdef MY_CPU_X86_OR_AMD64
 
 typedef struct
 {
   UInt32 maxFunc;
   UInt32 vendor[3];
   UInt32 ver;
   UInt32 b;
   UInt32 c;
   UInt32 d;
 } Cx86cpuid;
 
 enum
 {
   CPU_FIRM_INTEL,
   CPU_FIRM_AMD,
   CPU_FIRM_VIA
 };
 
+void MyCPUID(UInt32 function, UInt32 *a, UInt32 *b, UInt32 *c, UInt32 *d);
+
 Bool x86cpuid_CheckAndRead(Cx86cpuid *p);
 int x86cpuid_GetFirm(const Cx86cpuid *p);
 
-#define x86cpuid_GetFamily(p) (((p)->ver >> 8) & 0xFF00F)
-#define x86cpuid_GetModel(p) (((p)->ver >> 4) & 0xF00F)
-#define x86cpuid_GetStepping(p) ((p)->ver & 0xF)
+#define x86cpuid_GetFamily(ver) (((ver >> 16) & 0xFF0) | ((ver >> 8) & 0xF))
+#define x86cpuid_GetModel(ver) (((ver >> 12) & 0xF0) | ((ver >> 4) & 0xF))
+#define x86cpuid_GetStepping(ver) (ver & 0xF)
 
 Bool CPU_Is_InOrder();
 Bool CPU_Is_Aes_Supported();
 
 #endif
 
 EXTERN_C_END
 
 #endif
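
Editor's note: the Get/Set macros above give fixed byte-order access to unaligned integers. GetUi16/GetUi32/GetUi64 and SetUi16/SetUi32/SetUi64 always use little-endian byte order, and GetBe16/GetBe32/GetBe64 big-endian, regardless of the host CPU; on targets where MY_CPU_LE_UNALIGN is defined they compile to plain loads and stores, otherwise to per-byte shifts. A minimal usage sketch follows. It assumes it is compiled inside the lzma_sdk tree so that "CpuArch.h" (and the Byte/UInt16/UInt32 typedefs from "7zTypes.h") resolve; the buffer and printed values are purely illustrative.

#include <stdio.h>

#include "CpuArch.h"  /* brings in 7zTypes.h for Byte, UInt16, UInt32, UInt64 */

int main(void)
{
  Byte buf[8] = { 0 };

  /* SetUi32 always stores little-endian byte order, on any host. */
  SetUi32(buf, 0x11223344);

  /* GetUi32 reads it back as little-endian: prints 11223344 everywhere. */
  printf("LE read: %08x\n", (unsigned)GetUi32(buf));

  /* GetBe32 interprets the same four bytes as big-endian: 44332211. */
  printf("BE read: %08x\n", (unsigned)GetBe32(buf));

  /* buf[0] holds the least significant byte of the stored value: 44. */
  printf("buf[0] : %02x\n", (unsigned)buf[0]);

#if defined(MY_CPU_LE)
  puts("compile-time endianness: little-endian (MY_CPU_LE)");
#elif defined(MY_CPU_BE)
  puts("compile-time endianness: big-endian (MY_CPU_BE)");
#else
  puts("compile-time endianness: unknown");
#endif
  return 0;
}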
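
Editor's note: the MY_CPU_X86_OR_AMD64 block declares the cpuid helpers implemented in CpuArch.c in the same directory. The sketch below shows how a caller might query them; it assumes linking against CpuArch.c, and the vendor-name table and the handling of unknown vendors are illustrative assumptions, not part of the header. Note that after this update the x86cpuid_GetFamily/GetModel/GetStepping macros take the raw ver word rather than a Cx86cpuid pointer.

#include <stdio.h>

#include "CpuArch.h"  /* declarations; the helpers are implemented in CpuArch.c */

int main(void)
{
#ifdef MY_CPU_X86_OR_AMD64
  Cx86cpuid cpu;
  if (!x86cpuid_CheckAndRead(&cpu))
  {
    puts("CPUID not available");
    return 1;
  }
  {
    /* Illustrative labels for the CPU_FIRM_* enum values above. */
    static const char * const firmNames[] = { "Intel", "AMD", "VIA" };
    int firm = x86cpuid_GetFirm(&cpu);
    const char *name = (firm >= 0 && firm <= CPU_FIRM_VIA) ? firmNames[firm] : "unknown";

    /* The 15.14 macros decode the raw version word (cpu.ver), not the struct. */
    printf("vendor  : %s\n", name);
    printf("family  : 0x%x\n", (unsigned)x86cpuid_GetFamily(cpu.ver));
    printf("model   : 0x%x\n", (unsigned)x86cpuid_GetModel(cpu.ver));
    printf("stepping: 0x%x\n", (unsigned)x86cpuid_GetStepping(cpu.ver));
    printf("AES-NI  : %s\n", CPU_Is_Aes_Supported() ? "yes" : "no");
  }
#else
  puts("not an x86/x64 build");
#endif
  return 0;
}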
