OLD | NEW |
1 // Copyright 2016 the V8 project authors. All rights reserved. | 1 // Copyright 2017 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
11 // with the distribution. | 11 // with the distribution. |
12 // * Neither the name of Google Inc. nor the names of its | 12 // * Neither the name of Google Inc. nor the names of its |
13 // contributors may be used to endorse or promote products derived | 13 // contributors may be used to endorse or promote products derived |
14 // from this software without specific prior written permission. | 14 // from this software without specific prior written permission. |
15 // | 15 // |
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS | 16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS |
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT | 17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT |
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR | 18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR |
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT | 19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT |
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, | 20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, |
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT | 21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT |
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, | 22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, |
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | 23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY |
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | 24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE |
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
27 | 27 |
28 #include "src/v8.h" | 28 #include "src/v8.h" |
29 #include "test/cctest/cctest.h" | 29 #include "test/cctest/cctest.h" |
30 | 30 |
31 #include "src/arm/simulator-arm.h" | 31 #include "src/arm64/simulator-arm64.h" |
32 #include "src/disassembler.h" | |
33 #include "src/factory.h" | 32 #include "src/factory.h" |
34 #include "src/macro-assembler.h" | 33 #include "src/macro-assembler.h" |
35 | 34 |
36 #if defined(USE_SIMULATOR) | 35 #if defined(USE_SIMULATOR) |
37 | 36 |
38 #ifndef V8_TARGET_LITTLE_ENDIAN | 37 #ifndef V8_TARGET_LITTLE_ENDIAN |
39 #error Expected ARM to be little-endian | 38 #error Expected ARM64 to be little-endian |
40 #endif | 39 #endif |
41 | 40 |
42 using namespace v8::base; | 41 using namespace v8::base; |
43 using namespace v8::internal; | 42 using namespace v8::internal; |
44 | 43 |
45 // Define these function prototypes to match JSEntryFunction in execution.cc. | 44 #define __ masm. |
46 typedef Object* (*F1)(int x, int p1, int p2, int p3, int p4); | |
47 typedef Object* (*F3)(void* p0, int p1, int p2, int p3, int p4); | |
48 | |
49 #define __ assm. | |
50 | 45 |
51 struct MemoryAccess { | 46 struct MemoryAccess { |
52 enum class Kind { | 47 enum class Kind { |
53 None, | 48 None, |
54 Load, | 49 Load, |
55 LoadExcl, | 50 LoadExcl, |
56 Store, | 51 Store, |
57 StoreExcl, | 52 StoreExcl, |
58 }; | 53 }; |
59 | 54 |
60 enum class Size { | 55 enum class Size { |
61 Byte, | 56 Byte, |
62 HalfWord, | 57 HalfWord, |
63 Word, | 58 Word, |
64 }; | 59 }; |
65 | 60 |
66 MemoryAccess() : kind(Kind::None) {} | 61 MemoryAccess() : kind(Kind::None) {} |
67 MemoryAccess(Kind kind, Size size, size_t offset, int value = 0) | 62 MemoryAccess(Kind kind, Size size, size_t offset, int value = 0) |
68 : kind(kind), size(size), offset(offset), value(value) {} | 63 : kind(kind), size(size), offset(offset), value(value) {} |
69 | 64 |
70 Kind kind; | 65 Kind kind = Kind::None; |
71 Size size; | 66 Size size = Size::Byte; |
72 size_t offset; | 67 size_t offset = 0; |
73 int value; | 68 int value = 0; |
74 }; | 69 }; |
75 | 70 |
76 struct TestData { | 71 struct TestData { |
77 explicit TestData(int w) : w(w) {} | 72 explicit TestData(int w) : w(w) {} |
78 | 73 |
79 union { | 74 union { |
80 int32_t w; | 75 int32_t w; |
81 int16_t h; | 76 int16_t h; |
82 int8_t b; | 77 int8_t b; |
83 }; | 78 }; |
84 int dummy; | 79 int dummy; |
85 }; | 80 }; |
86 | 81 |
87 static void AssembleMemoryAccess(Assembler* assembler, MemoryAccess access, | 82 static void AssembleMemoryAccess(MacroAssembler* assembler, MemoryAccess access, |
88 Register dest_reg, Register value_reg, | 83 Register dest_reg, Register value_reg, |
89 Register addr_reg) { | 84 Register addr_reg) { |
90 Assembler& assm = *assembler; | 85 MacroAssembler& masm = *assembler; |
91 __ add(addr_reg, r0, Operand(access.offset)); | 86 __ Add(addr_reg, x0, Operand(access.offset)); |
92 | 87 |
93 switch (access.kind) { | 88 switch (access.kind) { |
94 case MemoryAccess::Kind::None: | 89 case MemoryAccess::Kind::None: |
95 break; | 90 break; |
96 | 91 |
97 case MemoryAccess::Kind::Load: | 92 case MemoryAccess::Kind::Load: |
98 switch (access.size) { | 93 switch (access.size) { |
99 case MemoryAccess::Size::Byte: | 94 case MemoryAccess::Size::Byte: |
100 __ ldrb(value_reg, MemOperand(addr_reg)); | 95 __ ldrb(value_reg, MemOperand(addr_reg)); |
101 break; | 96 break; |
102 | 97 |
103 case MemoryAccess::Size::HalfWord: | 98 case MemoryAccess::Size::HalfWord: |
104 __ ldrh(value_reg, MemOperand(addr_reg)); | 99 __ ldrh(value_reg, MemOperand(addr_reg)); |
105 break; | 100 break; |
106 | 101 |
107 case MemoryAccess::Size::Word: | 102 case MemoryAccess::Size::Word: |
108 __ ldr(value_reg, MemOperand(addr_reg)); | 103 __ ldr(value_reg, MemOperand(addr_reg)); |
109 break; | 104 break; |
110 } | 105 } |
111 break; | 106 break; |
112 | 107 |
113 case MemoryAccess::Kind::LoadExcl: | 108 case MemoryAccess::Kind::LoadExcl: |
114 switch (access.size) { | 109 switch (access.size) { |
115 case MemoryAccess::Size::Byte: | 110 case MemoryAccess::Size::Byte: |
116 __ ldrexb(value_reg, addr_reg); | 111 __ ldaxrb(value_reg, addr_reg); |
117 break; | 112 break; |
118 | 113 |
119 case MemoryAccess::Size::HalfWord: | 114 case MemoryAccess::Size::HalfWord: |
120 __ ldrexh(value_reg, addr_reg); | 115 __ ldaxrh(value_reg, addr_reg); |
121 break; | 116 break; |
122 | 117 |
123 case MemoryAccess::Size::Word: | 118 case MemoryAccess::Size::Word: |
124 __ ldrex(value_reg, addr_reg); | 119 __ ldaxr(value_reg, addr_reg); |
125 break; | 120 break; |
126 } | 121 } |
127 break; | 122 break; |
128 | 123 |
129 case MemoryAccess::Kind::Store: | 124 case MemoryAccess::Kind::Store: |
130 switch (access.size) { | 125 switch (access.size) { |
131 case MemoryAccess::Size::Byte: | 126 case MemoryAccess::Size::Byte: |
132 __ mov(value_reg, Operand(access.value)); | 127 __ Mov(value_reg, Operand(access.value)); |
133 __ strb(value_reg, MemOperand(addr_reg)); | 128 __ strb(value_reg, MemOperand(addr_reg)); |
134 break; | 129 break; |
135 | 130 |
136 case MemoryAccess::Size::HalfWord: | 131 case MemoryAccess::Size::HalfWord: |
137 __ mov(value_reg, Operand(access.value)); | 132 __ Mov(value_reg, Operand(access.value)); |
138 __ strh(value_reg, MemOperand(addr_reg)); | 133 __ strh(value_reg, MemOperand(addr_reg)); |
139 break; | 134 break; |
140 | 135 |
141 case MemoryAccess::Size::Word: | 136 case MemoryAccess::Size::Word: |
142 __ mov(value_reg, Operand(access.value)); | 137 __ Mov(value_reg, Operand(access.value)); |
143 __ str(value_reg, MemOperand(addr_reg)); | 138 __ str(value_reg, MemOperand(addr_reg)); |
144 break; | 139 break; |
145 } | 140 } |
146 break; | 141 break; |
147 | 142 |
148 case MemoryAccess::Kind::StoreExcl: | 143 case MemoryAccess::Kind::StoreExcl: |
149 switch (access.size) { | 144 switch (access.size) { |
150 case MemoryAccess::Size::Byte: | 145 case MemoryAccess::Size::Byte: |
151 __ mov(value_reg, Operand(access.value)); | 146 __ Mov(value_reg, Operand(access.value)); |
152 __ strexb(dest_reg, value_reg, addr_reg); | 147 __ stlxrb(dest_reg, value_reg, addr_reg); |
153 break; | 148 break; |
154 | 149 |
155 case MemoryAccess::Size::HalfWord: | 150 case MemoryAccess::Size::HalfWord: |
156 __ mov(value_reg, Operand(access.value)); | 151 __ Mov(value_reg, Operand(access.value)); |
157 __ strexh(dest_reg, value_reg, addr_reg); | 152 __ stlxrh(dest_reg, value_reg, addr_reg); |
158 break; | 153 break; |
159 | 154 |
160 case MemoryAccess::Size::Word: | 155 case MemoryAccess::Size::Word: |
161 __ mov(value_reg, Operand(access.value)); | 156 __ Mov(value_reg, Operand(access.value)); |
162 __ strex(dest_reg, value_reg, addr_reg); | 157 __ stlxr(dest_reg, value_reg, addr_reg); |
163 break; | 158 break; |
164 } | 159 } |
165 break; | 160 break; |
166 } | 161 } |
167 } | 162 } |
168 | 163 |
169 static void AssembleLoadExcl(Assembler* assembler, MemoryAccess access, | 164 static void AssembleLoadExcl(MacroAssembler* assembler, MemoryAccess access, |
170 Register value_reg, Register addr_reg) { | 165 Register value_reg, Register addr_reg) { |
171 DCHECK(access.kind == MemoryAccess::Kind::LoadExcl); | 166 DCHECK(access.kind == MemoryAccess::Kind::LoadExcl); |
172 AssembleMemoryAccess(assembler, access, no_reg, value_reg, addr_reg); | 167 AssembleMemoryAccess(assembler, access, no_reg, value_reg, addr_reg); |
173 } | 168 } |
174 | 169 |
175 static void AssembleStoreExcl(Assembler* assembler, MemoryAccess access, | 170 static void AssembleStoreExcl(MacroAssembler* assembler, MemoryAccess access, |
176 Register dest_reg, Register value_reg, | 171 Register dest_reg, Register value_reg, |
177 Register addr_reg) { | 172 Register addr_reg) { |
178 DCHECK(access.kind == MemoryAccess::Kind::StoreExcl); | 173 DCHECK(access.kind == MemoryAccess::Kind::StoreExcl); |
179 AssembleMemoryAccess(assembler, access, dest_reg, value_reg, addr_reg); | 174 AssembleMemoryAccess(assembler, access, dest_reg, value_reg, addr_reg); |
180 } | 175 } |
181 | 176 |
182 static void TestInvalidateExclusiveAccess( | 177 static void TestInvalidateExclusiveAccess( |
183 TestData initial_data, MemoryAccess access1, MemoryAccess access2, | 178 TestData initial_data, MemoryAccess access1, MemoryAccess access2, |
184 MemoryAccess access3, int expected_res, TestData expected_data) { | 179 MemoryAccess access3, int expected_res, TestData expected_data) { |
185 Isolate* isolate = CcTest::i_isolate(); | 180 Isolate* isolate = CcTest::i_isolate(); |
186 HandleScope scope(isolate); | 181 HandleScope scope(isolate); |
| 182 MacroAssembler masm(isolate, NULL, 0, v8::internal::CodeObjectRequired::kYes); |
187 | 183 |
188 Assembler assm(isolate, NULL, 0); | 184 AssembleLoadExcl(&masm, access1, w1, x1); |
189 | 185 AssembleMemoryAccess(&masm, access2, w3, w2, x1); |
190 AssembleLoadExcl(&assm, access1, r1, r1); | 186 AssembleStoreExcl(&masm, access3, w0, w3, x1); |
191 AssembleMemoryAccess(&assm, access2, r3, r2, r1); | 187 __ br(lr); |
192 AssembleStoreExcl(&assm, access3, r0, r3, r1); | |
193 | |
194 __ mov(pc, Operand(lr)); | |
195 | 188 |
196 CodeDesc desc; | 189 CodeDesc desc; |
197 assm.GetCode(&desc); | 190 masm.GetCode(&desc); |
198 Handle<Code> code = isolate->factory()->NewCode( | 191 Handle<Code> code = isolate->factory()->NewCode( |
199 desc, Code::ComputeFlags(Code::STUB), Handle<Code>()); | 192 desc, Code::ComputeFlags(Code::STUB), Handle<Code>()); |
200 F3 f = FUNCTION_CAST<F3>(code->entry()); | |
201 TestData t = initial_data; | 193 TestData t = initial_data; |
| 194 Simulator::CallArgument args[] = { |
| 195 Simulator::CallArgument(reinterpret_cast<uintptr_t>(&t)), |
| 196 Simulator::CallArgument::End()}; |
| 197 Simulator::current(isolate)->CallVoid(code->entry(), args); |
| 198 int res = Simulator::current(isolate)->wreg(0); |
202 | 199 |
203 int res = | |
204 reinterpret_cast<int>(CALL_GENERATED_CODE(isolate, f, &t, 0, 0, 0, 0)); | |
205 CHECK_EQ(expected_res, res); | 200 CHECK_EQ(expected_res, res); |
206 switch (access3.size) { | 201 switch (access3.size) { |
207 case MemoryAccess::Size::Byte: | 202 case MemoryAccess::Size::Byte: |
208 CHECK_EQ(expected_data.b, t.b); | 203 CHECK_EQ(expected_data.b, t.b); |
209 break; | 204 break; |
210 | 205 |
211 case MemoryAccess::Size::HalfWord: | 206 case MemoryAccess::Size::HalfWord: |
212 CHECK_EQ(expected_data.h, t.h); | 207 CHECK_EQ(expected_data.h, t.h); |
213 break; | 208 break; |
214 | 209 |
215 case MemoryAccess::Size::Word: | 210 case MemoryAccess::Size::Word: |
216 CHECK_EQ(expected_data.w, t.w); | 211 CHECK_EQ(expected_data.w, t.w); |
217 break; | 212 break; |
218 } | 213 } |
219 } | 214 } |
220 | 215 |
221 TEST(simulator_invalidate_exclusive_access) { | 216 TEST(simulator_invalidate_exclusive_access) { |
222 using Kind = MemoryAccess::Kind; | 217 using Kind = MemoryAccess::Kind; |
223 using Size = MemoryAccess::Size; | 218 using Size = MemoryAccess::Size; |
224 | 219 |
225 MemoryAccess ldrex_w(Kind::LoadExcl, Size::Word, offsetof(TestData, w)); | 220 MemoryAccess ldaxr_w(Kind::LoadExcl, Size::Word, offsetof(TestData, w)); |
226 MemoryAccess strex_w(Kind::StoreExcl, Size::Word, offsetof(TestData, w), 7); | 221 MemoryAccess stlxr_w(Kind::StoreExcl, Size::Word, offsetof(TestData, w), 7); |
227 | 222 |
228 // Address mismatch. | 223 // Address mismatch. |
229 TestInvalidateExclusiveAccess( | 224 TestInvalidateExclusiveAccess( |
230 TestData(1), ldrex_w, | 225 TestData(1), ldaxr_w, |
231 MemoryAccess(Kind::LoadExcl, Size::Word, offsetof(TestData, dummy)), | 226 MemoryAccess(Kind::LoadExcl, Size::Word, offsetof(TestData, dummy)), |
232 strex_w, 1, TestData(1)); | 227 stlxr_w, 1, TestData(1)); |
233 | 228 |
234 // Size mismatch. | 229 // Size mismatch. |
235 TestInvalidateExclusiveAccess( | 230 TestInvalidateExclusiveAccess( |
236 TestData(1), ldrex_w, MemoryAccess(), | 231 TestData(1), ldaxr_w, MemoryAccess(), |
237 MemoryAccess(Kind::StoreExcl, Size::HalfWord, offsetof(TestData, w), 7), | 232 MemoryAccess(Kind::StoreExcl, Size::HalfWord, offsetof(TestData, w), 7), |
238 1, TestData(1)); | 233 1, TestData(1)); |
239 | 234 |
240 // Load between ldrex/strex. | 235 // Load between ldaxr/stlxr. |
241 TestInvalidateExclusiveAccess( | 236 TestInvalidateExclusiveAccess( |
242 TestData(1), ldrex_w, | 237 TestData(1), ldaxr_w, |
243 MemoryAccess(Kind::Load, Size::Word, offsetof(TestData, dummy)), strex_w, | 238 MemoryAccess(Kind::Load, Size::Word, offsetof(TestData, dummy)), stlxr_w, |
244 1, TestData(1)); | 239 1, TestData(1)); |
245 | 240 |
246 // Store between ldrex/strex. | 241 // Store between ldaxr/stlxr. |
247 TestInvalidateExclusiveAccess( | 242 TestInvalidateExclusiveAccess( |
248 TestData(1), ldrex_w, | 243 TestData(1), ldaxr_w, |
249 MemoryAccess(Kind::Store, Size::Word, offsetof(TestData, dummy)), strex_w, | 244 MemoryAccess(Kind::Store, Size::Word, offsetof(TestData, dummy)), stlxr_w, |
250 1, TestData(1)); | 245 1, TestData(1)); |
251 | 246 |
252 // Match: exclusive store succeeds. | 247 // Match: exclusive store succeeds. |
253 TestInvalidateExclusiveAccess(TestData(1), ldrex_w, MemoryAccess(), strex_w, | 248 TestInvalidateExclusiveAccess(TestData(1), ldaxr_w, MemoryAccess(), stlxr_w, |
254 0, TestData(7)); | 249 0, TestData(7)); |
255 } | 250 } |
256 | 251 |
257 static int ExecuteMemoryAccess(Isolate* isolate, TestData* test_data, | 252 static int ExecuteMemoryAccess(Isolate* isolate, TestData* test_data, |
258 MemoryAccess access) { | 253 MemoryAccess access) { |
259 HandleScope scope(isolate); | 254 HandleScope scope(isolate); |
260 Assembler assm(isolate, NULL, 0); | 255 MacroAssembler masm(isolate, NULL, 0, v8::internal::CodeObjectRequired::kYes); |
261 AssembleMemoryAccess(&assm, access, r0, r2, r1); | 256 AssembleMemoryAccess(&masm, access, w0, w2, x1); |
262 __ bx(lr); | 257 __ br(lr); |
263 | 258 |
264 CodeDesc desc; | 259 CodeDesc desc; |
265 assm.GetCode(&desc); | 260 masm.GetCode(&desc); |
266 Handle<Code> code = isolate->factory()->NewCode( | 261 Handle<Code> code = isolate->factory()->NewCode( |
267 desc, Code::ComputeFlags(Code::STUB), Handle<Code>()); | 262 desc, Code::ComputeFlags(Code::STUB), Handle<Code>()); |
268 F3 f = FUNCTION_CAST<F3>(code->entry()); | 263 Simulator::CallArgument args[] = { |
269 | 264 Simulator::CallArgument(reinterpret_cast<uintptr_t>(test_data)), |
270 return reinterpret_cast<int>( | 265 Simulator::CallArgument::End()}; |
271 CALL_GENERATED_CODE(isolate, f, test_data, 0, 0, 0, 0)); | 266 Simulator::current(isolate)->CallVoid(code->entry(), args); |
| 267 return Simulator::current(isolate)->wreg(0); |
272 } | 268 } |
273 | 269 |
274 class MemoryAccessThread : public v8::base::Thread { | 270 class MemoryAccessThread : public v8::base::Thread { |
275 public: | 271 public: |
276 MemoryAccessThread() | 272 MemoryAccessThread() |
277 : Thread(Options("MemoryAccessThread")), | 273 : Thread(Options("MemoryAccessThread")), |
278 test_data_(NULL), | 274 test_data_(NULL), |
279 is_finished_(false), | 275 is_finished_(false), |
280 has_request_(false), | 276 has_request_(false), |
281 did_request_(false) {} | 277 did_request_(false) {} |
(...skipping 57 matching lines...)
339 using Size = MemoryAccess::Size; | 335 using Size = MemoryAccess::Size; |
340 | 336 |
341 Isolate* isolate = CcTest::i_isolate(); | 337 Isolate* isolate = CcTest::i_isolate(); |
342 HandleScope scope(isolate); | 338 HandleScope scope(isolate); |
343 | 339 |
344 TestData test_data(1); | 340 TestData test_data(1); |
345 | 341 |
346 MemoryAccessThread thread; | 342 MemoryAccessThread thread; |
347 thread.Start(); | 343 thread.Start(); |
348 | 344 |
349 MemoryAccess ldrex_w(Kind::LoadExcl, Size::Word, offsetof(TestData, w)); | 345 MemoryAccess ldaxr_w(Kind::LoadExcl, Size::Word, offsetof(TestData, w)); |
350 MemoryAccess strex_w(Kind::StoreExcl, Size::Word, offsetof(TestData, w), 7); | 346 MemoryAccess stlxr_w(Kind::StoreExcl, Size::Word, offsetof(TestData, w), 7); |
351 | 347 |
352 // Exclusive store completed by another thread first. | 348 // Exclusive store completed by another thread first. |
353 test_data = TestData(1); | 349 test_data = TestData(1); |
354 thread.NextAndWait(&test_data, MemoryAccess(Kind::LoadExcl, Size::Word, | 350 thread.NextAndWait(&test_data, MemoryAccess(Kind::LoadExcl, Size::Word, |
355 offsetof(TestData, w))); | 351 offsetof(TestData, w))); |
356 ExecuteMemoryAccess(isolate, &test_data, ldrex_w); | 352 ExecuteMemoryAccess(isolate, &test_data, ldaxr_w); |
357 thread.NextAndWait(&test_data, MemoryAccess(Kind::StoreExcl, Size::Word, | 353 thread.NextAndWait(&test_data, MemoryAccess(Kind::StoreExcl, Size::Word, |
358 offsetof(TestData, w), 5)); | 354 offsetof(TestData, w), 5)); |
359 CHECK_EQ(1, ExecuteMemoryAccess(isolate, &test_data, strex_w)); | 355 CHECK_EQ(1, ExecuteMemoryAccess(isolate, &test_data, stlxr_w)); |
360 CHECK_EQ(5, test_data.w); | 356 CHECK_EQ(5, test_data.w); |
361 | 357 |
362 // Exclusive store completed by another thread; different address, but masked | 358 // Exclusive store completed by another thread; different address, but masked |
363 // to the same reservation granule, so the local exclusive store still fails. | 359 // to the same reservation granule, so the local exclusive store still fails. |
364 test_data = TestData(1); | 360 test_data = TestData(1); |
365 ExecuteMemoryAccess(isolate, &test_data, ldrex_w); | 361 ExecuteMemoryAccess(isolate, &test_data, ldaxr_w); |
366 thread.NextAndWait(&test_data, MemoryAccess(Kind::LoadExcl, Size::Word, | 362 thread.NextAndWait(&test_data, MemoryAccess(Kind::LoadExcl, Size::Word, |
367 offsetof(TestData, dummy))); | 363 offsetof(TestData, dummy))); |
368 thread.NextAndWait(&test_data, MemoryAccess(Kind::StoreExcl, Size::Word, | 364 thread.NextAndWait(&test_data, MemoryAccess(Kind::StoreExcl, Size::Word, |
369 offsetof(TestData, dummy), 5)); | 365 offsetof(TestData, dummy), 5)); |
370 CHECK_EQ(1, ExecuteMemoryAccess(isolate, &test_data, strex_w)); | 366 CHECK_EQ(1, ExecuteMemoryAccess(isolate, &test_data, stlxr_w)); |
371 CHECK_EQ(1, test_data.w); | 367 CHECK_EQ(1, test_data.w); |
372 | 368 |
373 // Test failure when store between ldrex/strex. | 369 // Test failure when store between ldaxr/stlxr. |
374 test_data = TestData(1); | 370 test_data = TestData(1); |
375 ExecuteMemoryAccess(isolate, &test_data, ldrex_w); | 371 ExecuteMemoryAccess(isolate, &test_data, ldaxr_w); |
376 thread.NextAndWait(&test_data, MemoryAccess(Kind::Store, Size::Word, | 372 thread.NextAndWait(&test_data, MemoryAccess(Kind::Store, Size::Word, |
377 offsetof(TestData, dummy))); | 373 offsetof(TestData, dummy))); |
378 CHECK_EQ(1, ExecuteMemoryAccess(isolate, &test_data, strex_w)); | 374 CHECK_EQ(1, ExecuteMemoryAccess(isolate, &test_data, stlxr_w)); |
379 CHECK_EQ(1, test_data.w); | 375 CHECK_EQ(1, test_data.w); |
380 | 376 |
381 thread.Finish(); | 377 thread.Finish(); |
382 thread.Join(); | 378 thread.Join(); |
383 } | 379 } |
384 | 380 |
385 #undef __ | 381 #undef __ |
386 | 382 |
387 #endif // USE_SIMULATOR | 383 #endif // USE_SIMULATOR |
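
The exclusive-access pattern exercised above is, at user level, the load-linked/store-conditional retry loop behind compare-and-swap. The standalone sketch below (not part of the patch; plain C++11 with std::atomic, no V8 internals) illustrates the same failure-and-retry behaviour the simulator models for ldaxr/stlxr: absent ARMv8.1 LSE atomics, a weak compare-exchange typically compiles to a ldaxr/stlxr pair, the store-exclusive can fail when the reservation is cleared in between, and callers must loop.

#include <atomic>
#include <cassert>

int main() {
  std::atomic<int> word{1};
  int expected = 1;
  // Without LSE atomics this usually lowers to a ldaxr/stlxr loop: the
  // store-exclusive fails (compare_exchange_weak returns false) if the
  // exclusive reservation was cleared in the meantime, so retry until
  // the swap from 1 to 7 succeeds.
  while (!word.compare_exchange_weak(expected, 7)) {
    expected = 1;  // We still only want to swap if the value is 1.
  }
  assert(word.load() == 7);
  return 0;
}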