// Copyright 2017 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "src/v8.h"
#include "test/cctest/cctest.h"

#include "src/arm64/simulator-arm64.h"
#include "src/factory.h"
#include "src/macro-assembler.h"

#if defined(USE_SIMULATOR)

#ifndef V8_TARGET_LITTLE_ENDIAN
#error Expected ARM to be little-endian
#endif

using namespace v8::base;
using namespace v8::internal;

#define __ masm.

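// Describes a single memory access to assemble: its kind (plain or
// exclusive load/store), its width, the offset from the test data base
// pointer (passed to the generated code in x0), and, for stores, the value
// to write.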
struct MemoryAccess {
  enum class Kind {
    None,
    Load,
    LoadExcl,
    Store,
    StoreExcl,
  };

  enum class Size {
    Byte,
    HalfWord,
    Word,
  };

  MemoryAccess() : kind(Kind::None) {}
  MemoryAccess(Kind kind, Size size, size_t offset, int value = 0)
      : kind(kind), size(size), offset(offset), value(value) {}

  Kind kind;
  Size size;
  size_t offset;
  int value;
};

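// Data the generated code operates on. The union lets byte, half-word and
// word accesses alias the same location; `dummy` provides a second,
// distinct word within the same structure.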
struct TestData {
  explicit TestData(int w) : w(w) {}

  union {
    int32_t w;
    int16_t h;
    int8_t b;
  };
  int dummy;
};

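// Emits code for one MemoryAccess: the address is computed into addr_reg
// from the base pointer in x0; loads read into value_reg, stores move
// access.value into value_reg and write it, and exclusive stores place
// their status result in dest_reg.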
static void AssembleMemoryAccess(MacroAssembler* assembler, MemoryAccess access,
                                 Register dest_reg, Register value_reg,
                                 Register addr_reg) {
  MacroAssembler& masm = *assembler;
  __ Add(addr_reg, x0, Operand(access.offset));

  switch (access.kind) {
    case MemoryAccess::Kind::None:
      break;

    case MemoryAccess::Kind::Load:
      switch (access.size) {
        case MemoryAccess::Size::Byte:
          __ ldrb(value_reg, MemOperand(addr_reg));
          break;

        case MemoryAccess::Size::HalfWord:
          __ ldrh(value_reg, MemOperand(addr_reg));
          break;

        case MemoryAccess::Size::Word:
          __ ldr(value_reg, MemOperand(addr_reg));
          break;
      }
      break;

    case MemoryAccess::Kind::LoadExcl:
      switch (access.size) {
        case MemoryAccess::Size::Byte:
          __ ldaxrb(value_reg, addr_reg);
          break;

        case MemoryAccess::Size::HalfWord:
          __ ldaxrh(value_reg, addr_reg);
          break;

        case MemoryAccess::Size::Word:
          __ ldaxr(value_reg, addr_reg);
          break;
      }
      break;

    case MemoryAccess::Kind::Store:
      switch (access.size) {
        case MemoryAccess::Size::Byte:
          __ Mov(value_reg, Operand(access.value));
          __ strb(value_reg, MemOperand(addr_reg));
          break;

        case MemoryAccess::Size::HalfWord:
          __ Mov(value_reg, Operand(access.value));
          __ strh(value_reg, MemOperand(addr_reg));
          break;

        case MemoryAccess::Size::Word:
          __ Mov(value_reg, Operand(access.value));
          __ str(value_reg, MemOperand(addr_reg));
          break;
      }
      break;

    case MemoryAccess::Kind::StoreExcl:
      switch (access.size) {
        case MemoryAccess::Size::Byte:
          __ Mov(value_reg, Operand(access.value));
          __ stlxrb(dest_reg, value_reg, addr_reg);
          break;

        case MemoryAccess::Size::HalfWord:
          __ Mov(value_reg, Operand(access.value));
          __ stlxrh(dest_reg, value_reg, addr_reg);
          break;

        case MemoryAccess::Size::Word:
          __ Mov(value_reg, Operand(access.value));
          __ stlxr(dest_reg, value_reg, addr_reg);
          break;
      }
      break;
  }
}

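// Convenience wrappers that check the access kind before assembling it.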
static void AssembleLoadExcl(MacroAssembler* assembler, MemoryAccess access,
                             Register value_reg, Register addr_reg) {
  DCHECK(access.kind == MemoryAccess::Kind::LoadExcl);
  AssembleMemoryAccess(assembler, access, no_reg, value_reg, addr_reg);
}

static void AssembleStoreExcl(MacroAssembler* assembler, MemoryAccess access,
                              Register dest_reg, Register value_reg,
                              Register addr_reg) {
  DCHECK(access.kind == MemoryAccess::Kind::StoreExcl);
  AssembleMemoryAccess(assembler, access, dest_reg, value_reg, addr_reg);
}

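// Assembles the sequence access1 (ldaxr), access2 (any access), access3
// (stlxr), runs it in the simulator on a copy of initial_data, and checks
// both the status returned in w0 by the exclusive store (0 on success,
// 1 on failure) and the resulting memory contents.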
static void TestInvalidateExclusiveAccess(
    TestData initial_data, MemoryAccess access1, MemoryAccess access2,
    MemoryAccess access3, int expected_res, TestData expected_data) {
  Isolate* isolate = CcTest::i_isolate();
  HandleScope scope(isolate);
  MacroAssembler masm(isolate, NULL, 0, v8::internal::CodeObjectRequired::kYes);

  AssembleLoadExcl(&masm, access1, w1, x1);
  AssembleMemoryAccess(&masm, access2, w3, w2, x1);
  AssembleStoreExcl(&masm, access3, w0, w3, x1);
  __ br(lr);

  CodeDesc desc;
  masm.GetCode(&desc);
  Handle<Code> code = isolate->factory()->NewCode(
      desc, Code::ComputeFlags(Code::STUB), Handle<Code>());
  TestData t = initial_data;
  Simulator::CallArgument args[] = {
      Simulator::CallArgument(reinterpret_cast<uintptr_t>(&t)),
      Simulator::CallArgument::End()};
  Simulator::current(isolate)->CallVoid(code->entry(), args);
  int res = Simulator::current(isolate)->wreg(0);

  CHECK_EQ(expected_res, res);
  switch (access3.size) {
    case MemoryAccess::Size::Byte:
      CHECK_EQ(expected_data.b, t.b);
      break;

    case MemoryAccess::Size::HalfWord:
      CHECK_EQ(expected_data.h, t.h);
      break;

    case MemoryAccess::Size::Word:
      CHECK_EQ(expected_data.w, t.w);
      break;
  }
}

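// Single-threaded checks: the exclusive store must fail (and leave memory
// untouched) when the preceding exclusive load does not match its address
// or size, or when another access happens between the ldaxr/stlxr pair.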
TEST(simulator_invalidate_exclusive_access) {
  using Kind = MemoryAccess::Kind;
  using Size = MemoryAccess::Size;

  MemoryAccess ldaxr_w(Kind::LoadExcl, Size::Word, offsetof(TestData, w));
  MemoryAccess stlxr_w(Kind::StoreExcl, Size::Word, offsetof(TestData, w), 7);

  // Address mismatch.
  TestInvalidateExclusiveAccess(
      TestData(1), ldaxr_w,
      MemoryAccess(Kind::LoadExcl, Size::Word, offsetof(TestData, dummy)),
      stlxr_w, 1, TestData(1));

  // Size mismatch.
  TestInvalidateExclusiveAccess(
      TestData(1), ldaxr_w, MemoryAccess(),
      MemoryAccess(Kind::StoreExcl, Size::HalfWord, offsetof(TestData, w), 7),
      1, TestData(1));

  // Load between ldaxr/stlxr.
  TestInvalidateExclusiveAccess(
      TestData(1), ldaxr_w,
      MemoryAccess(Kind::Load, Size::Word, offsetof(TestData, dummy)), stlxr_w,
      1, TestData(1));

  // Store between ldaxr/stlxr.
  TestInvalidateExclusiveAccess(
      TestData(1), ldaxr_w,
      MemoryAccess(Kind::Store, Size::Word, offsetof(TestData, dummy)), stlxr_w,
      1, TestData(1));

  // Match.
  TestInvalidateExclusiveAccess(TestData(1), ldaxr_w, MemoryAccess(), stlxr_w,
                                0, TestData(7));
}

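// Assembles and runs a single MemoryAccess in the simulator of the given
// isolate, with test_data as the base pointer, and returns w0 (only
// meaningful for exclusive stores, where it holds the stlxr status).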
static int ExecuteMemoryAccess(Isolate* isolate, TestData* test_data,
                               MemoryAccess access) {
  HandleScope scope(isolate);
  MacroAssembler masm(isolate, NULL, 0, v8::internal::CodeObjectRequired::kYes);
  AssembleMemoryAccess(&masm, access, w0, w2, x1);
  __ br(lr);

  CodeDesc desc;
  masm.GetCode(&desc);
  Handle<Code> code = isolate->factory()->NewCode(
      desc, Code::ComputeFlags(Code::STUB), Handle<Code>());
  Simulator::CallArgument args[] = {
      Simulator::CallArgument(reinterpret_cast<uintptr_t>(test_data)),
      Simulator::CallArgument::End()};
  Simulator::current(isolate)->CallVoid(code->entry(), args);
  return Simulator::current(isolate)->wreg(0);
}

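// Worker thread that performs memory accesses on request from the main
// test thread. It creates its own isolate and runs the accesses in its own
// simulator, so the tests below can interleave accesses from two simulated
// processors.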
class MemoryAccessThread : public v8::base::Thread {
 public:
  MemoryAccessThread()
      : Thread(Options("MemoryAccessThread")),
        test_data_(NULL),
        is_finished_(false),
        has_request_(false),
        did_request_(false) {}

  virtual void Run() {
    v8::Isolate::CreateParams create_params;
    create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
    v8::Isolate* isolate = v8::Isolate::New(create_params);
    Isolate* i_isolate = reinterpret_cast<Isolate*>(isolate);
    v8::Isolate::Scope scope(isolate);

    v8::base::LockGuard<v8::base::Mutex> lock_guard(&mutex_);
    while (!is_finished_) {
      while (!(has_request_ || is_finished_)) {
        has_request_cv_.Wait(&mutex_);
      }

      if (is_finished_) {
        break;
      }

      ExecuteMemoryAccess(i_isolate, test_data_, access_);
      has_request_ = false;
      did_request_ = true;
      did_request_cv_.NotifyOne();
    }
  }

  void NextAndWait(TestData* test_data, MemoryAccess access) {
    DCHECK(!has_request_);
    v8::base::LockGuard<v8::base::Mutex> lock_guard(&mutex_);
    test_data_ = test_data;
    access_ = access;
    has_request_ = true;
    has_request_cv_.NotifyOne();
    while (!did_request_) {
      did_request_cv_.Wait(&mutex_);
    }
    did_request_ = false;
  }

  void Finish() {
    v8::base::LockGuard<v8::base::Mutex> lock_guard(&mutex_);
    is_finished_ = true;
    has_request_cv_.NotifyOne();
  }

 private:
  TestData* test_data_;
  MemoryAccess access_;
  bool is_finished_;
  bool has_request_;
  bool did_request_;
  v8::base::Mutex mutex_;
  v8::base::ConditionVariable has_request_cv_;
  v8::base::ConditionVariable did_request_cv_;
};

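// Checks how exclusive accesses on this thread interact with accesses
// performed concurrently on a second thread (i.e. by another simulated
// processor).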
TEST(simulator_invalidate_exclusive_access_threaded) {
  using Kind = MemoryAccess::Kind;
  using Size = MemoryAccess::Size;

  Isolate* isolate = CcTest::i_isolate();
  HandleScope scope(isolate);

  TestData test_data(1);

  MemoryAccessThread thread;
  thread.Start();

  MemoryAccess ldaxr_w(Kind::LoadExcl, Size::Word, offsetof(TestData, w));
  MemoryAccess stlxr_w(Kind::StoreExcl, Size::Word, offsetof(TestData, w), 7);

  // Exclusive store completed by another thread first.
  test_data = TestData(1);
  thread.NextAndWait(&test_data, MemoryAccess(Kind::LoadExcl, Size::Word,
                                              offsetof(TestData, w)));
  ExecuteMemoryAccess(isolate, &test_data, ldaxr_w);
  thread.NextAndWait(&test_data, MemoryAccess(Kind::StoreExcl, Size::Word,
                                              offsetof(TestData, w), 5));
  CHECK_EQ(1, ExecuteMemoryAccess(isolate, &test_data, stlxr_w));
  CHECK_EQ(5, test_data.w);

  // Exclusive store completed by another thread; different address, but
  // masked to the same region.
  test_data = TestData(1);
  ExecuteMemoryAccess(isolate, &test_data, ldaxr_w);
  thread.NextAndWait(&test_data, MemoryAccess(Kind::LoadExcl, Size::Word,
                                              offsetof(TestData, dummy)));
  thread.NextAndWait(&test_data, MemoryAccess(Kind::StoreExcl, Size::Word,
                                              offsetof(TestData, dummy), 5));
  CHECK_EQ(1, ExecuteMemoryAccess(isolate, &test_data, stlxr_w));
  CHECK_EQ(1, test_data.w);

  // Test failure when a store happens between ldaxr/stlxr.
  test_data = TestData(1);
  ExecuteMemoryAccess(isolate, &test_data, ldaxr_w);
  thread.NextAndWait(&test_data, MemoryAccess(Kind::Store, Size::Word,
                                              offsetof(TestData, dummy)));
  CHECK_EQ(1, ExecuteMemoryAccess(isolate, &test_data, stlxr_w));
  CHECK_EQ(1, test_data.w);

  thread.Finish();
  thread.Join();
}

#undef __

#endif  // USE_SIMULATOR