Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(268)

Side by Side Diff: src/wasm/wasm-interpreter.cc

Issue 1972153002: [wasm] Implement an interpreter for WASM. (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Rebase Created 4 years, 6 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
« no previous file with comments | « src/wasm/wasm-interpreter.h ('k') | src/wasm/wasm-macro-gen.h » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
(Empty)
1 // Copyright 2016 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include "src/wasm/wasm-interpreter.h"
6 #include "src/wasm/ast-decoder.h"
7 #include "src/wasm/decoder.h"
8 #include "src/wasm/wasm-external-refs.h"
9 #include "src/wasm/wasm-module.h"
10
11 #include "src/base/accounting-allocator.h"
12 #include "src/zone-containers.h"
13
14 namespace v8 {
15 namespace internal {
16 namespace wasm {
17
#if DEBUG
// Tracing helper: prints only when --trace-wasm-interpreter is enabled.
#define TRACE(...) \
  do { \
    if (FLAG_trace_wasm_interpreter) PrintF(__VA_ARGS__); \
  } while (false)
#else
#define TRACE(...)
#endif

// Opcodes used internally by the interpreter only (not real wasm opcodes).
#define FOREACH_INTERNAL_OPCODE(V) V(Breakpoint, 0xFF)

// Binary operators that map directly onto a C++ infix operator applied to a
// given representation type: V(opcode, ctype, op).
#define FOREACH_SIMPLE_BINOP(V) \
  V(I32Add, uint32_t, +)        \
  V(I32Sub, uint32_t, -)        \
  V(I32Mul, uint32_t, *)        \
  V(I32And, uint32_t, &)        \
  V(I32Ior, uint32_t, |)        \
  V(I32Xor, uint32_t, ^)        \
  V(I32Eq, uint32_t, ==)        \
  V(I32Ne, uint32_t, !=)        \
  V(I32LtU, uint32_t, <)        \
  V(I32LeU, uint32_t, <=)       \
  V(I32GtU, uint32_t, >)        \
  V(I32GeU, uint32_t, >=)       \
  V(I32LtS, int32_t, <)         \
  V(I32LeS, int32_t, <=)        \
  V(I32GtS, int32_t, >)         \
  V(I32GeS, int32_t, >=)        \
  V(I64Add, uint64_t, +)        \
  V(I64Sub, uint64_t, -)        \
  V(I64Mul, uint64_t, *)        \
  V(I64And, uint64_t, &)        \
  V(I64Ior, uint64_t, |)        \
  V(I64Xor, uint64_t, ^)        \
  V(I64Eq, uint64_t, ==)        \
  V(I64Ne, uint64_t, !=)        \
  V(I64LtU, uint64_t, <)        \
  V(I64LeU, uint64_t, <=)       \
  V(I64GtU, uint64_t, >)        \
  V(I64GeU, uint64_t, >=)       \
  V(I64LtS, int64_t, <)         \
  V(I64LeS, int64_t, <=)        \
  V(I64GtS, int64_t, >)         \
  V(I64GeS, int64_t, >=)        \
  V(F32Add, float, +)           \
  V(F32Sub, float, -)           \
  V(F32Mul, float, *)           \
  V(F32Div, float, /)           \
  V(F32Eq, float, ==)           \
  V(F32Ne, float, !=)           \
  V(F32Lt, float, <)            \
  V(F32Le, float, <=)           \
  V(F32Gt, float, >)            \
  V(F32Ge, float, >=)           \
  V(F64Add, double, +)          \
  V(F64Sub, double, -)          \
  V(F64Mul, double, *)          \
  V(F64Div, double, /)          \
  V(F64Eq, double, ==)          \
  V(F64Ne, double, !=)          \
  V(F64Lt, double, <)           \
  V(F64Le, double, <=)          \
  V(F64Gt, double, >)           \
  V(F64Ge, double, >=)

// Binary operators that need a hand-written ExecuteXxx() helper below
// (trapping division, shift-count masking, NaN handling, ...): V(opcode, ctype).
#define FOREACH_OTHER_BINOP(V) \
  V(I32DivS, int32_t)          \
  V(I32DivU, uint32_t)         \
  V(I32RemS, int32_t)          \
  V(I32RemU, uint32_t)         \
  V(I32Shl, uint32_t)          \
  V(I32ShrU, uint32_t)         \
  V(I32ShrS, int32_t)          \
  V(I64DivS, int64_t)          \
  V(I64DivU, uint64_t)         \
  V(I64RemS, int64_t)          \
  V(I64RemU, uint64_t)         \
  V(I64Shl, uint64_t)          \
  V(I64ShrU, uint64_t)         \
  V(I64ShrS, int64_t)          \
  V(I32Ror, int32_t)           \
  V(I32Rol, int32_t)           \
  V(I64Ror, int64_t)           \
  V(I64Rol, int64_t)           \
  V(F32Min, float)             \
  V(F32Max, float)             \
  V(F32CopySign, float)        \
  V(F64Min, double)            \
  V(F64Max, double)            \
  V(F64CopySign, double)       \
  V(I32AsmjsDivS, int32_t)     \
  V(I32AsmjsDivU, uint32_t)    \
  V(I32AsmjsRemS, int32_t)     \
  V(I32AsmjsRemU, uint32_t)

// Unary operators and conversions with hand-written ExecuteXxx() helpers:
// V(opcode, ctype) where ctype is the *input* representation type.
#define FOREACH_OTHER_UNOP(V)    \
  V(I32Clz, uint32_t)            \
  V(I32Ctz, uint32_t)            \
  V(I32Popcnt, uint32_t)         \
  V(I32Eqz, uint32_t)            \
  V(I64Clz, uint64_t)            \
  V(I64Ctz, uint64_t)            \
  V(I64Popcnt, uint64_t)         \
  V(I64Eqz, uint64_t)            \
  V(F32Abs, float)               \
  V(F32Neg, float)               \
  V(F32Ceil, float)              \
  V(F32Floor, float)             \
  V(F32Trunc, float)             \
  V(F32NearestInt, float)        \
  V(F32Sqrt, float)              \
  V(F64Abs, double)              \
  V(F64Neg, double)              \
  V(F64Ceil, double)             \
  V(F64Floor, double)            \
  V(F64Trunc, double)            \
  V(F64NearestInt, double)       \
  V(F64Sqrt, double)             \
  V(I32SConvertF32, float)       \
  V(I32SConvertF64, double)      \
  V(I32UConvertF32, float)       \
  V(I32UConvertF64, double)      \
  V(I32ConvertI64, int64_t)      \
  V(I64SConvertF32, float)       \
  V(I64SConvertF64, double)      \
  V(I64UConvertF32, float)       \
  V(I64UConvertF64, double)      \
  V(I64SConvertI32, int32_t)     \
  V(I64UConvertI32, uint32_t)    \
  V(F32SConvertI32, int32_t)     \
  V(F32UConvertI32, uint32_t)    \
  V(F32SConvertI64, int64_t)     \
  V(F32UConvertI64, uint64_t)    \
  V(F32ConvertF64, double)      \
  V(F32ReinterpretI32, int32_t) \
  V(F64SConvertI32, int32_t)    \
  V(F64UConvertI32, uint32_t)   \
  V(F64SConvertI64, int64_t)    \
  V(F64UConvertI64, uint64_t)   \
  V(F64ConvertF32, float)       \
  V(F64ReinterpretI64, int64_t) \
  V(I32ReinterpretF32, float)   \
  V(I64ReinterpretF64, double)  \
  V(I32AsmjsSConvertF32, float) \
  V(I32AsmjsUConvertF32, float) \
  V(I32AsmjsSConvertF64, double) \
  V(I32AsmjsUConvertF64, double)
// Signed 32-bit division. Traps on division by zero and on the single
// unrepresentable case INT32_MIN / -1 (whose mathematical result, 2^31,
// does not fit in int32_t).
static inline int32_t ExecuteI32DivS(int32_t a, int32_t b, TrapReason* trap) {
  if (b == 0) {
    *trap = kTrapDivByZero;
    return 0;
  }
  if (b == -1 && a == std::numeric_limits<int32_t>::min()) {
    *trap = kTrapDivUnrepresentable;
    return 0;
  }
  return a / b;
}

// Unsigned 32-bit division; only division by zero traps.
static inline uint32_t ExecuteI32DivU(uint32_t a, uint32_t b,
                                      TrapReason* trap) {
  if (b == 0) {
    *trap = kTrapDivByZero;
    return 0;
  }
  return a / b;
}

// Signed 32-bit remainder. Traps on zero divisor. The b == -1 case is
// special-cased because INT32_MIN % -1 is undefined behavior in C++ even
// though the wasm result is well-defined (0); for every a, a % -1 == 0.
static inline int32_t ExecuteI32RemS(int32_t a, int32_t b, TrapReason* trap) {
  if (b == 0) {
    *trap = kTrapRemByZero;
    return 0;
  }
  if (b == -1) return 0;
  return a % b;
}

// Unsigned 32-bit remainder; only division by zero traps.
static inline uint32_t ExecuteI32RemU(uint32_t a, uint32_t b,
                                      TrapReason* trap) {
  if (b == 0) {
    *trap = kTrapRemByZero;
    return 0;
  }
  return a % b;
}

// 32-bit shifts: wasm takes the shift count modulo 32, hence the & 0x1f,
// which also keeps the C++ shift well-defined. None of these trap.
static inline uint32_t ExecuteI32Shl(uint32_t a, uint32_t b, TrapReason* trap) {
  return a << (b & 0x1f);
}

static inline uint32_t ExecuteI32ShrU(uint32_t a, uint32_t b,
                                      TrapReason* trap) {
  return a >> (b & 0x1f);
}

// Arithmetic (sign-extending) right shift.
static inline int32_t ExecuteI32ShrS(int32_t a, int32_t b, TrapReason* trap) {
  return a >> (b & 0x1f);
}
217
// 64-bit variants of the division/remainder/shift helpers above; see the
// 32-bit versions for the rationale behind each special case.
static inline int64_t ExecuteI64DivS(int64_t a, int64_t b, TrapReason* trap) {
  if (b == 0) {
    *trap = kTrapDivByZero;
    return 0;
  }
  if (b == -1 && a == std::numeric_limits<int64_t>::min()) {
    // INT64_MIN / -1 overflows int64_t.
    *trap = kTrapDivUnrepresentable;
    return 0;
  }
  return a / b;
}

static inline uint64_t ExecuteI64DivU(uint64_t a, uint64_t b,
                                      TrapReason* trap) {
  if (b == 0) {
    *trap = kTrapDivByZero;
    return 0;
  }
  return a / b;
}

static inline int64_t ExecuteI64RemS(int64_t a, int64_t b, TrapReason* trap) {
  if (b == 0) {
    *trap = kTrapRemByZero;
    return 0;
  }
  // Avoid UB for INT64_MIN % -1; the result is 0 for any a.
  if (b == -1) return 0;
  return a % b;
}

static inline uint64_t ExecuteI64RemU(uint64_t a, uint64_t b,
                                      TrapReason* trap) {
  if (b == 0) {
    *trap = kTrapRemByZero;
    return 0;
  }
  return a % b;
}

// 64-bit shifts: shift count is taken modulo 64 (wasm semantics), which
// also keeps the C++ shift well-defined.
static inline uint64_t ExecuteI64Shl(uint64_t a, uint64_t b, TrapReason* trap) {
  return a << (b & 0x3f);
}

static inline uint64_t ExecuteI64ShrU(uint64_t a, uint64_t b,
                                      TrapReason* trap) {
  return a >> (b & 0x3f);
}

// Arithmetic (sign-extending) right shift.
static inline int64_t ExecuteI64ShrS(int64_t a, int64_t b, TrapReason* trap) {
  return a >> (b & 0x3f);
}
269
270 static inline uint32_t ExecuteI32Ror(uint32_t a, uint32_t b, TrapReason* trap) {
271 uint32_t shift = (b & 0x1f);
272 return (a >> shift) | (a << (32 - shift));
273 }
274
275 static inline uint32_t ExecuteI32Rol(uint32_t a, uint32_t b, TrapReason* trap) {
276 uint32_t shift = (b & 0x1f);
277 return (a << shift) | (a >> (32 - shift));
278 }
279
280 static inline uint64_t ExecuteI64Ror(uint64_t a, uint64_t b, TrapReason* trap) {
281 uint32_t shift = (b & 0x3f);
282 return (a >> shift) | (a << (64 - shift));
283 }
284
285 static inline uint64_t ExecuteI64Rol(uint64_t a, uint64_t b, TrapReason* trap) {
286 uint32_t shift = (b & 0x3f);
287 return (a << shift) | (a >> (64 - shift));
288 }
289
// Converts a NaN to the platform's canonical quiet form. The IEEE-754
// "is-quiet" convention for the top mantissa bit differs across
// architectures, so probe the runtime's own quiet_NaN() to decide whether
// to set or clear that bit. Callers pass a NaN; for non-NaN inputs the bit
// twiddling would alter the value.
static float quiet(float a) {
  static const uint32_t kSignalingBit = 1 << 22;  // top mantissa bit of f32
  uint32_t q = bit_cast<uint32_t>(std::numeric_limits<float>::quiet_NaN());
  if ((q & kSignalingBit) != 0) {
    // On some machines, the signaling bit set indicates it's a quiet NaN.
    return bit_cast<float>(bit_cast<uint32_t>(a) | kSignalingBit);
  } else {
    // On others, the signaling bit set indicates it's a signaling NaN.
    return bit_cast<float>(bit_cast<uint32_t>(a) & ~kSignalingBit);
  }
}

// Double-precision counterpart of quiet(float) above.
static double quiet(double a) {
  static const uint64_t kSignalingBit = 1ULL << 51;  // top mantissa bit of f64
  uint64_t q = bit_cast<uint64_t>(std::numeric_limits<double>::quiet_NaN());
  if ((q & kSignalingBit) != 0) {
    // On some machines, the signaling bit set indicates it's a quiet NaN.
    return bit_cast<double>(bit_cast<uint64_t>(a) | kSignalingBit);
  } else {
    // On others, the signaling bit set indicates it's a signaling NaN.
    return bit_cast<double>(bit_cast<uint64_t>(a) & ~kSignalingBit);
  }
}
313
314 static inline float ExecuteF32Min(float a, float b, TrapReason* trap) {
315 if (std::isnan(a)) return quiet(a);
316 if (std::isnan(b)) return quiet(b);
317 return std::min(a, b);
318 }
319
320 static inline float ExecuteF32Max(float a, float b, TrapReason* trap) {
321 if (std::isnan(a)) return quiet(a);
322 if (std::isnan(b)) return quiet(b);
323 return std::max(a, b);
324 }
325
326 static inline float ExecuteF32CopySign(float a, float b, TrapReason* trap) {
327 return copysignf(a, b);
328 }
329
330 static inline double ExecuteF64Min(double a, double b, TrapReason* trap) {
331 if (std::isnan(a)) return quiet(a);
332 if (std::isnan(b)) return quiet(b);
333 return std::min(a, b);
334 }
335
336 static inline double ExecuteF64Max(double a, double b, TrapReason* trap) {
337 if (std::isnan(a)) return quiet(a);
338 if (std::isnan(b)) return quiet(b);
339 return std::max(a, b);
340 }
341
342 static inline double ExecuteF64CopySign(double a, double b, TrapReason* trap) {
343 return copysign(a, b);
344 }
345
// asm.js semantics never trap: out-of-domain cases fold to a defined value
// instead. Division/remainder by zero yields 0, and INT32_MIN / -1 wraps
// to INT32_MIN.
static inline int32_t ExecuteI32AsmjsDivS(int32_t a, int32_t b,
                                          TrapReason* trap) {
  if (b == 0) return 0;
  if (b == -1 && a == std::numeric_limits<int32_t>::min()) {
    return std::numeric_limits<int32_t>::min();
  }
  return a / b;
}

static inline uint32_t ExecuteI32AsmjsDivU(uint32_t a, uint32_t b,
                                           TrapReason* trap) {
  if (b == 0) return 0;
  return a / b;
}

static inline int32_t ExecuteI32AsmjsRemS(int32_t a, int32_t b,
                                          TrapReason* trap) {
  if (b == 0) return 0;
  // a % -1 == 0 for every a; special-cased to dodge INT32_MIN % -1 UB.
  if (b == -1) return 0;
  return a % b;
}

static inline uint32_t ExecuteI32AsmjsRemU(uint32_t a, uint32_t b,
                                           TrapReason* trap) {
  if (b == 0) return 0;
  return a % b;
}

// asm.js float->int conversions use the JS ToInt32/ToUint32 truncation
// (via DoubleToInt32/DoubleToUint32), which never traps: NaN and
// out-of-range values wrap instead.
static inline int32_t ExecuteI32AsmjsSConvertF32(float a, TrapReason* trap) {
  return DoubleToInt32(a);
}

static inline uint32_t ExecuteI32AsmjsUConvertF32(float a, TrapReason* trap) {
  return DoubleToUint32(a);
}

static inline int32_t ExecuteI32AsmjsSConvertF64(double a, TrapReason* trap) {
  return DoubleToInt32(a);
}

static inline uint32_t ExecuteI32AsmjsUConvertF64(double a, TrapReason* trap) {
  return DoubleToUint32(a);
}
389
// Bit-counting and zero-test operators. The popcnt variants go through the
// external-reference wrappers so the interpreter matches the behavior of
// generated code.
static int32_t ExecuteI32Clz(uint32_t val, TrapReason* trap) {
  return base::bits::CountLeadingZeros32(val);
}

static uint32_t ExecuteI32Ctz(uint32_t val, TrapReason* trap) {
  return base::bits::CountTrailingZeros32(val);
}

static uint32_t ExecuteI32Popcnt(uint32_t val, TrapReason* trap) {
  return word32_popcnt_wrapper(&val);
}

// i32.eqz: 1 if the operand is zero, else 0.
static inline uint32_t ExecuteI32Eqz(uint32_t val, TrapReason* trap) {
  return val == 0 ? 1 : 0;
}

static int64_t ExecuteI64Clz(uint64_t val, TrapReason* trap) {
  return base::bits::CountLeadingZeros64(val);
}

static inline uint64_t ExecuteI64Ctz(uint64_t val, TrapReason* trap) {
  return base::bits::CountTrailingZeros64(val);
}

static inline int64_t ExecuteI64Popcnt(uint64_t val, TrapReason* trap) {
  return word64_popcnt_wrapper(&val);
}

// i64.eqz produces an i32 result.
static inline int32_t ExecuteI64Eqz(uint64_t val, TrapReason* trap) {
  return val == 0 ? 1 : 0;
}
421
// Floating-point unary operators. abs/neg are implemented as raw sign-bit
// manipulation so that NaN payloads pass through unchanged; the rest defer
// to the C library.
static inline float ExecuteF32Abs(float a, TrapReason* trap) {
  return bit_cast<float>(bit_cast<uint32_t>(a) & 0x7fffffff);
}

static inline float ExecuteF32Neg(float a, TrapReason* trap) {
  return bit_cast<float>(bit_cast<uint32_t>(a) ^ 0x80000000);
}

static inline float ExecuteF32Ceil(float a, TrapReason* trap) {
  return ceilf(a);
}

static inline float ExecuteF32Floor(float a, TrapReason* trap) {
  return floorf(a);
}

static inline float ExecuteF32Trunc(float a, TrapReason* trap) {
  return truncf(a);
}

// Round-to-nearest, ties-to-even (assumes the default rounding mode).
static inline float ExecuteF32NearestInt(float a, TrapReason* trap) {
  return nearbyintf(a);
}

static inline float ExecuteF32Sqrt(float a, TrapReason* trap) {
  return sqrtf(a);
}

static inline double ExecuteF64Abs(double a, TrapReason* trap) {
  return bit_cast<double>(bit_cast<uint64_t>(a) & 0x7fffffffffffffff);
}

static inline double ExecuteF64Neg(double a, TrapReason* trap) {
  return bit_cast<double>(bit_cast<uint64_t>(a) ^ 0x8000000000000000);
}

static inline double ExecuteF64Ceil(double a, TrapReason* trap) {
  return ceil(a);
}

static inline double ExecuteF64Floor(double a, TrapReason* trap) {
  return floor(a);
}

static inline double ExecuteF64Trunc(double a, TrapReason* trap) {
  return trunc(a);
}

static inline double ExecuteF64NearestInt(double a, TrapReason* trap) {
  return nearbyint(a);
}

static inline double ExecuteF64Sqrt(double a, TrapReason* trap) {
  return sqrt(a);
}
477
// Truncating f32 -> signed i32 conversion; traps when the truncated value
// is not representable. Note the asymmetric bounds: INT32_MAX (2^31 - 1) is
// not exactly representable as a float (it rounds up to 2^31), so the upper
// comparison against static_cast<float>(INT32_MAX) == 2^31 must be strict,
// while INT32_MIN (-2^31) is exact and may be included.
static int32_t ExecuteI32SConvertF32(float a, TrapReason* trap) {
  if (a < static_cast<float>(INT32_MAX) && a >= static_cast<float>(INT32_MIN)) {
    return static_cast<int32_t>(a);
  }
  *trap = kTrapFloatUnrepresentable;
  return 0;
}

// Truncating f64 -> signed i32 conversion. Doubles represent all i32 values
// exactly, so the valid open interval is (INT32_MIN - 1, INT32_MAX + 1):
// fractional values just beyond the extremes still truncate into range.
static int32_t ExecuteI32SConvertF64(double a, TrapReason* trap) {
  if (a < (static_cast<double>(INT32_MAX) + 1.0) &&
      a > (static_cast<double>(INT32_MIN) - 1.0)) {
    return static_cast<int32_t>(a);
  }
  *trap = kTrapFloatUnrepresentable;
  return 0;
}
494
495 static uint32_t ExecuteI32UConvertF32(float a, TrapReason* trap) {
496 if (a < (static_cast<float>(UINT32_MAX) + 1.0) && a > -1) {
497 return static_cast<uint32_t>(a);
498 }
499 *trap = kTrapFloatUnrepresentable;
500 return 0;
501 }
502
503 static uint32_t ExecuteI32UConvertF64(double a, TrapReason* trap) {
504 if (a < (static_cast<float>(UINT32_MAX) + 1.0) && a > -1) {
505 return static_cast<uint32_t>(a);
506 }
507 *trap = kTrapFloatUnrepresentable;
508 return 0;
509 }
510
// i64 -> i32 wrapping conversion (keeps the low 32 bits).
static inline uint32_t ExecuteI32ConvertI64(int64_t a, TrapReason* trap) {
  return static_cast<uint32_t>(a & 0xFFFFFFFF);
}

// Float -> i64 conversions go through the external-reference wrappers so
// the interpreter matches generated code on platforms without native
// 64-bit conversions. A false return means the value is unrepresentable.
// NOTE(review): on failure {output} is returned possibly unwritten by the
// wrapper; callers are expected to check the trap first — confirm.
static int64_t ExecuteI64SConvertF32(float a, TrapReason* trap) {
  int64_t output;
  if (!float32_to_int64_wrapper(&a, &output)) {
    *trap = kTrapFloatUnrepresentable;
  }
  return output;
}

static int64_t ExecuteI64SConvertF64(double a, TrapReason* trap) {
  int64_t output;
  if (!float64_to_int64_wrapper(&a, &output)) {
    *trap = kTrapFloatUnrepresentable;
  }
  return output;
}

static uint64_t ExecuteI64UConvertF32(float a, TrapReason* trap) {
  uint64_t output;
  if (!float32_to_uint64_wrapper(&a, &output)) {
    *trap = kTrapFloatUnrepresentable;
  }
  return output;
}

static uint64_t ExecuteI64UConvertF64(double a, TrapReason* trap) {
  uint64_t output;
  if (!float64_to_uint64_wrapper(&a, &output)) {
    *trap = kTrapFloatUnrepresentable;
  }
  return output;
}

// i32 -> i64 widening: sign-extending and zero-extending variants.
static inline int64_t ExecuteI64SConvertI32(int32_t a, TrapReason* trap) {
  return static_cast<int64_t>(a);
}

static inline int64_t ExecuteI64UConvertI32(uint32_t a, TrapReason* trap) {
  return static_cast<uint64_t>(a);
}

// Integer -> float conversions; the i64 variants use wrappers (see above).
static inline float ExecuteF32SConvertI32(int32_t a, TrapReason* trap) {
  return static_cast<float>(a);
}

static inline float ExecuteF32UConvertI32(uint32_t a, TrapReason* trap) {
  return static_cast<float>(a);
}

static inline float ExecuteF32SConvertI64(int64_t a, TrapReason* trap) {
  float output;
  int64_to_float32_wrapper(&a, &output);
  return output;
}

static inline float ExecuteF32UConvertI64(uint64_t a, TrapReason* trap) {
  float output;
  uint64_to_float32_wrapper(&a, &output);
  return output;
}

// f64 -> f32 demotion (may round / overflow to infinity; never traps).
static inline float ExecuteF32ConvertF64(double a, TrapReason* trap) {
  return static_cast<float>(a);
}

// Reinterpretations copy raw bits between int and float representations.
static inline float ExecuteF32ReinterpretI32(int32_t a, TrapReason* trap) {
  return bit_cast<float>(a);
}

static inline double ExecuteF64SConvertI32(int32_t a, TrapReason* trap) {
  return static_cast<double>(a);
}

static inline double ExecuteF64UConvertI32(uint32_t a, TrapReason* trap) {
  return static_cast<double>(a);
}

static inline double ExecuteF64SConvertI64(int64_t a, TrapReason* trap) {
  double output;
  int64_to_float64_wrapper(&a, &output);
  return output;
}

static inline double ExecuteF64UConvertI64(uint64_t a, TrapReason* trap) {
  double output;
  uint64_to_float64_wrapper(&a, &output);
  return output;
}

// f32 -> f64 promotion (exact; never traps).
static inline double ExecuteF64ConvertF32(float a, TrapReason* trap) {
  return static_cast<double>(a);
}

static inline double ExecuteF64ReinterpretI64(int64_t a, TrapReason* trap) {
  return bit_cast<double>(a);
}

static inline int32_t ExecuteI32ReinterpretF32(float a, TrapReason* trap) {
  return bit_cast<int32_t>(a);
}

static inline int64_t ExecuteI64ReinterpretF64(double a, TrapReason* trap) {
  return bit_cast<int64_t>(a);
}
618
// Enum of interpreter-internal opcodes (currently just kInternalBreakpoint),
// generated from FOREACH_INTERNAL_OPCODE above.
enum InternalOpcode {
#define DECL_INTERNAL_ENUM(name, value) kInternal##name = value,
  FOREACH_INTERNAL_OPCODE(DECL_INTERNAL_ENUM)
#undef DECL_INTERNAL_ENUM
};

// Returns a printable name for either an internal opcode or a regular wasm
// opcode (the internal values occupy byte values wasm does not use).
static const char* OpcodeName(uint32_t val) {
  switch (val) {
#define DECL_INTERNAL_CASE(name, value) \
  case kInternal##name:                 \
    return "Internal" #name;
    FOREACH_INTERNAL_OPCODE(DECL_INTERNAL_CASE)
#undef DECL_INTERNAL_CASE
  }
  return WasmOpcodes::OpcodeName(static_cast<WasmOpcode>(val));
}

// Number of instructions executed per Run() slice before checking state.
static const int kRunSteps = 1000;
637
638 // A helper class to compute the control transfers for each bytecode offset.
639 // Control transfers allow Br, BrIf, BrTable, If, Else, and End bytecodes to
640 // be directly executed without the need to dynamically track blocks.
// A helper class to compute the control transfers for each bytecode offset.
// Control transfers allow Br, BrIf, BrTable, If, Else, and End bytecodes to
// be directly executed without the need to dynamically track blocks.
class ControlTransfers : public ZoneObject {
 public:
  // Maps the pc offset of each control-flow bytecode to its precomputed
  // {pc delta, stack delta, stack action} transfer.
  ControlTransferMap map_;

  // Walks the bytecode from {start + locals_encoded_size} to {end} once and
  // fills {map_}; all allocations go into {zone}.
  ControlTransfers(Zone* zone, size_t locals_encoded_size, const byte* start,
                   const byte* end)
      : map_(zone) {
    // A control reference including from PC, from value depth, and whether
    // a value is explicitly passed (e.g. br/br_if/br_table with value).
    struct CRef {
      const byte* pc;
      sp_t value_depth;
      bool explicit_value;
    };

    // Represents a control flow label.
    struct CLabel : public ZoneObject {
      const byte* target;       // nullptr until the label is bound.
      size_t value_depth;       // value stack depth at the label.
      ZoneVector<CRef> refs;    // forward references, resolved on Bind().

      CLabel(Zone* zone, size_t v)
          : target(nullptr), value_depth(v), refs(zone) {}

      // Bind this label to the given PC.
      void Bind(ControlTransferMap* map, const byte* start, const byte* pc,
                bool expect_value) {
        DCHECK_NULL(target);
        target = pc;
        // Resolve all pending forward references now that the target pc is
        // known, recording a transfer entry for each referencing bytecode.
        for (auto from : refs) {
          auto pcdiff = static_cast<pcdiff_t>(target - from.pc);
          auto spdiff = static_cast<spdiff_t>(from.value_depth - value_depth);
          ControlTransfer::StackAction action = ControlTransfer::kNoAction;
          if (expect_value && !from.explicit_value) {
            // The target expects a value but this branch carries none:
            // either synthesize a void value or replace the top of stack.
            action = spdiff == 0 ? ControlTransfer::kPushVoid
                                 : ControlTransfer::kPopAndRepush;
          }
          pc_t offset = static_cast<size_t>(from.pc - start);
          (*map)[offset] = {pcdiff, spdiff, action};
        }
      }

      // Reference this label from the given location.
      void Ref(ControlTransferMap* map, const byte* start, CRef from) {
        DCHECK_GE(from.value_depth, value_depth);
        if (target) {
          // Backward branch (e.g. to a loop header): resolve immediately.
          auto pcdiff = static_cast<pcdiff_t>(target - from.pc);
          auto spdiff = static_cast<spdiff_t>(from.value_depth - value_depth);
          pc_t offset = static_cast<size_t>(from.pc - start);
          (*map)[offset] = {pcdiff, spdiff, ControlTransfer::kNoAction};
        } else {
          // Forward branch: remember it, resolved later by Bind().
          refs.push_back(from);
        }
      }
    };

    // An entry in the control stack.
    struct Control {
      const byte* pc;
      CLabel* end_label;
      CLabel* else_label;  // non-null only while inside an if's true block.

      void Ref(ControlTransferMap* map, const byte* start, const byte* from_pc,
               size_t from_value_depth, bool explicit_value) {
        end_label->Ref(map, start, {from_pc, from_value_depth, explicit_value});
      }
    };

    // Compute the ControlTransfer map.
    // This works by maintaining a stack of control constructs similar to the
    // AST decoder. The {control_stack} allows matching {br,br_if,br_table}
    // bytecodes with their target, as well as determining whether the current
    // bytecodes are within the true or false block of an else.
    // The value stack depth is tracked as {value_depth} and is needed to
    // determine how many values to pop off the stack for explicit and
    // implicit control flow.

    std::vector<Control> control_stack;
    size_t value_depth = 0;
    Decoder decoder(start, end);  // for reading operands.
    const byte* pc = start + locals_encoded_size;

    while (pc < end) {
      WasmOpcode opcode = static_cast<WasmOpcode>(*pc);
      TRACE("@%td: control %s (depth = %zu)\n", (pc - start),
            WasmOpcodes::OpcodeName(opcode), value_depth);
      switch (opcode) {
        case kExprBlock: {
          TRACE("control @%td $%zu: Block\n", (pc - start), value_depth);
          CLabel* label = new (zone) CLabel(zone, value_depth);
          control_stack.push_back({pc, label, nullptr});
          break;
        }
        case kExprLoop: {
          TRACE("control @%td $%zu: Loop\n", (pc - start), value_depth);
          // Two labels: {label1} for the loop's end (forward branches) and
          // {label2} for its header, bound immediately (backward branches).
          CLabel* label1 = new (zone) CLabel(zone, value_depth);
          CLabel* label2 = new (zone) CLabel(zone, value_depth);
          control_stack.push_back({pc, label1, nullptr});
          control_stack.push_back({pc, label2, nullptr});
          label2->Bind(&map_, start, pc, false);
          break;
        }
        case kExprIf: {
          TRACE("control @%td $%zu: If\n", (pc - start), value_depth);
          value_depth--;  // the condition is consumed.
          CLabel* end_label = new (zone) CLabel(zone, value_depth);
          CLabel* else_label = new (zone) CLabel(zone, value_depth);
          control_stack.push_back({pc, end_label, else_label});
          // The If itself branches to the else/end when the condition fails.
          else_label->Ref(&map_, start, {pc, value_depth, false});
          break;
        }
        case kExprElse: {
          Control* c = &control_stack.back();
          TRACE("control @%td $%zu: Else\n", (pc - start), value_depth);
          // Falling off the true block jumps over the false block to End.
          c->end_label->Ref(&map_, start, {pc, value_depth, false});
          value_depth = c->end_label->value_depth;
          DCHECK_NOT_NULL(c->else_label);
          c->else_label->Bind(&map_, start, pc + 1, false);
          c->else_label = nullptr;
          break;
        }
        case kExprEnd: {
          Control* c = &control_stack.back();
          TRACE("control @%td $%zu: End\n", (pc - start), value_depth);
          if (c->end_label->target) {
            // only loops have bound labels.
            DCHECK_EQ(kExprLoop, *c->pc);
            // Pop the loop-header entry and fall through to the loop's
            // outer (end) entry below.
            control_stack.pop_back();
            c = &control_stack.back();
          }
          if (c->else_label) c->else_label->Bind(&map_, start, pc + 1, true);
          c->end_label->Ref(&map_, start, {pc, value_depth, false});
          c->end_label->Bind(&map_, start, pc + 1, true);
          // Every construct leaves exactly one value on the stack.
          value_depth = c->end_label->value_depth + 1;
          control_stack.pop_back();
          break;
        }
        case kExprBr: {
          BreakDepthOperand operand(&decoder, pc);
          TRACE("control @%td $%zu: Br[arity=%u, depth=%u]\n", (pc - start),
                value_depth, operand.arity, operand.depth);
          value_depth -= operand.arity;
          control_stack[control_stack.size() - operand.depth - 1].Ref(
              &map_, start, pc, value_depth, operand.arity > 0);
          value_depth++;  // the br expression itself produces one value.
          break;
        }
        case kExprBrIf: {
          BreakDepthOperand operand(&decoder, pc);
          TRACE("control @%td $%zu: BrIf[arity=%u, depth=%u]\n", (pc - start),
                value_depth, operand.arity, operand.depth);
          value_depth -= (operand.arity + 1);  // carried value(s) + condition.
          control_stack[control_stack.size() - operand.depth - 1].Ref(
              &map_, start, pc, value_depth, operand.arity > 0);
          value_depth++;
          break;
        }
        case kExprBrTable: {
          BranchTableOperand operand(&decoder, pc);
          TRACE("control @%td $%zu: BrTable[arity=%u count=%u]\n", (pc - start),
                value_depth, operand.arity, operand.table_count);
          value_depth -= (operand.arity + 1);  // carried value(s) + key.
          // One transfer entry per table slot (including the default),
          // keyed at {pc + i} so each case resolves independently.
          for (uint32_t i = 0; i < operand.table_count + 1; i++) {
            uint32_t target = operand.read_entry(&decoder, i);
            control_stack[control_stack.size() - target - 1].Ref(
                &map_, start, pc + i, value_depth, operand.arity > 0);
          }
          value_depth++;
          break;
        }
        default: {
          // Non-control opcode: pops its operands and pushes one result.
          value_depth = value_depth - OpcodeArity(pc, end) + 1;
          break;
        }
      }

      pc += OpcodeLength(pc, end);
    }
  }

  // Returns the precomputed transfer for {from}; fatal if {from} is not a
  // control-flow bytecode (indicates a bug in the pass above).
  ControlTransfer Lookup(pc_t from) {
    auto result = map_.find(from);
    if (result == map_.end()) {
      V8_Fatal(__FILE__, __LINE__, "no control target for pc %zu", from);
    }
    return result->second;
  }
};
829
830 // Code and metadata needed to execute a function.
// Code and metadata needed to execute a function.
struct InterpreterCode {
  const WasmFunction* function;  // wasm function
  AstLocalDecls locals;          // local declarations
  const byte* orig_start;        // start of original code
  const byte* orig_end;          // end of original code
  byte* start;                   // start of (maybe altered) code
  byte* end;                     // end of (maybe altered) code
  ControlTransfers* targets;     // helper for control flow.

  // Translates a pc offset into a pointer into the (maybe altered) code.
  const byte* at(pc_t pc) { return start + pc; }
};
842
843 // The main storage for interpreter code. It maps {WasmFunction} to the
844 // metadata needed to execute each function.
// The main storage for interpreter code. It maps {WasmFunction} to the
// metadata needed to execute each function.
class CodeMap {
 public:
  Zone* zone_;
  const WasmModule* module_;
  // Indexed by function index; entry i describes module function i.
  ZoneVector<InterpreterCode> interpreter_code_;

  // Registers every function of {module} (if non-null). Control-transfer
  // maps are computed lazily by Preprocess() on first use.
  CodeMap(const WasmModule* module, Zone* zone)
      : zone_(zone), module_(module), interpreter_code_(zone) {
    if (module == nullptr) return;
    for (size_t i = 0; i < module->functions.size(); i++) {
      const WasmFunction* function = &module->functions[i];
      const byte* code_start =
          module->module_start + function->code_start_offset;
      const byte* code_end = module->module_start + function->code_end_offset;
      AddFunction(function, code_start, code_end);
    }
  }

  // Returns the (unprocessed) entry for {function}, or nullptr if unknown.
  InterpreterCode* FindCode(const WasmFunction* function) {
    if (function->func_index < interpreter_code_.size()) {
      InterpreterCode* code = &interpreter_code_[function->func_index];
      DCHECK_EQ(function, code->function);
      return code;
    }
    return nullptr;
  }

  // Returns the preprocessed (executable) entry for {function_index};
  // CHECKs that the index is in range.
  InterpreterCode* GetCode(uint32_t function_index) {
    CHECK_LT(function_index, interpreter_code_.size());
    return Preprocess(&interpreter_code_[function_index]);
  }

  // Resolves an indirect-call table slot; nullptr on out-of-range indices.
  InterpreterCode* GetIndirectCode(uint32_t indirect_index) {
    if (indirect_index >= module_->function_table.size()) return nullptr;
    uint32_t index = module_->function_table[indirect_index];
    if (index >= interpreter_code_.size()) return nullptr;
    return GetCode(index);
  }

  // Lazily decodes local declarations and computes the control-transfer
  // map; idempotent (no-op once {targets} is set).
  InterpreterCode* Preprocess(InterpreterCode* code) {
    if (code->targets == nullptr && code->start) {
      // Compute the control targets map and the local declarations.
      CHECK(DecodeLocalDecls(code->locals, code->start, code->end));
      code->targets =
          new (zone_) ControlTransfers(zone_, code->locals.decls_encoded_size,
                                       code->orig_start, code->orig_end);
    }
    return code;
  }

  // Appends an entry for {function}; must be called in func_index order.
  // Returns the new entry's index.
  int AddFunction(const WasmFunction* function, const byte* code_start,
                  const byte* code_end) {
    InterpreterCode code = {
        function, AstLocalDecls(zone_),        code_start,
        code_end, const_cast<byte*>(code_start), const_cast<byte*>(code_end),
        nullptr};

    DCHECK_EQ(interpreter_code_.size(), function->func_index);
    interpreter_code_.push_back(code);
    return static_cast<int>(interpreter_code_.size()) - 1;
  }

  // Replaces {function}'s bytecode and eagerly re-preprocesses it.
  // Returns false if {function} is not registered.
  bool SetFunctionCode(const WasmFunction* function, const byte* start,
                       const byte* end) {
    InterpreterCode* code = FindCode(function);
    if (code == nullptr) return false;
    code->targets = nullptr;  // invalidate the old control-transfer map.
    code->orig_start = start;
    code->orig_end = end;
    code->start = const_cast<byte*>(start);
    code->end = const_cast<byte*>(end);
    Preprocess(code);
    return true;
  }
};
920
921 // Responsible for executing code directly.
922 class ThreadImpl : public WasmInterpreter::Thread {
923 public:
924 ThreadImpl(Zone* zone, CodeMap* codemap, WasmModuleInstance* instance)
925 : codemap_(codemap),
926 instance_(instance),
927 stack_(zone),
928 frames_(zone),
929 state_(WasmInterpreter::STOPPED),
930 trap_reason_(kTrapCount) {}
931
932 virtual ~ThreadImpl() {}
933
934 //==========================================================================
935 // Implementation of public interface for WasmInterpreter::Thread.
936 //==========================================================================
937
938 virtual WasmInterpreter::State state() { return state_; }
939
940 virtual void PushFrame(const WasmFunction* function, WasmVal* args) {
941 InterpreterCode* code = codemap()->FindCode(function);
942 CHECK_NOT_NULL(code);
943 frames_.push_back({code, 0, 0, stack_.size()});
944 for (size_t i = 0; i < function->sig->parameter_count(); i++) {
945 stack_.push_back(args[i]);
946 }
947 frames_.back().ret_pc = InitLocals(code);
948 TRACE(" => push func#%u @%zu\n", code->function->func_index,
949 frames_.back().ret_pc);
950 }
951
952 virtual WasmInterpreter::State Run() {
953 do {
954 if (state_ == WasmInterpreter::STOPPED ||
955 state_ == WasmInterpreter::PAUSED) {
956 state_ = WasmInterpreter::RUNNING;
957 Execute(frames_.back().code, frames_.back().ret_pc, kRunSteps);
958 }
959 } while (state_ == WasmInterpreter::STOPPED);
960 return state_;
961 }
962
963 virtual WasmInterpreter::State Step() {
964 UNIMPLEMENTED();
965 return WasmInterpreter::STOPPED;
966 }
967
968 virtual void Pause() { UNIMPLEMENTED(); }
969
970 virtual void Reset() {
971 TRACE("----- RESET -----\n");
972 stack_.clear();
973 frames_.clear();
974 state_ = WasmInterpreter::STOPPED;
975 trap_reason_ = kTrapCount;
976 }
977
978 virtual int GetFrameCount() { return static_cast<int>(frames_.size()); }
979
980 virtual const WasmFrame* GetFrame(int index) {
981 UNIMPLEMENTED();
982 return nullptr;
983 }
984
985 virtual WasmFrame* GetMutableFrame(int index) {
986 UNIMPLEMENTED();
987 return nullptr;
988 }
989
990 virtual WasmVal GetReturnValue() {
991 if (state_ == WasmInterpreter::TRAPPED) return WasmVal(0xdeadbeef);
992 CHECK_EQ(WasmInterpreter::FINISHED, state_);
993 CHECK_EQ(1, stack_.size());
994 return stack_[0];
995 }
996
997 bool Terminated() {
998 return state_ == WasmInterpreter::TRAPPED ||
999 state_ == WasmInterpreter::FINISHED;
1000 }
1001
 private:
  // Entries on the stack of functions being evaluated.
  struct Frame {
    InterpreterCode* code;  // code of the function in this activation.
    pc_t call_pc;           // pc of the call instruction in the caller.
    pc_t ret_pc;            // pc at which execution (re)starts in this frame.
    sp_t sp;                // value-stack slot where this frame's area begins.

    // Limit of parameters.
    sp_t plimit() { return sp + code->function->sig->parameter_count(); }
    // Limit of locals.
    sp_t llimit() { return plimit() + code->locals.total_local_count; }
  };
1015
  CodeMap* codemap_;              // maps functions to interpreter code.
  WasmModuleInstance* instance_;  // module instance (memory, globals).
  ZoneVector<WasmVal> stack_;     // single value stack shared by all frames.
  ZoneVector<Frame> frames_;      // stack of activation frames.
  WasmInterpreter::State state_;
  TrapReason trap_reason_;        // meaningful only when state_ == TRAPPED.

  CodeMap* codemap() { return codemap_; }
  WasmModuleInstance* instance() { return instance_; }
  const WasmModule* module() { return instance_->module; }
1026
  // Transitions this thread into the TRAPPED state, recording the trap
  // reason and committing {pc} so the trap location can be inspected.
  void DoTrap(TrapReason trap, pc_t pc) {
    state_ = WasmInterpreter::TRAPPED;
    trap_reason_ = trap;
    CommitPc(pc);
  }
1032
  // Push a frame with arguments already on the stack.
  // {call_pc} is the pc of the call instruction in the caller; {ret_pc} is
  // where the caller resumes once the callee returns.
  void PushFrame(InterpreterCode* code, pc_t call_pc, pc_t ret_pc) {
    CHECK_NOT_NULL(code);
    DCHECK(!frames_.empty());
    // Record the resume point in the caller before pushing the callee.
    frames_.back().call_pc = call_pc;
    frames_.back().ret_pc = ret_pc;
    size_t arity = code->function->sig->parameter_count();
    DCHECK_GE(stack_.size(), arity);
    // The parameters will overlap the arguments already on the stack.
    frames_.push_back({code, 0, 0, stack_.size() - arity});
    frames_.back().ret_pc = InitLocals(code);
    TRACE(" => push func#%u @%zu\n", code->function->func_index,
          frames_.back().ret_pc);
  }
1047
  // Pushes zero-initialized slots for every declared local of {code} onto
  // the value stack and returns the pc of the first instruction after the
  // encoded local declarations.
  pc_t InitLocals(InterpreterCode* code) {
    // Each entry is a (type, count) pair describing a run of locals.
    for (auto p : code->locals.local_types) {
      WasmVal val;
      switch (p.first) {
        case kAstI32:
          val = WasmVal(static_cast<int32_t>(0));
          break;
        case kAstI64:
          val = WasmVal(static_cast<int64_t>(0));
          break;
        case kAstF32:
          val = WasmVal(static_cast<float>(0));
          break;
        case kAstF64:
          val = WasmVal(static_cast<double>(0));
          break;
        default:
          UNREACHABLE();
          break;
      }
      stack_.insert(stack_.end(), p.second, val);
    }
    return code->locals.decls_encoded_size;
  }
1072
1073 void CommitPc(pc_t pc) {
1074 if (!frames_.empty()) {
1075 frames_.back().ret_pc = pc;
1076 }
1077 }
1078
  // Decides whether the breakpoint byte at {pc} should be stepped over
  // instead of pausing. Currently breakpoints always pause.
  bool SkipBreakpoint(InterpreterCode* code, pc_t pc) {
    // TODO(titzer): skip a breakpoint if we are resuming from it, or it
    // is set for another thread only.
    return false;
  }
1084
  // Pops the current activation frame. Returns false when it was the last
  // frame (execution FINISHED, {val} left as the sole stack entry);
  // otherwise resumes the caller by updating {code}, {pc} and {limit} in
  // place and pushing {val} as the call's result, returning true.
  bool DoReturn(InterpreterCode** code, pc_t* pc, pc_t* limit, WasmVal val) {
    DCHECK_GT(frames_.size(), 0u);
    // Discard the callee's parameters, locals and operand slots.
    stack_.resize(frames_.back().sp);
    frames_.pop_back();
    if (frames_.size() == 0) {
      // A return from the top frame terminates the execution.
      state_ = WasmInterpreter::FINISHED;
      stack_.clear();
      stack_.push_back(val);
      TRACE(" => finish\n");
      return false;
    } else {
      // Return to caller frame.
      Frame* top = &frames_.back();
      *code = top->code;
      *pc = top->ret_pc;
      *limit = top->code->end - top->code->start;
      // Check both the (possibly breakpoint-patched) executing copy and the
      // original bytes for the call opcode.
      if (top->code->start[top->call_pc] == kExprCallIndirect ||
          (top->code->orig_start &&
           top->code->orig_start[top->call_pc] == kExprCallIndirect)) {
        // UGLY: An indirect call has the additional function index on the
        // stack.
        stack_.pop_back();
      }
      TRACE(" => pop func#%u @%zu\n", (*code)->function->func_index, *pc);

      stack_.push_back(val);
      return true;
    }
  }
1115
  // Starts executing {target}: pushes a callee frame (the arguments are
  // already on the value stack) and redirects {pc} and {limit} into the
  // callee's body. {ret_pc} is where the caller resumes afterwards.
  void DoCall(InterpreterCode* target, pc_t* pc, pc_t ret_pc, pc_t* limit) {
    PushFrame(target, *pc, ret_pc);
    *pc = frames_.back().ret_pc;
    *limit = target->end - target->start;
  }
1121
  // Adjust the program counter {pc} and the stack contents according to the
  // code's precomputed control transfer map. Returns the difference between
  // the new pc and the old pc.
  int DoControlTransfer(InterpreterCode* code, pc_t pc) {
    auto target = code->targets->Lookup(pc);
    switch (target.action) {
      case ControlTransfer::kNoAction:
        TRACE(" action [sp-%u]\n", target.spdiff);
        PopN(target.spdiff);
        break;
      case ControlTransfer::kPopAndRepush: {
        // Preserve the result value of the construct being exited while
        // unwinding its operand slots.
        WasmVal val = Pop();
        TRACE(" action [pop x, sp-%u, push x]\n", target.spdiff - 1);
        DCHECK_GE(target.spdiff, 1u);
        PopN(target.spdiff - 1);
        Push(pc, val);
        break;
      }
      case ControlTransfer::kPushVoid:
        TRACE(" action [sp-%u, push void]\n", target.spdiff);
        PopN(target.spdiff);
        Push(pc, WasmVal());
        break;
    }
    return target.pcdiff;
  }
1148
  // The interpreter's main dispatch loop: executes at most {max}
  // instructions of {code} starting at offset {pc}. Calls and returns
  // switch {code}/{pc}/{limit} in place rather than recursing. The loop
  // only exits by pausing (instruction budget exhausted or breakpoint hit),
  // trapping, or returning from the outermost frame; in each case the
  // current pc is committed to the top frame for later resumption.
  void Execute(InterpreterCode* code, pc_t pc, int max) {
    Decoder decoder(code->start, code->end);
    pc_t limit = code->end - code->start;
    while (true) {
      if (max-- <= 0) {
        // Maximum number of instructions reached.
        state_ = WasmInterpreter::PAUSED;
        return CommitPc(pc);
      }

      if (pc >= limit) {
        // Fell off end of code; do an implicit return.
        TRACE("@%-3zu: ImplicitReturn\n", pc);
        WasmVal val = PopArity(code->function->sig->return_count());
        if (!DoReturn(&code, &pc, &limit, val)) return;
        decoder.Reset(code->start, code->end);
        continue;
      }

      const char* skip = "";
      int len = 1;  // length of this instruction in bytes; default 1.
      byte opcode = code->start[pc];
      byte orig = opcode;
      if (opcode == kInternalBreakpoint) {
        if (SkipBreakpoint(code, pc)) {
          // skip breakpoint by switching on original code.
          orig = code->orig_start[pc];
          skip = "[skip] ";
        } else {
          state_ = WasmInterpreter::PAUSED;
          return CommitPc(pc);
        }
      }

      USE(skip);
      TRACE("@%-3zu: %s%-24s:", pc, skip,
            WasmOpcodes::OpcodeName(static_cast<WasmOpcode>(orig)));
      TraceValueStack();
      TRACE("\n");

      switch (orig) {
        case kExprNop:
          Push(pc, WasmVal());
          break;
        case kExprBlock:
        case kExprLoop: {
          // Do nothing.
          break;
        }
        case kExprIf: {
          WasmVal cond = Pop();
          bool is_true = cond.to<uint32_t>() != 0;
          if (is_true) {
            // fall through to the true block.
            TRACE(" true => fallthrough\n");
          } else {
            len = DoControlTransfer(code, pc);
            TRACE(" false => @%zu\n", pc + len);
          }
          break;
        }
        case kExprElse: {
          // Reached only by falling off the true block: skip the else arm.
          len = DoControlTransfer(code, pc);
          TRACE(" end => @%zu\n", pc + len);
          break;
        }
        case kExprSelect: {
          WasmVal cond = Pop();
          WasmVal fval = Pop();
          WasmVal tval = Pop();
          Push(pc, cond.to<int32_t>() != 0 ? tval : fval);
          break;
        }
        case kExprBr: {
          BreakDepthOperand operand(&decoder, code->at(pc));
          WasmVal val = PopArity(operand.arity);
          len = DoControlTransfer(code, pc);
          TRACE(" br => @%zu\n", pc + len);
          if (operand.arity > 0) Push(pc, val);
          break;
        }
        case kExprBrIf: {
          BreakDepthOperand operand(&decoder, code->at(pc));
          WasmVal cond = Pop();
          WasmVal val = PopArity(operand.arity);
          bool is_true = cond.to<uint32_t>() != 0;
          if (is_true) {
            len = DoControlTransfer(code, pc);
            TRACE(" br_if => @%zu\n", pc + len);
            if (operand.arity > 0) Push(pc, val);
          } else {
            TRACE(" false => fallthrough\n");
            len = 1 + operand.length;
            Push(pc, WasmVal());
          }
          break;
        }
        case kExprBrTable: {
          BranchTableOperand operand(&decoder, code->at(pc));
          uint32_t key = Pop().to<uint32_t>();
          WasmVal val = PopArity(operand.arity);
          // Out-of-range keys clamp to {table_count}, which presumably
          // selects the default target entry -- confirm against the
          // ControlTransfers encoding for br_table.
          if (key >= operand.table_count) key = operand.table_count;
          len = DoControlTransfer(code, pc + key) + key;
          TRACE(" br[%u] => @%zu\n", key, pc + len);
          if (operand.arity > 0) Push(pc, val);
          break;
        }
        case kExprReturn: {
          ReturnArityOperand operand(&decoder, code->at(pc));
          WasmVal val = PopArity(operand.arity);
          if (!DoReturn(&code, &pc, &limit, val)) return;
          decoder.Reset(code->start, code->end);
          continue;
        }
        case kExprUnreachable: {
          DoTrap(kTrapUnreachable, pc);
          return CommitPc(pc);
        }
        case kExprEnd: {
          len = DoControlTransfer(code, pc);
          DCHECK_EQ(1, len);
          break;
        }
        case kExprI8Const: {
          ImmI8Operand operand(&decoder, code->at(pc));
          Push(pc, WasmVal(operand.value));
          len = 1 + operand.length;
          break;
        }
        case kExprI32Const: {
          ImmI32Operand operand(&decoder, code->at(pc));
          Push(pc, WasmVal(operand.value));
          len = 1 + operand.length;
          break;
        }
        case kExprI64Const: {
          ImmI64Operand operand(&decoder, code->at(pc));
          Push(pc, WasmVal(operand.value));
          len = 1 + operand.length;
          break;
        }
        case kExprF32Const: {
          ImmF32Operand operand(&decoder, code->at(pc));
          Push(pc, WasmVal(operand.value));
          len = 1 + operand.length;
          break;
        }
        case kExprF64Const: {
          ImmF64Operand operand(&decoder, code->at(pc));
          Push(pc, WasmVal(operand.value));
          len = 1 + operand.length;
          break;
        }
        case kExprGetLocal: {
          LocalIndexOperand operand(&decoder, code->at(pc));
          Push(pc, stack_[frames_.back().sp + operand.index]);
          len = 1 + operand.length;
          break;
        }
        case kExprSetLocal: {
          LocalIndexOperand operand(&decoder, code->at(pc));
          WasmVal val = Pop();
          stack_[frames_.back().sp + operand.index] = val;
          // set_local is an expression: it also yields the stored value.
          Push(pc, val);
          len = 1 + operand.length;
          break;
        }
        case kExprCallFunction: {
          CallFunctionOperand operand(&decoder, code->at(pc));
          InterpreterCode* target = codemap()->GetCode(operand.index);
          DoCall(target, &pc, pc + 1 + operand.length, &limit);
          code = target;
          decoder.Reset(code->start, code->end);
          continue;
        }
        case kExprCallIndirect: {
          CallIndirectOperand operand(&decoder, code->at(pc));
          // The callee index sits below the arguments on the stack.
          size_t index = stack_.size() - operand.arity - 1;
          DCHECK_LT(index, stack_.size());
          uint32_t table_index = stack_[index].to<uint32_t>();
          if (table_index >= module()->function_table.size()) {
            return DoTrap(kTrapFuncInvalid, pc);
          }
          uint16_t function_index = module()->function_table[table_index];
          InterpreterCode* target = codemap()->GetCode(function_index);
          DCHECK(target);
          if (target->function->sig_index != operand.index) {
            return DoTrap(kTrapFuncSigMismatch, pc);
          }

          DoCall(target, &pc, pc + 1 + operand.length, &limit);
          code = target;
          decoder.Reset(code->start, code->end);
          continue;
        }
        case kExprCallImport: {
          UNIMPLEMENTED();
          break;
        }
        case kExprLoadGlobal: {
          GlobalIndexOperand operand(&decoder, code->at(pc));
          const WasmGlobal* global = &module()->globals[operand.index];
          byte* ptr = instance()->globals_start + global->offset;
          MachineType type = global->type;
          WasmVal val;
          // Narrow integer globals are widened to i32 on load.
          if (type == MachineType::Int8()) {
            val =
                WasmVal(static_cast<int32_t>(*reinterpret_cast<int8_t*>(ptr)));
          } else if (type == MachineType::Uint8()) {
            val =
                WasmVal(static_cast<int32_t>(*reinterpret_cast<uint8_t*>(ptr)));
          } else if (type == MachineType::Int16()) {
            val =
                WasmVal(static_cast<int32_t>(*reinterpret_cast<int16_t*>(ptr)));
          } else if (type == MachineType::Uint16()) {
            val = WasmVal(
                static_cast<int32_t>(*reinterpret_cast<uint16_t*>(ptr)));
          } else if (type == MachineType::Int32()) {
            val = WasmVal(*reinterpret_cast<int32_t*>(ptr));
          } else if (type == MachineType::Uint32()) {
            val = WasmVal(*reinterpret_cast<uint32_t*>(ptr));
          } else if (type == MachineType::Int64()) {
            val = WasmVal(*reinterpret_cast<int64_t*>(ptr));
          } else if (type == MachineType::Uint64()) {
            val = WasmVal(*reinterpret_cast<uint64_t*>(ptr));
          } else if (type == MachineType::Float32()) {
            val = WasmVal(*reinterpret_cast<float*>(ptr));
          } else if (type == MachineType::Float64()) {
            val = WasmVal(*reinterpret_cast<double*>(ptr));
          } else {
            UNREACHABLE();
          }
          Push(pc, val);
          len = 1 + operand.length;
          break;
        }
        case kExprStoreGlobal: {
          GlobalIndexOperand operand(&decoder, code->at(pc));
          const WasmGlobal* global = &module()->globals[operand.index];
          byte* ptr = instance()->globals_start + global->offset;
          MachineType type = global->type;
          WasmVal val = Pop();
          // Narrow integer globals are truncated on store.
          if (type == MachineType::Int8()) {
            *reinterpret_cast<int8_t*>(ptr) =
                static_cast<int8_t>(val.to<int32_t>());
          } else if (type == MachineType::Uint8()) {
            *reinterpret_cast<uint8_t*>(ptr) =
                static_cast<uint8_t>(val.to<uint32_t>());
          } else if (type == MachineType::Int16()) {
            *reinterpret_cast<int16_t*>(ptr) =
                static_cast<int16_t>(val.to<int32_t>());
          } else if (type == MachineType::Uint16()) {
            *reinterpret_cast<uint16_t*>(ptr) =
                static_cast<uint16_t>(val.to<uint32_t>());
          } else if (type == MachineType::Int32()) {
            *reinterpret_cast<int32_t*>(ptr) = val.to<int32_t>();
          } else if (type == MachineType::Uint32()) {
            *reinterpret_cast<uint32_t*>(ptr) = val.to<uint32_t>();
          } else if (type == MachineType::Int64()) {
            *reinterpret_cast<int64_t*>(ptr) = val.to<int64_t>();
          } else if (type == MachineType::Uint64()) {
            *reinterpret_cast<uint64_t*>(ptr) = val.to<uint64_t>();
          } else if (type == MachineType::Float32()) {
            *reinterpret_cast<float*>(ptr) = val.to<float>();
          } else if (type == MachineType::Float64()) {
            *reinterpret_cast<double*>(ptr) = val.to<double>();
          } else {
            UNREACHABLE();
          }
          // store_global also yields the stored value.
          Push(pc, val);
          len = 1 + operand.length;
          break;
        }

#define LOAD_CASE(name, ctype, mtype)                                    \
  case kExpr##name: {                                                    \
    MemoryAccessOperand operand(&decoder, code->at(pc));                 \
    uint32_t index = Pop().to<uint32_t>();                               \
    size_t effective_mem_size = instance()->mem_size - sizeof(mtype);    \
    if (operand.offset > effective_mem_size ||                           \
        index > (effective_mem_size - operand.offset)) {                 \
      return DoTrap(kTrapMemOutOfBounds, pc);                            \
    }                                                                    \
    byte* addr = instance()->mem_start + operand.offset + index;         \
    /* TODO(titzer): alignment, endianness for load mem */               \
    WasmVal result(static_cast<ctype>(*reinterpret_cast<mtype*>(addr))); \
    Push(pc, result);                                                    \
    len = 1 + operand.length;                                            \
    break;                                                               \
  }

          LOAD_CASE(I32LoadMem8S, int32_t, int8_t);
          LOAD_CASE(I32LoadMem8U, int32_t, uint8_t);
          LOAD_CASE(I32LoadMem16S, int32_t, int16_t);
          LOAD_CASE(I32LoadMem16U, int32_t, uint16_t);
          LOAD_CASE(I64LoadMem8S, int64_t, int8_t);
          LOAD_CASE(I64LoadMem8U, int64_t, uint8_t);
          LOAD_CASE(I64LoadMem16S, int64_t, int16_t);
          LOAD_CASE(I64LoadMem16U, int64_t, uint16_t);
          LOAD_CASE(I64LoadMem32S, int64_t, int32_t);
          LOAD_CASE(I64LoadMem32U, int64_t, uint32_t);
          LOAD_CASE(I32LoadMem, int32_t, int32_t);
          LOAD_CASE(I64LoadMem, int64_t, int64_t);
          LOAD_CASE(F32LoadMem, float, float);
          LOAD_CASE(F64LoadMem, double, double);
#undef LOAD_CASE

#define STORE_CASE(name, ctype, mtype)                                  \
  case kExpr##name: {                                                   \
    MemoryAccessOperand operand(&decoder, code->at(pc));                \
    WasmVal val = Pop();                                                \
    uint32_t index = Pop().to<uint32_t>();                              \
    size_t effective_mem_size = instance()->mem_size - sizeof(mtype);   \
    if (operand.offset > effective_mem_size ||                          \
        index > (effective_mem_size - operand.offset)) {                \
      return DoTrap(kTrapMemOutOfBounds, pc);                           \
    }                                                                   \
    byte* addr = instance()->mem_start + operand.offset + index;        \
    /* TODO(titzer): alignment, endianness for store mem */             \
    *reinterpret_cast<mtype*>(addr) = static_cast<mtype>(val.to<ctype>()); \
    Push(pc, val);                                                      \
    len = 1 + operand.length;                                           \
    break;                                                              \
  }

          STORE_CASE(I32StoreMem8, int32_t, int8_t);
          STORE_CASE(I32StoreMem16, int32_t, int16_t);
          STORE_CASE(I64StoreMem8, int64_t, int8_t);
          STORE_CASE(I64StoreMem16, int64_t, int16_t);
          STORE_CASE(I64StoreMem32, int64_t, int32_t);
          STORE_CASE(I32StoreMem, int32_t, int32_t);
          STORE_CASE(I64StoreMem, int64_t, int64_t);
          STORE_CASE(F32StoreMem, float, float);
          STORE_CASE(F64StoreMem, double, double);
#undef STORE_CASE

/* asm.js loads do not trap on out-of-bounds; they yield {defval}. */
#define ASMJS_LOAD_CASE(name, ctype, mtype, defval)              \
  case kExpr##name: {                                            \
    uint32_t index = Pop().to<uint32_t>();                       \
    ctype result;                                                \
    if (index >= (instance()->mem_size - sizeof(mtype))) {       \
      result = defval;                                           \
    } else {                                                     \
      byte* addr = instance()->mem_start + index;                \
      /* TODO(titzer): alignment for asmjs load mem? */          \
      result = static_cast<ctype>(*reinterpret_cast<mtype*>(addr)); \
    }                                                            \
    Push(pc, WasmVal(result));                                   \
    break;                                                       \
  }
          ASMJS_LOAD_CASE(I32AsmjsLoadMem8S, int32_t, int8_t, 0);
          ASMJS_LOAD_CASE(I32AsmjsLoadMem8U, int32_t, uint8_t, 0);
          ASMJS_LOAD_CASE(I32AsmjsLoadMem16S, int32_t, int16_t, 0);
          ASMJS_LOAD_CASE(I32AsmjsLoadMem16U, int32_t, uint16_t, 0);
          ASMJS_LOAD_CASE(I32AsmjsLoadMem, int32_t, int32_t, 0);
          ASMJS_LOAD_CASE(F32AsmjsLoadMem, float, float,
                          std::numeric_limits<float>::quiet_NaN());
          ASMJS_LOAD_CASE(F64AsmjsLoadMem, double, double,
                          std::numeric_limits<double>::quiet_NaN());
#undef ASMJS_LOAD_CASE

/* asm.js stores do not trap on out-of-bounds; they are dropped. */
#define ASMJS_STORE_CASE(name, ctype, mtype)                                   \
  case kExpr##name: {                                                          \
    WasmVal val = Pop();                                                       \
    uint32_t index = Pop().to<uint32_t>();                                     \
    if (index < (instance()->mem_size - sizeof(mtype))) {                      \
      byte* addr = instance()->mem_start + index;                              \
      /* TODO(titzer): alignment for asmjs store mem? */                       \
      *(reinterpret_cast<mtype*>(addr)) = static_cast<mtype>(val.to<ctype>()); \
    }                                                                          \
    Push(pc, val);                                                             \
    break;                                                                     \
  }

          ASMJS_STORE_CASE(I32AsmjsStoreMem8, int32_t, int8_t);
          ASMJS_STORE_CASE(I32AsmjsStoreMem16, int32_t, int16_t);
          ASMJS_STORE_CASE(I32AsmjsStoreMem, int32_t, int32_t);
          ASMJS_STORE_CASE(F32AsmjsStoreMem, float, float);
          ASMJS_STORE_CASE(F64AsmjsStoreMem, double, double);
#undef ASMJS_STORE_CASE

        case kExprMemorySize: {
          Push(pc, WasmVal(static_cast<uint32_t>(instance()->mem_size)));
          break;
        }
#define EXECUTE_SIMPLE_BINOP(name, ctype, op)             \
  case kExpr##name: {                                     \
    WasmVal rval = Pop();                                 \
    WasmVal lval = Pop();                                 \
    WasmVal result(lval.to<ctype>() op rval.to<ctype>()); \
    Push(pc, result);                                     \
    break;                                                \
  }
          FOREACH_SIMPLE_BINOP(EXECUTE_SIMPLE_BINOP)
#undef EXECUTE_SIMPLE_BINOP

#define EXECUTE_OTHER_BINOP(name, ctype)              \
  case kExpr##name: {                                 \
    TrapReason trap = kTrapCount;                     \
    volatile ctype rval = Pop().to<ctype>();          \
    volatile ctype lval = Pop().to<ctype>();          \
    WasmVal result(Execute##name(lval, rval, &trap)); \
    if (trap != kTrapCount) return DoTrap(trap, pc);  \
    Push(pc, result);                                 \
    break;                                            \
  }
          FOREACH_OTHER_BINOP(EXECUTE_OTHER_BINOP)
#undef EXECUTE_OTHER_BINOP

#define EXECUTE_OTHER_UNOP(name, ctype)              \
  case kExpr##name: {                                \
    TrapReason trap = kTrapCount;                    \
    volatile ctype val = Pop().to<ctype>();          \
    WasmVal result(Execute##name(val, &trap));       \
    if (trap != kTrapCount) return DoTrap(trap, pc); \
    Push(pc, result);                                \
    break;                                           \
  }
          FOREACH_OTHER_UNOP(EXECUTE_OTHER_UNOP)
#undef EXECUTE_OTHER_UNOP

        default:
          V8_Fatal(__FILE__, __LINE__, "Unknown or unimplemented opcode #%d:%s",
                   code->start[pc], OpcodeName(code->start[pc]));
          UNREACHABLE();
      }

      pc += len;
    }
    UNREACHABLE();  // above decoding loop should run forever.
  }
1580
  // Pops the top value off the value stack; must never pop into the locals
  // of the current frame.
  WasmVal Pop() {
    DCHECK_GT(stack_.size(), 0u);
    DCHECK_GT(frames_.size(), 0u);
    DCHECK_GT(stack_.size(), frames_.back().llimit());  // can't pop into locals
    WasmVal val = stack_.back();
    stack_.pop_back();
    return val;
  }

  // Pops and discards the top {n} values, staying above the locals.
  void PopN(int n) {
    DCHECK_GE(stack_.size(), static_cast<size_t>(n));
    DCHECK_GT(frames_.size(), 0u);
    size_t nsize = stack_.size() - n;
    DCHECK_GE(nsize, frames_.back().llimit());  // can't pop into locals
    stack_.resize(nsize);
  }

  // Pops either nothing (arity 0 yields a void value) or exactly one value;
  // expressions here never produce more than one result.
  WasmVal PopArity(size_t arity) {
    if (arity == 0) return WasmVal();
    CHECK_EQ(1, arity);
    return Pop();
  }

  // Pushes {val}, the result of the instruction at {pc}.
  void Push(pc_t pc, WasmVal val) {
    // TODO(titzer): store PC as well?
    stack_.push_back(val);
  }
1608
  // Tracing hook for phases of execution; printing the stack contents here
  // is not implemented yet.
  void TraceStack(const char* phase, pc_t pc) {
    if (FLAG_trace_wasm_interpreter) {
      PrintF("%s @%zu", phase, pc);
      UNIMPLEMENTED();
      PrintF("\n");
    }
  }
1616
1617 void TraceValueStack() {
1618 Frame* top = frames_.size() > 0 ? &frames_.back() : nullptr;
1619 sp_t sp = top ? top->sp : 0;
1620 sp_t plimit = top ? top->plimit() : 0;
1621 sp_t llimit = top ? top->llimit() : 0;
1622 if (FLAG_trace_wasm_interpreter) {
1623 for (size_t i = sp; i < stack_.size(); i++) {
1624 if (i < plimit)
1625 PrintF(" p%zu:", i);
1626 else if (i < llimit)
1627 PrintF(" l%zu:", i);
1628 else
1629 PrintF(" s%zu:", i);
1630 WasmVal val = stack_[i];
1631 switch (val.type) {
1632 case kAstI32:
1633 PrintF("i32:%d", val.to<int32_t>());
1634 break;
1635 case kAstI64:
1636 PrintF("i64:%" PRId64 "", val.to<int64_t>());
1637 break;
1638 case kAstF32:
1639 PrintF("f32:%f", val.to<float>());
1640 break;
1641 case kAstF64:
1642 PrintF("f64:%lf", val.to<double>());
1643 break;
1644 case kAstStmt:
1645 PrintF("void");
1646 break;
1647 default:
1648 UNREACHABLE();
1649 break;
1650 }
1651 }
1652 }
1653 }
1654 };
1655
//============================================================================
// The implementation details of the interpreter.
//============================================================================
class WasmInterpreterInternals : public ZoneObject {
 public:
  WasmModuleInstance* instance_;
  CodeMap codemap_;
  ZoneVector<ThreadImpl> threads_;  // currently always exactly one thread.

  WasmInterpreterInternals(Zone* zone, WasmModuleInstance* instance)
      : instance_(instance),
        // {instance} may be null (e.g. in testing configurations).
        codemap_(instance_ ? instance_->module : nullptr, zone),
        threads_(zone) {
    threads_.push_back(ThreadImpl(zone, &codemap_, instance));
  }
};
1672
1673 //============================================================================
1674 // Implementation of the public interface of the interpreter.
1675 //============================================================================
// All interpreter state is allocated in {zone_} and reclaimed wholesale
// when the interpreter is destroyed.
WasmInterpreter::WasmInterpreter(WasmModuleInstance* instance,
                                 base::AccountingAllocator* allocator)
    : zone_(allocator),
      internals_(new (&zone_) WasmInterpreterInternals(&zone_, instance)) {}
1680
// Intentionally empty: {internals_} is zone-allocated and dies with {zone_}.
WasmInterpreter::~WasmInterpreter() {}
1682
// Delegates to the single interpreter thread (only one exists for now).
void WasmInterpreter::Run() { internals_->threads_[0].Run(); }

void WasmInterpreter::Pause() { internals_->threads_[0].Pause(); }
1686
// Sets or clears a breakpoint at offset {pc} inside {function}'s body and
// returns whether a breakpoint was previously set there. Breakpoints are
// implemented by patching {kInternalBreakpoint} into a zone-allocated copy
// of the code; the pristine bytes stay reachable via {orig_start}.
bool WasmInterpreter::SetBreakpoint(const WasmFunction* function, int pc,
                                    bool enabled) {
  InterpreterCode* code = internals_->codemap_.FindCode(function);
  if (!code) return false;
  int size = static_cast<int>(code->end - code->start);
  // Check bounds for {pc}.
  if (pc < 0 || pc >= size) return false;
  // Make a copy of the code before enabling a breakpoint.
  if (enabled && code->orig_start == code->start) {
    code->start = reinterpret_cast<byte*>(zone_.New(size));
    memcpy(code->start, code->orig_start, size);
    code->end = code->start + size;
  }
  bool prev = code->start[pc] == kInternalBreakpoint;
  if (enabled) {
    code->start[pc] = kInternalBreakpoint;
  } else {
    // Restore the original byte. When no copy was ever made, start and
    // orig_start alias, so this is a harmless self-assignment.
    code->start[pc] = code->orig_start[pc];
  }
  return prev;
}
1708
1709 bool WasmInterpreter::GetBreakpoint(const WasmFunction* function, int pc) {
1710 InterpreterCode* code = internals_->codemap_.FindCode(function);
1711 if (!code) return false;
1712 int size = static_cast<int>(code->end - code->start);
1713 // Check bounds for {pc}.
1714 if (pc < 0 || pc >= size) return false;
1715 // Check if a breakpoint is present at that place in the code.
1716 return code->start[pc] == kInternalBreakpoint;
1717 }
1718
// Per-function tracing is not implemented yet.
bool WasmInterpreter::SetTracing(const WasmFunction* function, bool enabled) {
  UNIMPLEMENTED();
  return false;
}

int WasmInterpreter::GetThreadCount() {
  return 1;  // only one thread for now.
}

WasmInterpreter::Thread& WasmInterpreter::GetThread(int id) {
  CHECK_EQ(0, id);  // only one thread for now.
  return internals_->threads_[id];
}
1732
// Debugger accessors for locals, expression values and linear memory are
// not implemented yet; the getters return a void-typed placeholder.
WasmVal WasmInterpreter::GetLocalVal(const WasmFrame* frame, int index) {
  CHECK_GE(index, 0);
  UNIMPLEMENTED();
  WasmVal none;
  none.type = kAstStmt;
  return none;
}

WasmVal WasmInterpreter::GetExprVal(const WasmFrame* frame, int pc) {
  UNIMPLEMENTED();
  WasmVal none;
  none.type = kAstStmt;
  return none;
}

void WasmInterpreter::SetLocalVal(WasmFrame* frame, int index, WasmVal val) {
  UNIMPLEMENTED();
}

void WasmInterpreter::SetExprVal(WasmFrame* frame, int pc, WasmVal val) {
  UNIMPLEMENTED();
}

// Returns the size of the instance's linear memory in bytes.
size_t WasmInterpreter::GetMemorySize() {
  return internals_->instance_->mem_size;
}

WasmVal WasmInterpreter::ReadMemory(size_t offset) {
  UNIMPLEMENTED();
  return WasmVal();
}

void WasmInterpreter::WriteMemory(size_t offset, WasmVal val) {
  UNIMPLEMENTED();
}
1768
// Test-only hooks: register a function (initially without code) and attach
// a code body to it later.
int WasmInterpreter::AddFunctionForTesting(const WasmFunction* function) {
  return internals_->codemap_.AddFunction(function, nullptr, nullptr);
}

bool WasmInterpreter::SetFunctionCodeForTesting(const WasmFunction* function,
                                                const byte* start,
                                                const byte* end) {
  return internals_->codemap_.SetFunctionCode(function, start, end);
}

// Test-only: computes the control transfer map for a raw code range without
// needing a full module instance.
ControlTransferMap WasmInterpreter::ComputeControlTransfersForTesting(
    Zone* zone, const byte* start, const byte* end) {
  ControlTransfers targets(zone, 0, start, end);
  return targets.map_;
}
1784
1785 } // namespace wasm
1786 } // namespace internal
1787 } // namespace v8
OLDNEW
« no previous file with comments | « src/wasm/wasm-interpreter.h ('k') | src/wasm/wasm-macro-gen.h » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698