Chromium Code Reviews

Side by Side Diff: src/wasm/ast-decoder.cc

Issue 2345593003: [wasm] Master CL for Binary 0xC changes. (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Fix test failures and TSAN races. Created 4 years, 2 months ago
OLD | NEW
1 // Copyright 2015 the V8 project authors. All rights reserved. 1 // Copyright 2015 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "src/signature.h" 5 #include "src/signature.h"
6 6
7 #include "src/bit-vector.h" 7 #include "src/bit-vector.h"
8 #include "src/flags.h" 8 #include "src/flags.h"
9 #include "src/handles.h" 9 #include "src/handles.h"
10 #include "src/zone/zone-containers.h" 10 #include "src/zone/zone-containers.h"
(...skipping 18 matching lines...)
29 } while (false) 29 } while (false)
30 #else 30 #else
31 #define TRACE(...) 31 #define TRACE(...)
32 #endif 32 #endif
33 33
34 #define CHECK_PROTOTYPE_OPCODE(flag) \ 34 #define CHECK_PROTOTYPE_OPCODE(flag) \
35 if (!FLAG_##flag) { \ 35 if (!FLAG_##flag) { \
36 error("Invalid opcode (enable with --" #flag ")"); \ 36 error("Invalid opcode (enable with --" #flag ")"); \
37 break; \ 37 break; \
38 } 38 }
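For reference, a use such as CHECK_PROTOTYPE_OPCODE(wasm_eh_prototype) (seen further down in this file) expands roughly to the guard below; this is a sketch of the preprocessor expansion, not new decoder code:

    if (!FLAG_wasm_eh_prototype) {
      error("Invalid opcode (enable with --wasm_eh_prototype)");  // flag-gated prototype opcode
      break;
    }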
39 // TODO(titzer): this is only for intermediate migration.
40 #define IMPLICIT_FUNCTION_END 1
39 41
40 // An SsaEnv environment carries the current local variable renaming 42 // An SsaEnv environment carries the current local variable renaming
41 // as well as the current effect and control dependency in the TF graph. 43 // as well as the current effect and control dependency in the TF graph.
42 // It maintains a control state that tracks whether the environment 44 // It maintains a control state that tracks whether the environment
43 // is reachable, has reached a control end, or has been merged. 45 // is reachable, has reached a control end, or has been merged.
44 struct SsaEnv { 46 struct SsaEnv {
45 enum State { kControlEnd, kUnreachable, kReached, kMerged }; 47 enum State { kControlEnd, kUnreachable, kReached, kMerged };
46 48
47 State state; 49 State state;
48 TFNode* control; 50 TFNode* control;
(...skipping 14 matching lines...)
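The reachability logic described in the comment above can be sketched as follows. go() and Kill() are used throughout the decoder below; the bodies shown here are an assumption about the elided struct members, not a quote of them:

    // Sketch (assumption): an environment only contributes nodes while "live".
    bool go() const { return state == kReached || state == kMerged; }
    void Kill(State new_state) { state = new_state; }  // used as Kill(SsaEnv::kControlEnd)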
63 65
64 // An entry on the value stack. 66 // An entry on the value stack.
65 struct Value { 67 struct Value {
66 const byte* pc; 68 const byte* pc;
67 TFNode* node; 69 TFNode* node;
68 LocalType type; 70 LocalType type;
69 }; 71 };
70 72
71 struct Control; 73 struct Control;
72 74
73 // An entry on the control stack (i.e. if, block, loop, try). 75 struct MergeValues {
76 uint32_t arity;
77 union {
78 Value* array;
79 Value first;
80 } vals; // Either multiple values or a single value.
81
82 Value& first() {
83 DCHECK_GT(arity, 0u);
84 return arity == 1 ? vals.first : vals.array[0];
85 }
86 };
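A hedged illustration of how the single-versus-multiple storage scheme is read back uniformly; get_merge_value is a hypothetical helper mirroring first() and the loops in the decoder, not part of the CL:

    // Hypothetical helper (sketch): read merged value i regardless of arity.
    Value get_merge_value(MergeValues& m, uint32_t i) {
      DCHECK_LT(i, m.arity);
      return m.arity == 1 ? m.vals.first : m.vals.array[i];
    }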
87
88 // IncomingBranch is used by exception handling code for managing finally's.
89 struct IncomingBranch {
90 int32_t token_value;
91 Control* target;
92 MergeValues merge;
93 };
94
95 static Value* NO_VALUE = nullptr;
96
97 enum ControlKind { kControlIf, kControlBlock, kControlLoop, kControlTry };
98
99 // An entry on the control stack (i.e. if, block, loop).
74 struct Control { 100 struct Control {
75 const byte* pc; 101 const byte* pc;
102 ControlKind kind;
76 int stack_depth; // stack height at the beginning of the construct. 103 int stack_depth; // stack height at the beginning of the construct.
77 SsaEnv* end_env; // end environment for the construct. 104 SsaEnv* end_env; // end environment for the construct.
78 SsaEnv* false_env; // false environment (only for if). 105 SsaEnv* false_env; // false environment (only for if).
79 SsaEnv* catch_env; // catch environment (only for try). 106 SsaEnv* catch_env; // catch environment (only for try).
80 TFNode* node; // result node for the construct.
81 LocalType type; // result type for the construct.
82 bool is_loop; // true if this is the inner label of a loop.
83 107
84 bool is_if() const { return *pc == kExprIf; } 108 // Values merged into the end of this control construct.
109 MergeValues merge;
85 110
86 bool is_try() const { return *pc == kExprTry; } 111 inline bool is_if() const { return kind == kControlIf; }
112 inline bool is_block() const { return kind == kControlBlock; }
113 inline bool is_loop() const { return kind == kControlLoop; }
114 inline bool is_try() const { return kind == kControlTry; }
87 115
88 // Named constructors. 116 // Named constructors.
89 static Control Block(const byte* pc, int stack_depth, SsaEnv* end_env) { 117 static Control Block(const byte* pc, int stack_depth, SsaEnv* end_env) {
90 return {pc, stack_depth, end_env, nullptr, 118 return {pc, kControlBlock, stack_depth, end_env,
91 nullptr, nullptr, kAstEnd, false}; 119 nullptr, nullptr, {0, {NO_VALUE}}};
92 } 120 }
93 121
94 static Control If(const byte* pc, int stack_depth, SsaEnv* end_env, 122 static Control If(const byte* pc, int stack_depth, SsaEnv* end_env,
95 SsaEnv* false_env) { 123 SsaEnv* false_env) {
96 return {pc, stack_depth, end_env, false_env, 124 return {pc, kControlIf, stack_depth, end_env,
97 nullptr, nullptr, kAstStmt, false}; 125 false_env, nullptr, {0, {NO_VALUE}}};
98 } 126 }
99 127
100 static Control Loop(const byte* pc, int stack_depth, SsaEnv* end_env) { 128 static Control Loop(const byte* pc, int stack_depth, SsaEnv* end_env) {
101 return {pc, stack_depth, end_env, nullptr, nullptr, nullptr, kAstEnd, true}; 129 return {pc, kControlLoop, stack_depth, end_env,
130 nullptr, nullptr, {0, {NO_VALUE}}};
102 } 131 }
103 132
104 static Control Try(const byte* pc, int stack_depth, SsaEnv* end_env, 133 static Control Try(const byte* pc, int stack_depth, SsaEnv* end_env,
105 SsaEnv* catch_env) { 134 SsaEnv* catch_env) {
106 return {pc, stack_depth, end_env, nullptr, 135 return {pc, kControlTry, stack_depth, end_env,
107 catch_env, nullptr, kAstEnd, false}; 136 nullptr, catch_env, {0, {NO_VALUE}}};
108 } 137 }
109 }; 138 };
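As a usage sketch, these named constructors are what the Push* helpers further down wrap; the exact body of PushBlock is not shown in this hunk, so the shape below is an assumption:

    // Sketch (assumed shape): pushing a new block scope onto the control stack.
    void PushBlock(SsaEnv* end_env) {
      control_.push_back(
          Control::Block(pc_, static_cast<int>(stack_.size()), end_env));
    }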
110 139
111 // Macros that build nodes only if there is a graph and the current SSA 140 // Macros that build nodes only if there is a graph and the current SSA
112 // environment is reachable from start. This avoids problems with malformed 141 // environment is reachable from start. This avoids problems with malformed
113 // TF graphs when decoding inputs that have unreachable code. 142 // TF graphs when decoding inputs that have unreachable code.
114 #define BUILD(func, ...) (build() ? builder_->func(__VA_ARGS__) : nullptr) 143 #define BUILD(func, ...) (build() ? builder_->func(__VA_ARGS__) : nullptr)
115 #define BUILD0(func) (build() ? builder_->func() : nullptr) 144 #define BUILD0(func) (build() ? builder_->func() : nullptr)
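Concretely, a call like BUILD(Int32Constant, operand.value) used later in this file expands to the expression below, so no TurboFan nodes are created while merely validating or while the current environment is unreachable:

    (build() ? builder_->Int32Constant(operand.value) : nullptr)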
116 145
117 // Generic Wasm bytecode decoder with utilities for decoding operands, 146 // Generic Wasm bytecode decoder with utilities for decoding operands,
(...skipping 14 matching lines...)
132 161
133 inline bool Validate(const byte* pc, LocalIndexOperand& operand) { 162 inline bool Validate(const byte* pc, LocalIndexOperand& operand) {
134 if (operand.index < total_locals_) { 163 if (operand.index < total_locals_) {
135 if (local_types_) { 164 if (local_types_) {
136 operand.type = local_types_->at(operand.index); 165 operand.type = local_types_->at(operand.index);
137 } else { 166 } else {
138 operand.type = kAstStmt; 167 operand.type = kAstStmt;
139 } 168 }
140 return true; 169 return true;
141 } 170 }
142 error(pc, pc + 1, "invalid local index"); 171 error(pc, pc + 1, "invalid local index: %u", operand.index);
143 return false; 172 return false;
144 } 173 }
145 174
146 inline bool Validate(const byte* pc, GlobalIndexOperand& operand) { 175 inline bool Validate(const byte* pc, GlobalIndexOperand& operand) {
147 ModuleEnv* m = module_; 176 ModuleEnv* m = module_;
148 if (m && m->module && operand.index < m->module->globals.size()) { 177 if (m && m->module && operand.index < m->module->globals.size()) {
149 operand.type = m->module->globals[operand.index].type; 178 operand.global = &m->module->globals[operand.index];
179 operand.type = operand.global->type;
150 return true; 180 return true;
151 } 181 }
152 error(pc, pc + 1, "invalid global index"); 182 error(pc, pc + 1, "invalid global index: %u", operand.index);
153 return false; 183 return false;
154 } 184 }
155 185
156 inline bool Complete(const byte* pc, CallFunctionOperand& operand) { 186 inline bool Complete(const byte* pc, CallFunctionOperand& operand) {
157 ModuleEnv* m = module_; 187 ModuleEnv* m = module_;
158 if (m && m->module && operand.index < m->module->functions.size()) { 188 if (m && m->module && operand.index < m->module->functions.size()) {
159 operand.sig = m->module->functions[operand.index].sig; 189 operand.sig = m->module->functions[operand.index].sig;
160 return true; 190 return true;
161 } 191 }
162 return false; 192 return false;
163 } 193 }
164 194
165 inline bool Validate(const byte* pc, CallFunctionOperand& operand) { 195 inline bool Validate(const byte* pc, CallFunctionOperand& operand) {
166 if (Complete(pc, operand)) { 196 if (Complete(pc, operand)) {
167 uint32_t expected = static_cast<uint32_t>(operand.sig->parameter_count());
168 if (operand.arity != expected) {
169 error(pc, pc + 1,
170 "arity mismatch in direct function call (expected %u, got %u)",
171 expected, operand.arity);
172 return false;
173 }
174 return true; 197 return true;
175 } 198 }
176 error(pc, pc + 1, "invalid function index"); 199 error(pc, pc + 1, "invalid function index: %u", operand.index);
177 return false; 200 return false;
178 } 201 }
179 202
180 inline bool Complete(const byte* pc, CallIndirectOperand& operand) { 203 inline bool Complete(const byte* pc, CallIndirectOperand& operand) {
181 ModuleEnv* m = module_; 204 ModuleEnv* m = module_;
182 if (m && m->module && operand.index < m->module->signatures.size()) { 205 if (m && m->module && operand.index < m->module->signatures.size()) {
183 operand.sig = m->module->signatures[operand.index]; 206 operand.sig = m->module->signatures[operand.index];
184 return true; 207 return true;
185 } 208 }
186 return false; 209 return false;
187 } 210 }
188 211
189 inline bool Validate(const byte* pc, CallIndirectOperand& operand) { 212 inline bool Validate(const byte* pc, CallIndirectOperand& operand) {
190 if (Complete(pc, operand)) { 213 if (Complete(pc, operand)) {
191 uint32_t expected = static_cast<uint32_t>(operand.sig->parameter_count());
192 if (operand.arity != expected) {
193 error(pc, pc + 1,
194 "arity mismatch in indirect function call (expected %u, got %u)",
195 expected, operand.arity);
196 return false;
197 }
198 return true; 214 return true;
199 } 215 }
200 error(pc, pc + 1, "invalid signature index"); 216 error(pc, pc + 1, "invalid signature index: #%u", operand.index);
201 return false;
202 }
203
204 inline bool Complete(const byte* pc, CallImportOperand& operand) {
205 ModuleEnv* m = module_;
206 if (m && m->module && operand.index < m->module->import_table.size()) {
207 operand.sig = m->module->import_table[operand.index].sig;
208 return true;
209 }
210 return false;
211 }
212
213 inline bool Validate(const byte* pc, CallImportOperand& operand) {
214 if (Complete(pc, operand)) {
215 uint32_t expected = static_cast<uint32_t>(operand.sig->parameter_count());
216 if (operand.arity != expected) {
217 error(pc, pc + 1, "arity mismatch in import call (expected %u, got %u)",
218 expected, operand.arity);
219 return false;
220 }
221 return true;
222 }
223 error(pc, pc + 1, "invalid signature index");
224 return false; 217 return false;
225 } 218 }
226 219
227 inline bool Validate(const byte* pc, BreakDepthOperand& operand, 220 inline bool Validate(const byte* pc, BreakDepthOperand& operand,
228 ZoneVector<Control>& control) { 221 ZoneVector<Control>& control) {
229 if (operand.arity > 1) {
230 error(pc, pc + 1, "invalid arity for br or br_if");
231 return false;
232 }
233 if (operand.depth < control.size()) { 222 if (operand.depth < control.size()) {
234 operand.target = &control[control.size() - operand.depth - 1]; 223 operand.target = &control[control.size() - operand.depth - 1];
235 return true; 224 return true;
236 } 225 }
237 error(pc, pc + 1, "invalid break depth"); 226 error(pc, pc + 1, "invalid break depth: %u", operand.depth);
238 return false; 227 return false;
239 } 228 }
240 229
241 bool Validate(const byte* pc, BranchTableOperand& operand, 230 bool Validate(const byte* pc, BranchTableOperand& operand,
242 size_t block_depth) { 231 size_t block_depth) {
243 if (operand.arity > 1) { 232 // TODO(titzer): add extra redundant validation for br_table here?
244 error(pc, pc + 1, "invalid arity for break");
245 return false;
246 }
247 // Verify table.
248 for (uint32_t i = 0; i < operand.table_count + 1; ++i) {
249 uint32_t target = operand.read_entry(this, i);
250 if (target >= block_depth) {
251 error(operand.table + i * 2, "improper branch in br_table");
252 return false;
253 }
254 }
255 return true; 233 return true;
256 } 234 }
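The eager table walk removed on the left is replaced by validating entries while iterating during decode; a sketch of that pattern, using the BranchTableIterator API exactly as it appears in the kExprBrTable case below:

    BranchTableOperand operand(this, pc);
    BranchTableIterator iterator(this, operand);
    while (iterator.has_next()) {
      const byte* pos = iterator.pc();
      uint32_t target = iterator.next();
      if (target >= control_.size()) error(pos, "improper branch in br_table");
    }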
257 235
258 unsigned OpcodeArity(const byte* pc) {
259 #define DECLARE_ARITY(name, ...) \
260 static const LocalType kTypes_##name[] = {__VA_ARGS__}; \
261 static const int kArity_##name = \
262 static_cast<int>(arraysize(kTypes_##name) - 1);
263
264 FOREACH_SIGNATURE(DECLARE_ARITY);
265 #undef DECLARE_ARITY
266
267 switch (static_cast<WasmOpcode>(*pc)) {
268 case kExprI8Const:
269 case kExprI32Const:
270 case kExprI64Const:
271 case kExprF64Const:
272 case kExprF32Const:
273 case kExprGetLocal:
274 case kExprGetGlobal:
275 case kExprNop:
276 case kExprUnreachable:
277 case kExprEnd:
278 case kExprBlock:
279 case kExprThrow:
280 case kExprTry:
281 case kExprLoop:
282 return 0;
283
284 case kExprSetGlobal:
285 case kExprSetLocal:
286 case kExprElse:
287 case kExprCatch:
288 return 1;
289
290 case kExprBr: {
291 BreakDepthOperand operand(this, pc);
292 return operand.arity;
293 }
294 case kExprBrIf: {
295 BreakDepthOperand operand(this, pc);
296 return 1 + operand.arity;
297 }
298 case kExprBrTable: {
299 BranchTableOperand operand(this, pc);
300 return 1 + operand.arity;
301 }
302
303 case kExprIf:
304 return 1;
305 case kExprSelect:
306 return 3;
307
308 case kExprCallFunction: {
309 CallFunctionOperand operand(this, pc);
310 return operand.arity;
311 }
312 case kExprCallIndirect: {
313 CallIndirectOperand operand(this, pc);
314 return 1 + operand.arity;
315 }
316 case kExprCallImport: {
317 CallImportOperand operand(this, pc);
318 return operand.arity;
319 }
320 case kExprReturn: {
321 ReturnArityOperand operand(this, pc);
322 return operand.arity;
323 }
324
325 #define DECLARE_OPCODE_CASE(name, opcode, sig) \
326 case kExpr##name: \
327 return kArity_##sig;
328
329 FOREACH_LOAD_MEM_OPCODE(DECLARE_OPCODE_CASE)
330 FOREACH_STORE_MEM_OPCODE(DECLARE_OPCODE_CASE)
331 FOREACH_MISC_MEM_OPCODE(DECLARE_OPCODE_CASE)
332 FOREACH_SIMPLE_OPCODE(DECLARE_OPCODE_CASE)
333 FOREACH_SIMPLE_MEM_OPCODE(DECLARE_OPCODE_CASE)
334 FOREACH_ASMJS_COMPAT_OPCODE(DECLARE_OPCODE_CASE)
335 FOREACH_SIMD_0_OPERAND_OPCODE(DECLARE_OPCODE_CASE)
336 #undef DECLARE_OPCODE_CASE
337 #define DECLARE_OPCODE_CASE(name, opcode, sig) case kExpr##name:
338 FOREACH_SIMD_1_OPERAND_OPCODE(DECLARE_OPCODE_CASE)
339 #undef DECLARE_OPCODE_CASE
340 return 1;
341 default:
342 UNREACHABLE();
343 return 0;
344 }
345 }
346
347 unsigned OpcodeLength(const byte* pc) { 236 unsigned OpcodeLength(const byte* pc) {
348 switch (static_cast<WasmOpcode>(*pc)) { 237 switch (static_cast<WasmOpcode>(*pc)) {
349 #define DECLARE_OPCODE_CASE(name, opcode, sig) case kExpr##name: 238 #define DECLARE_OPCODE_CASE(name, opcode, sig) case kExpr##name:
350 FOREACH_LOAD_MEM_OPCODE(DECLARE_OPCODE_CASE) 239 FOREACH_LOAD_MEM_OPCODE(DECLARE_OPCODE_CASE)
351 FOREACH_STORE_MEM_OPCODE(DECLARE_OPCODE_CASE) 240 FOREACH_STORE_MEM_OPCODE(DECLARE_OPCODE_CASE)
352 #undef DECLARE_OPCODE_CASE 241 #undef DECLARE_OPCODE_CASE
353 { 242 {
354 MemoryAccessOperand operand(this, pc, UINT32_MAX); 243 MemoryAccessOperand operand(this, pc, UINT32_MAX);
355 return 1 + operand.length; 244 return 1 + operand.length;
356 } 245 }
357 case kExprBr: 246 case kExprBr:
358 case kExprBrIf: { 247 case kExprBrIf: {
359 BreakDepthOperand operand(this, pc); 248 BreakDepthOperand operand(this, pc);
360 return 1 + operand.length; 249 return 1 + operand.length;
361 } 250 }
362 case kExprSetGlobal: 251 case kExprSetGlobal:
363 case kExprGetGlobal: { 252 case kExprGetGlobal: {
364 GlobalIndexOperand operand(this, pc); 253 GlobalIndexOperand operand(this, pc);
365 return 1 + operand.length; 254 return 1 + operand.length;
366 } 255 }
367 256
368 case kExprCallFunction: { 257 case kExprCallFunction: {
369 CallFunctionOperand operand(this, pc); 258 CallFunctionOperand operand(this, pc);
370 return 1 + operand.length; 259 return 1 + operand.length;
371 } 260 }
372 case kExprCallIndirect: { 261 case kExprCallIndirect: {
373 CallIndirectOperand operand(this, pc); 262 CallIndirectOperand operand(this, pc);
374 return 1 + operand.length; 263 return 1 + operand.length;
375 } 264 }
376 case kExprCallImport: { 265
377 CallImportOperand operand(this, pc); 266 case kExprTry:
267 case kExprIf: // fall thru
268 case kExprLoop:
269 case kExprBlock: {
270 BlockTypeOperand operand(this, pc);
378 return 1 + operand.length; 271 return 1 + operand.length;
379 } 272 }
380 273
381 case kExprSetLocal: 274 case kExprSetLocal:
275 case kExprTeeLocal:
382 case kExprGetLocal: 276 case kExprGetLocal:
383 case kExprCatch: { 277 case kExprCatch: {
384 LocalIndexOperand operand(this, pc); 278 LocalIndexOperand operand(this, pc);
385 return 1 + operand.length; 279 return 1 + operand.length;
386 } 280 }
387 case kExprBrTable: { 281 case kExprBrTable: {
388 BranchTableOperand operand(this, pc); 282 BranchTableOperand operand(this, pc);
389 return 1 + operand.length; 283 BranchTableIterator iterator(this, operand);
284 return 1 + iterator.length();
390 } 285 }
391 case kExprI32Const: { 286 case kExprI32Const: {
392 ImmI32Operand operand(this, pc); 287 ImmI32Operand operand(this, pc);
393 return 1 + operand.length; 288 return 1 + operand.length;
394 } 289 }
395 case kExprI64Const: { 290 case kExprI64Const: {
396 ImmI64Operand operand(this, pc); 291 ImmI64Operand operand(this, pc);
397 return 1 + operand.length; 292 return 1 + operand.length;
398 } 293 }
399 case kExprI8Const: 294 case kExprI8Const:
400 return 2; 295 return 2;
401 case kExprF32Const: 296 case kExprF32Const:
402 return 5; 297 return 5;
403 case kExprF64Const: 298 case kExprF64Const:
404 return 9; 299 return 9;
405 case kExprReturn: {
406 ReturnArityOperand operand(this, pc);
407 return 1 + operand.length;
408 }
409 #define DECLARE_OPCODE_CASE(name, opcode, sig) case kExpr##name:
410 FOREACH_SIMD_0_OPERAND_OPCODE(DECLARE_OPCODE_CASE) { return 2; }
411 FOREACH_SIMD_1_OPERAND_OPCODE(DECLARE_OPCODE_CASE) { return 3; }
412 #undef DECLARE_OPCODE_CASE
413 default: 300 default:
414 return 1; 301 return 1;
415 } 302 }
416 } 303 }
417 }; 304 };
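Most of the "1 + operand.length" computations above come from LEB128-encoded immediates whose byte length varies; a minimal, illustrative helper (not part of the decoder) showing how such a length is measured:

    // Count the bytes of one unsigned LEB128 value starting at pc (illustrative).
    unsigned LEB128Length(const uint8_t* pc, const uint8_t* end) {
      unsigned len = 0;
      while (pc + len < end) {
        uint8_t b = pc[len++];
        if ((b & 0x80) == 0) break;  // high bit clear marks the last byte
      }
      return len;
    }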
418 305
419 // The full WASM decoder for bytecode. Both verifies bytecode and generates 306 // The full WASM decoder for bytecode. Both verifies bytecode and generates
420 // a TurboFan IR graph. 307 // a TurboFan IR graph.
421 class WasmFullDecoder : public WasmDecoder { 308 class WasmFullDecoder : public WasmDecoder {
422 public: 309 public:
423 WasmFullDecoder(Zone* zone, TFBuilder* builder, const FunctionBody& body) 310 WasmFullDecoder(Zone* zone, TFBuilder* builder, const FunctionBody& body)
424 : WasmDecoder(body.module, body.sig, body.start, body.end), 311 : WasmDecoder(body.module, body.sig, body.start, body.end),
425 zone_(zone), 312 zone_(zone),
426 builder_(builder), 313 builder_(builder),
427 base_(body.base), 314 base_(body.base),
428 local_type_vec_(zone), 315 local_type_vec_(zone),
429 stack_(zone), 316 stack_(zone),
430 control_(zone) { 317 control_(zone),
318 last_end_found_(false) {
431 local_types_ = &local_type_vec_; 319 local_types_ = &local_type_vec_;
432 } 320 }
433 321
434 bool Decode() { 322 bool Decode() {
435 base::ElapsedTimer decode_timer; 323 base::ElapsedTimer decode_timer;
436 if (FLAG_trace_wasm_decode_time) { 324 if (FLAG_trace_wasm_decode_time) {
437 decode_timer.Start(); 325 decode_timer.Start();
438 } 326 }
439 stack_.clear(); 327 stack_.clear();
440 control_.clear(); 328 control_.clear();
441 329
442 if (end_ < pc_) { 330 if (end_ < pc_) {
443 error(pc_, "function body end < start"); 331 error("function body end < start");
444 return false; 332 return false;
445 } 333 }
446 334
447 DecodeLocalDecls(); 335 DecodeLocalDecls();
448 InitSsaEnv(); 336 InitSsaEnv();
449 DecodeFunctionBody(); 337 DecodeFunctionBody();
450 338
451 if (failed()) return TraceFailed(); 339 if (failed()) return TraceFailed();
452 340
341 #if IMPLICIT_FUNCTION_END
342 // With implicit end support (old style), the function block
343 // remains on the stack. Other control blocks are an error.
344 if (control_.size() > 1) {
345 error(pc_, control_.back().pc, "unterminated control structure");
346 return TraceFailed();
347 }
348
349 // Assume an implicit end to the function body block.
350 if (control_.size() == 1) {
351 Control* c = &control_.back();
352 if (ssa_env_->go()) {
353 FallThruTo(c);
354 }
355
356 if (c->end_env->go()) {
357 // Push the end values onto the stack.
358 stack_.resize(c->stack_depth);
359 if (c->merge.arity == 1) {
360 stack_.push_back(c->merge.vals.first);
361 } else {
362 for (unsigned i = 0; i < c->merge.arity; i++) {
363 stack_.push_back(c->merge.vals.array[i]);
364 }
365 }
366
367 TRACE(" @%-8d #xx:%-20s|", startrel(pc_), "ImplicitReturn");
368 SetEnv("function:end", c->end_env);
369 DoReturn();
370 TRACE("\n");
371 }
372 }
373 #else
453 if (!control_.empty()) { 374 if (!control_.empty()) {
454 error(pc_, control_.back().pc, "unterminated control structure"); 375 error(pc_, control_.back().pc, "unterminated control structure");
455 return TraceFailed(); 376 return TraceFailed();
456 } 377 }
457 378
458 if (ssa_env_->go()) { 379 if (!last_end_found_) {
459 TRACE(" @%-8d #xx:%-20s|", startrel(pc_), "ImplicitReturn"); 380 error("function body must end with \"end\" opcode.");
460 DoReturn(); 381 return false;
461 if (failed()) return TraceFailed();
462 TRACE("\n");
463 } 382 }
383 #endif
464 384
465 if (FLAG_trace_wasm_decode_time) { 385 if (FLAG_trace_wasm_decode_time) {
466 double ms = decode_timer.Elapsed().InMillisecondsF(); 386 double ms = decode_timer.Elapsed().InMillisecondsF();
467 PrintF("wasm-decode ok (%0.3f ms)\n\n", ms); 387 PrintF("wasm-decode %s (%0.3f ms)\n\n", ok() ? "ok" : "failed", ms);
468 } else { 388 } else {
469 TRACE("wasm-decode ok\n\n"); 389 TRACE("wasm-decode %s\n\n", ok() ? "ok" : "failed");
470 } 390 }
471 391
472 return true; 392 return true;
473 } 393 }
474 394
475 bool TraceFailed() { 395 bool TraceFailed() {
476 TRACE("wasm-error module+%-6d func+%d: %s\n\n", baserel(error_pc_), 396 TRACE("wasm-error module+%-6d func+%d: %s\n\n", baserel(error_pc_),
477 startrel(error_pc_), error_msg_.get()); 397 startrel(error_pc_), error_msg_.get());
478 return false; 398 return false;
479 } 399 }
(...skipping 32 matching lines...)
512 432
513 Zone* zone_; 433 Zone* zone_;
514 TFBuilder* builder_; 434 TFBuilder* builder_;
515 const byte* base_; 435 const byte* base_;
516 436
517 SsaEnv* ssa_env_; 437 SsaEnv* ssa_env_;
518 438
519 ZoneVector<LocalType> local_type_vec_; // types of local variables. 439 ZoneVector<LocalType> local_type_vec_; // types of local variables.
520 ZoneVector<Value> stack_; // stack of values. 440 ZoneVector<Value> stack_; // stack of values.
521 ZoneVector<Control> control_; // stack of blocks, loops, and ifs. 441 ZoneVector<Control> control_; // stack of blocks, loops, and ifs.
442 bool last_end_found_;
522 443
523 inline bool build() { return builder_ && ssa_env_->go(); } 444 inline bool build() { return builder_ && ssa_env_->go(); }
524 445
525 void InitSsaEnv() { 446 void InitSsaEnv() {
526 TFNode* start = nullptr; 447 TFNode* start = nullptr;
527 SsaEnv* ssa_env = reinterpret_cast<SsaEnv*>(zone_->New(sizeof(SsaEnv))); 448 SsaEnv* ssa_env = reinterpret_cast<SsaEnv*>(zone_->New(sizeof(SsaEnv)));
528 size_t size = sizeof(TFNode*) * EnvironmentCount(); 449 size_t size = sizeof(TFNode*) * EnvironmentCount();
529 ssa_env->state = SsaEnv::kReached; 450 ssa_env->state = SsaEnv::kReached;
530 ssa_env->locals = 451 ssa_env->locals =
531 size > 0 ? reinterpret_cast<TFNode**>(zone_->New(size)) : nullptr; 452 size > 0 ? reinterpret_cast<TFNode**>(zone_->New(size)) : nullptr;
(...skipping 100 matching lines...)
632 total_locals_ = local_type_vec_.size(); 553 total_locals_ = local_type_vec_.size();
633 } 554 }
634 555
635 // Decodes the body of a function. 556 // Decodes the body of a function.
636 void DecodeFunctionBody() { 557 void DecodeFunctionBody() {
637 TRACE("wasm-decode %p...%p (module+%d, %d bytes) %s\n", 558 TRACE("wasm-decode %p...%p (module+%d, %d bytes) %s\n",
638 reinterpret_cast<const void*>(start_), 559 reinterpret_cast<const void*>(start_),
639 reinterpret_cast<const void*>(limit_), baserel(pc_), 560 reinterpret_cast<const void*>(limit_), baserel(pc_),
640 static_cast<int>(limit_ - start_), builder_ ? "graph building" : ""); 561 static_cast<int>(limit_ - start_), builder_ ? "graph building" : "");
641 562
563 {
564 // Set up initial function block.
565 SsaEnv* break_env = ssa_env_;
566 SetEnv("initial env", Steal(break_env));
567 PushBlock(break_env);
568 Control* c = &control_.back();
569 c->merge.arity = static_cast<uint32_t>(sig_->return_count());
570
571 if (c->merge.arity == 1) {
572 c->merge.vals.first = {pc_, nullptr, sig_->GetReturn(0)};
573 } else if (c->merge.arity > 1) {
574 c->merge.vals.array = zone_->NewArray<Value>(c->merge.arity);
575 for (unsigned i = 0; i < c->merge.arity; i++) {
576 c->merge.vals.array[i] = {pc_, nullptr, sig_->GetReturn(i)};
577 }
578 }
579 }
580
642 if (pc_ >= limit_) return; // Nothing to do. 581 if (pc_ >= limit_) return; // Nothing to do.
643 582
644 while (true) { // decoding loop. 583 while (true) { // decoding loop.
645 unsigned len = 1; 584 unsigned len = 1;
646 WasmOpcode opcode = static_cast<WasmOpcode>(*pc_); 585 WasmOpcode opcode = static_cast<WasmOpcode>(*pc_);
647 if (!WasmOpcodes::IsPrefixOpcode(opcode)) { 586 if (!WasmOpcodes::IsPrefixOpcode(opcode)) {
648 TRACE(" @%-8d #%02x:%-20s|", startrel(pc_), opcode, 587 TRACE(" @%-8d #%02x:%-20s|", startrel(pc_), opcode,
649 WasmOpcodes::ShortOpcodeName(opcode)); 588 WasmOpcodes::ShortOpcodeName(opcode));
650 } 589 }
651 590
652 FunctionSig* sig = WasmOpcodes::Signature(opcode); 591 FunctionSig* sig = WasmOpcodes::Signature(opcode);
653 if (sig) { 592 if (sig) {
654 BuildSimpleOperator(opcode, sig); 593 BuildSimpleOperator(opcode, sig);
655 } else { 594 } else {
656 // Complex bytecode. 595 // Complex bytecode.
657 switch (opcode) { 596 switch (opcode) {
658 case kExprNop: 597 case kExprNop:
659 Push(kAstStmt, nullptr);
660 break; 598 break;
661 case kExprBlock: { 599 case kExprBlock: {
662 // The break environment is the outer environment. 600 // The break environment is the outer environment.
601 BlockTypeOperand operand(this, pc_);
663 SsaEnv* break_env = ssa_env_; 602 SsaEnv* break_env = ssa_env_;
664 PushBlock(break_env); 603 PushBlock(break_env);
665 SetEnv("block:start", Steal(break_env)); 604 SetEnv("block:start", Steal(break_env));
605 SetBlockType(&control_.back(), operand);
606 len = 1 + operand.length;
666 break; 607 break;
667 } 608 }
668 case kExprThrow: { 609 case kExprThrow: {
669 CHECK_PROTOTYPE_OPCODE(wasm_eh_prototype); 610 CHECK_PROTOTYPE_OPCODE(wasm_eh_prototype);
670 Value value = Pop(0, kAstI32); 611 Value value = Pop(0, kAstI32);
671 BUILD(Throw, value.node); 612 BUILD(Throw, value.node);
672 break; 613 break;
673 } 614 }
674 case kExprTry: { 615 case kExprTry: {
675 CHECK_PROTOTYPE_OPCODE(wasm_eh_prototype); 616 CHECK_PROTOTYPE_OPCODE(wasm_eh_prototype);
617 BlockTypeOperand operand(this, pc_);
676 SsaEnv* outer_env = ssa_env_; 618 SsaEnv* outer_env = ssa_env_;
677 SsaEnv* try_env = Steal(outer_env); 619 SsaEnv* try_env = Steal(outer_env);
678 SsaEnv* catch_env = Split(try_env); 620 SsaEnv* catch_env = Split(try_env);
679 PushTry(outer_env, catch_env); 621 PushTry(outer_env, catch_env);
680 SetEnv("try:start", try_env); 622 SetEnv("try_catch:start", try_env);
623 SetBlockType(&control_.back(), operand);
624 len = 1 + operand.length;
681 break; 625 break;
682 } 626 }
683 case kExprCatch: { 627 case kExprCatch: {
684 CHECK_PROTOTYPE_OPCODE(wasm_eh_prototype); 628 CHECK_PROTOTYPE_OPCODE(wasm_eh_prototype);
685 LocalIndexOperand operand(this, pc_); 629 LocalIndexOperand operand(this, pc_);
686 len = 1 + operand.length; 630 len = 1 + operand.length;
687 631
688 if (control_.empty()) { 632 if (control_.empty()) {
689 error(pc_, "catch does not match a any try"); 633 error("catch does not match any try");
690 break; 634 break;
691 } 635 }
692 636
693 Control* c = &control_.back(); 637 Control* c = &control_.back();
694 if (!c->is_try()) { 638 if (!c->is_try()) {
695 error(pc_, "catch does not match a try"); 639 error("catch does not match any try");
696 break; 640 break;
697 } 641 }
698 642
699 if (c->catch_env == nullptr) { 643 if (c->catch_env == nullptr) {
700 error(pc_, "catch already present for try with catch"); 644 error("catch already present for try with catch");
701 break; 645 break;
702 } 646 }
703 647
704 Goto(ssa_env_, c->end_env); 648 Goto(ssa_env_, c->end_env);
705 649
706 SsaEnv* catch_env = c->catch_env; 650 SsaEnv* catch_env = c->catch_env;
707 c->catch_env = nullptr; 651 c->catch_env = nullptr;
708 SetEnv("catch:begin", catch_env); 652 SetEnv("catch:begin", catch_env);
709 653
710 if (Validate(pc_, operand)) { 654 if (Validate(pc_, operand)) {
711 // TODO(jpp): figure out how thrown value is propagated. It is 655 // TODO(jpp): figure out how thrown value is propagated. It is
712 // unlikely to be a value on the stack. 656 // unlikely to be a value on the stack.
713 if (ssa_env_->locals) { 657 if (ssa_env_->locals) {
714 ssa_env_->locals[operand.index] = nullptr; 658 ssa_env_->locals[operand.index] = nullptr;
715 } 659 }
716 } 660 }
717 661
718 PopUpTo(c->stack_depth); 662 PopUpTo(c->stack_depth);
719
720 break; 663 break;
721 } 664 }
722 case kExprLoop: { 665 case kExprLoop: {
723 // The break environment is the outer environment. 666 BlockTypeOperand operand(this, pc_);
724 SsaEnv* break_env = ssa_env_; 667 SsaEnv* finish_try_env = Steal(ssa_env_);
725 PushBlock(break_env);
726 SsaEnv* finish_try_env = Steal(break_env);
727 // The continue environment is the inner environment. 668 // The continue environment is the inner environment.
728 PrepareForLoop(pc_, finish_try_env); 669 PrepareForLoop(pc_, finish_try_env);
729 SetEnv("loop:start", Split(finish_try_env)); 670 SetEnv("loop:start", Split(finish_try_env));
730 ssa_env_->SetNotMerged(); 671 ssa_env_->SetNotMerged();
731 PushLoop(finish_try_env); 672 PushLoop(finish_try_env);
673 SetBlockType(&control_.back(), operand);
674 len = 1 + operand.length;
732 break; 675 break;
733 } 676 }
734 case kExprIf: { 677 case kExprIf: {
735 // Condition on top of stack. Split environments for branches. 678 // Condition on top of stack. Split environments for branches.
679 BlockTypeOperand operand(this, pc_);
736 Value cond = Pop(0, kAstI32); 680 Value cond = Pop(0, kAstI32);
737 TFNode* if_true = nullptr; 681 TFNode* if_true = nullptr;
738 TFNode* if_false = nullptr; 682 TFNode* if_false = nullptr;
739 BUILD(Branch, cond.node, &if_true, &if_false); 683 BUILD(Branch, cond.node, &if_true, &if_false);
740 SsaEnv* end_env = ssa_env_; 684 SsaEnv* end_env = ssa_env_;
741 SsaEnv* false_env = Split(ssa_env_); 685 SsaEnv* false_env = Split(ssa_env_);
742 false_env->control = if_false; 686 false_env->control = if_false;
743 SsaEnv* true_env = Steal(ssa_env_); 687 SsaEnv* true_env = Steal(ssa_env_);
744 true_env->control = if_true; 688 true_env->control = if_true;
745 PushIf(end_env, false_env); 689 PushIf(end_env, false_env);
746 SetEnv("if:true", true_env); 690 SetEnv("if:true", true_env);
691 SetBlockType(&control_.back(), operand);
692 len = 1 + operand.length;
747 break; 693 break;
748 } 694 }
749 case kExprElse: { 695 case kExprElse: {
750 if (control_.empty()) { 696 if (control_.empty()) {
751 error(pc_, "else does not match any if"); 697 error("else does not match any if");
752 break; 698 break;
753 } 699 }
754 Control* c = &control_.back(); 700 Control* c = &control_.back();
755 if (!c->is_if()) { 701 if (!c->is_if()) {
756 error(pc_, c->pc, "else does not match an if"); 702 error(pc_, c->pc, "else does not match an if");
757 break; 703 break;
758 } 704 }
759 if (c->false_env == nullptr) { 705 if (c->false_env == nullptr) {
760 error(pc_, c->pc, "else already present for if"); 706 error(pc_, c->pc, "else already present for if");
761 break; 707 break;
762 } 708 }
763 Value val = PopUpTo(c->stack_depth); 709 FallThruTo(c);
764 MergeInto(c->end_env, &c->node, &c->type, val);
765 // Switch to environment for false branch. 710 // Switch to environment for false branch.
711 stack_.resize(c->stack_depth);
766 SetEnv("if_else:false", c->false_env); 712 SetEnv("if_else:false", c->false_env);
767 c->false_env = nullptr; // record that an else is already seen 713 c->false_env = nullptr; // record that an else is already seen
768 break; 714 break;
769 } 715 }
770 case kExprEnd: { 716 case kExprEnd: {
771 if (control_.empty()) { 717 if (control_.empty()) {
772 error(pc_, "end does not match any if, try, or block"); 718 error("end does not match any if, try, or block");
773 break; 719 return;
774 } 720 }
775 const char* name = "block:end"; 721 const char* name = "block:end";
776 Control* c = &control_.back(); 722 Control* c = &control_.back();
777 Value val = PopUpTo(c->stack_depth); 723 if (c->is_loop()) {
778 if (c->is_loop) { 724 // A loop just leaves the values on the stack.
779 // Loops always push control in pairs. 725 TypeCheckLoopFallThru(c);
780 PopControl(); 726 PopControl();
781 c = &control_.back(); 727 SetEnv("loop:end", ssa_env_);
782 name = "loop:end"; 728 break;
783 } else if (c->is_if()) { 729 }
730 if (c->is_if()) {
784 if (c->false_env != nullptr) { 731 if (c->false_env != nullptr) {
785 // End the true branch of a one-armed if. 732 // End the true branch of a one-armed if.
786 Goto(c->false_env, c->end_env); 733 Goto(c->false_env, c->end_env);
787 val = {val.pc, nullptr, kAstStmt}; 734 if (ssa_env_->go() && stack_.size() != c->stack_depth) {
735 error("end of if expected empty stack");
736 stack_.resize(c->stack_depth);
737 }
738 if (c->merge.arity > 0) {
739 error("non-void one-armed if");
740 }
788 name = "if:merge"; 741 name = "if:merge";
789 } else { 742 } else {
790 // End the false branch of a two-armed if. 743 // End the false branch of a two-armed if.
791 name = "if_else:merge"; 744 name = "if_else:merge";
792 } 745 }
793 } else if (c->is_try()) { 746 } else if (c->is_try()) {
794 name = "try:end"; 747 name = "try:end";
795 748
796 // validate that catch was seen. 749 // validate that catch was seen.
797 if (c->catch_env != nullptr) { 750 if (c->catch_env != nullptr) {
798 error(pc_, "missing catch in try"); 751 error("missing catch in try");
799 break; 752 break;
800 } 753 }
801 } 754 }
755 FallThruTo(c);
756 SetEnv(name, c->end_env);
802 757
803 if (ssa_env_->go()) { 758 // Push the end values onto the stack.
804 // Adds a fallthrough edge to the next control block. 759 stack_.resize(c->stack_depth);
805 MergeInto(c->end_env, &c->node, &c->type, val); 760 if (c->merge.arity == 1) {
761 stack_.push_back(c->merge.vals.first);
762 } else {
763 for (unsigned i = 0; i < c->merge.arity; i++) {
764 stack_.push_back(c->merge.vals.array[i]);
765 }
806 } 766 }
807 SetEnv(name, c->end_env); 767
808 stack_.resize(c->stack_depth);
809 Push(c->type, c->node);
810 PopControl(); 768 PopControl();
769
770 if (control_.empty()) {
771 // If the last (implicit) control was popped, check we are at end.
772 if (pc_ + 1 != end_) {
773 error(pc_, pc_ + 1, "trailing code after function end");
774 }
775 last_end_found_ = true;
776 if (ssa_env_->go()) {
777 // The result of the block is the return value.
778 TRACE(" @%-8d #xx:%-20s|", startrel(pc_), "ImplicitReturn");
779 DoReturn();
780 TRACE("\n");
781 }
782 return;
783 }
811 break; 784 break;
812 } 785 }
813 case kExprSelect: { 786 case kExprSelect: {
814 Value cond = Pop(2, kAstI32); 787 Value cond = Pop(2, kAstI32);
815 Value fval = Pop(); 788 Value fval = Pop();
816 Value tval = Pop(); 789 Value tval = Pop();
817 if (tval.type == kAstStmt || tval.type != fval.type) { 790 if (tval.type == kAstStmt || tval.type != fval.type) {
818 if (tval.type != kAstEnd && fval.type != kAstEnd) { 791 if (tval.type != kAstEnd && fval.type != kAstEnd) {
819 error(pc_, "type mismatch in select"); 792 error("type mismatch in select");
820 break; 793 break;
821 } 794 }
822 } 795 }
823 if (build()) { 796 if (build()) {
824 DCHECK(tval.type != kAstEnd); 797 DCHECK(tval.type != kAstEnd);
825 DCHECK(fval.type != kAstEnd); 798 DCHECK(fval.type != kAstEnd);
826 DCHECK(cond.type != kAstEnd); 799 DCHECK(cond.type != kAstEnd);
827 TFNode* controls[2]; 800 TFNode* controls[2];
828 builder_->Branch(cond.node, &controls[0], &controls[1]); 801 builder_->Branch(cond.node, &controls[0], &controls[1]);
829 TFNode* merge = builder_->Merge(2, controls); 802 TFNode* merge = builder_->Merge(2, controls);
830 TFNode* vals[2] = {tval.node, fval.node}; 803 TFNode* vals[2] = {tval.node, fval.node};
831 TFNode* phi = builder_->Phi(tval.type, 2, vals, merge); 804 TFNode* phi = builder_->Phi(tval.type, 2, vals, merge);
832 Push(tval.type, phi); 805 Push(tval.type, phi);
833 ssa_env_->control = merge; 806 ssa_env_->control = merge;
834 } else { 807 } else {
835 Push(tval.type, nullptr); 808 Push(tval.type, nullptr);
836 } 809 }
837 break; 810 break;
838 } 811 }
839 case kExprBr: { 812 case kExprBr: {
840 BreakDepthOperand operand(this, pc_); 813 BreakDepthOperand operand(this, pc_);
841 Value val = {pc_, nullptr, kAstStmt};
842 if (operand.arity) val = Pop();
843 if (Validate(pc_, operand, control_)) { 814 if (Validate(pc_, operand, control_)) {
844 BreakTo(operand.target, val); 815 BreakTo(operand.depth);
845 } 816 }
846 len = 1 + operand.length; 817 len = 1 + operand.length;
847 Push(kAstEnd, nullptr); 818 EndControl();
848 break; 819 break;
849 } 820 }
850 case kExprBrIf: { 821 case kExprBrIf: {
851 BreakDepthOperand operand(this, pc_); 822 BreakDepthOperand operand(this, pc_);
852 Value cond = Pop(operand.arity, kAstI32); 823 Value cond = Pop(0, kAstI32);
853 Value val = {pc_, nullptr, kAstStmt};
854 if (operand.arity == 1) val = Pop();
855 if (ok() && Validate(pc_, operand, control_)) { 824 if (ok() && Validate(pc_, operand, control_)) {
856 SsaEnv* fenv = ssa_env_; 825 SsaEnv* fenv = ssa_env_;
857 SsaEnv* tenv = Split(fenv); 826 SsaEnv* tenv = Split(fenv);
858 fenv->SetNotMerged(); 827 fenv->SetNotMerged();
859 BUILD(Branch, cond.node, &tenv->control, &fenv->control); 828 BUILD(Branch, cond.node, &tenv->control, &fenv->control);
860 ssa_env_ = tenv; 829 ssa_env_ = tenv;
861 BreakTo(operand.target, val); 830 BreakTo(operand.depth);
862 ssa_env_ = fenv; 831 ssa_env_ = fenv;
863 } 832 }
864 len = 1 + operand.length; 833 len = 1 + operand.length;
865 Push(kAstStmt, nullptr);
866 break; 834 break;
867 } 835 }
868 case kExprBrTable: { 836 case kExprBrTable: {
869 BranchTableOperand operand(this, pc_); 837 BranchTableOperand operand(this, pc_);
838 BranchTableIterator iterator(this, operand);
870 if (Validate(pc_, operand, control_.size())) { 839 if (Validate(pc_, operand, control_.size())) {
871 Value key = Pop(operand.arity, kAstI32); 840 Value key = Pop(0, kAstI32);
872 Value val = {pc_, nullptr, kAstStmt};
873 if (operand.arity == 1) val = Pop();
874 if (failed()) break; 841 if (failed()) break;
875 842
876 SsaEnv* break_env = ssa_env_; 843 SsaEnv* break_env = ssa_env_;
877 if (operand.table_count > 0) { 844 if (operand.table_count > 0) {
878 // Build branches to the various blocks based on the table. 845 // Build branches to the various blocks based on the table.
879 TFNode* sw = BUILD(Switch, operand.table_count + 1, key.node); 846 TFNode* sw = BUILD(Switch, operand.table_count + 1, key.node);
880 847
881 SsaEnv* copy = Steal(break_env); 848 SsaEnv* copy = Steal(break_env);
882 ssa_env_ = copy; 849 ssa_env_ = copy;
883 for (uint32_t i = 0; i < operand.table_count + 1; ++i) { 850 while (iterator.has_next()) {
884 uint16_t target = operand.read_entry(this, i); 851 uint32_t i = iterator.cur_index();
852 const byte* pos = iterator.pc();
853 uint32_t target = iterator.next();
854 if (target >= control_.size()) {
855 error(pos, "improper branch in br_table");
856 break;
857 }
885 ssa_env_ = Split(copy); 858 ssa_env_ = Split(copy);
886 ssa_env_->control = (i == operand.table_count) 859 ssa_env_->control = (i == operand.table_count)
887 ? BUILD(IfDefault, sw) 860 ? BUILD(IfDefault, sw)
888 : BUILD(IfValue, i, sw); 861 : BUILD(IfValue, i, sw);
889 int depth = target; 862 BreakTo(target);
890 Control* c = &control_[control_.size() - depth - 1];
891 MergeInto(c->end_env, &c->node, &c->type, val);
892 } 863 }
893 } else { 864 } else {
894 // Only a default target. Do the equivalent of br. 865 // Only a default target. Do the equivalent of br.
895 uint16_t target = operand.read_entry(this, 0); 866 const byte* pos = iterator.pc();
896 int depth = target; 867 uint32_t target = iterator.next();
897 Control* c = &control_[control_.size() - depth - 1]; 868 if (target >= control_.size()) {
898 MergeInto(c->end_env, &c->node, &c->type, val); 869 error(pos, "improper branch in br_table");
870 break;
871 }
872 BreakTo(target);
899 } 873 }
900 // br_table ends the control flow like br. 874 // br_table ends the control flow like br.
901 ssa_env_ = break_env; 875 ssa_env_ = break_env;
902 Push(kAstStmt, nullptr);
903 } 876 }
904 len = 1 + operand.length; 877 len = 1 + iterator.length();
905 break; 878 break;
906 } 879 }
907 case kExprReturn: { 880 case kExprReturn: {
908 ReturnArityOperand operand(this, pc_);
909 if (operand.arity != sig_->return_count()) {
910 error(pc_, pc_ + 1, "arity mismatch in return");
911 }
912 DoReturn(); 881 DoReturn();
913 len = 1 + operand.length;
914 break; 882 break;
915 } 883 }
916 case kExprUnreachable: { 884 case kExprUnreachable: {
917 Push(kAstEnd, BUILD(Unreachable, position())); 885 BUILD(Unreachable, position());
918 ssa_env_->Kill(SsaEnv::kControlEnd); 886 EndControl();
919 break; 887 break;
920 } 888 }
921 case kExprI8Const: { 889 case kExprI8Const: {
922 ImmI8Operand operand(this, pc_); 890 ImmI8Operand operand(this, pc_);
923 Push(kAstI32, BUILD(Int32Constant, operand.value)); 891 Push(kAstI32, BUILD(Int32Constant, operand.value));
924 len = 1 + operand.length; 892 len = 1 + operand.length;
925 break; 893 break;
926 } 894 }
927 case kExprI32Const: { 895 case kExprI32Const: {
928 ImmI32Operand operand(this, pc_); 896 ImmI32Operand operand(this, pc_);
(...skipping 29 matching lines...)
958 } 926 }
959 } 927 }
960 len = 1 + operand.length; 928 len = 1 + operand.length;
961 break; 929 break;
962 } 930 }
963 case kExprSetLocal: { 931 case kExprSetLocal: {
964 LocalIndexOperand operand(this, pc_); 932 LocalIndexOperand operand(this, pc_);
965 if (Validate(pc_, operand)) { 933 if (Validate(pc_, operand)) {
966 Value val = Pop(0, local_type_vec_[operand.index]); 934 Value val = Pop(0, local_type_vec_[operand.index]);
967 if (ssa_env_->locals) ssa_env_->locals[operand.index] = val.node; 935 if (ssa_env_->locals) ssa_env_->locals[operand.index] = val.node;
936 }
937 len = 1 + operand.length;
938 break;
939 }
940 case kExprTeeLocal: {
941 LocalIndexOperand operand(this, pc_);
942 if (Validate(pc_, operand)) {
943 Value val = Pop(0, local_type_vec_[operand.index]);
944 if (ssa_env_->locals) ssa_env_->locals[operand.index] = val.node;
968 Push(val.type, val.node); 945 Push(val.type, val.node);
969 } 946 }
970 len = 1 + operand.length; 947 len = 1 + operand.length;
971 break; 948 break;
972 } 949 }
950 case kExprDrop: {
951 Pop();
952 break;
953 }
973 case kExprGetGlobal: { 954 case kExprGetGlobal: {
974 GlobalIndexOperand operand(this, pc_); 955 GlobalIndexOperand operand(this, pc_);
975 if (Validate(pc_, operand)) { 956 if (Validate(pc_, operand)) {
976 Push(operand.type, BUILD(GetGlobal, operand.index)); 957 Push(operand.type, BUILD(GetGlobal, operand.index));
977 } 958 }
978 len = 1 + operand.length; 959 len = 1 + operand.length;
979 break; 960 break;
980 } 961 }
981 case kExprSetGlobal: { 962 case kExprSetGlobal: {
982 GlobalIndexOperand operand(this, pc_); 963 GlobalIndexOperand operand(this, pc_);
983 if (Validate(pc_, operand)) { 964 if (Validate(pc_, operand)) {
984 Value val = Pop(0, operand.type); 965 if (operand.global->mutability) {
985 BUILD(SetGlobal, operand.index, val.node); 966 Value val = Pop(0, operand.type);
986 Push(val.type, val.node); 967 BUILD(SetGlobal, operand.index, val.node);
968 } else {
969 error(pc_, pc_ + 1, "immutable global #%u cannot be assigned",
970 operand.index);
971 }
987 } 972 }
988 len = 1 + operand.length; 973 len = 1 + operand.length;
989 break; 974 break;
990 } 975 }
991 case kExprI32LoadMem8S: 976 case kExprI32LoadMem8S:
992 len = DecodeLoadMem(kAstI32, MachineType::Int8()); 977 len = DecodeLoadMem(kAstI32, MachineType::Int8());
993 break; 978 break;
994 case kExprI32LoadMem8U: 979 case kExprI32LoadMem8U:
995 len = DecodeLoadMem(kAstI32, MachineType::Uint8()); 980 len = DecodeLoadMem(kAstI32, MachineType::Uint8());
996 break; 981 break;
997 case kExprI32LoadMem16S: 982 case kExprI32LoadMem16S:
998 len = DecodeLoadMem(kAstI32, MachineType::Int16()); 983 len = DecodeLoadMem(kAstI32, MachineType::Int16());
999 break; 984 break;
1000 case kExprI32LoadMem16U: 985 case kExprI32LoadMem16U:
1001 len = DecodeLoadMem(kAstI32, MachineType::Uint16()); 986 len = DecodeLoadMem(kAstI32, MachineType::Uint16());
1002 break; 987 break;
1003 case kExprI32LoadMem: 988 case kExprI32LoadMem:
1004 len = DecodeLoadMem(kAstI32, MachineType::Int32()); 989 len = DecodeLoadMem(kAstI32, MachineType::Int32());
1005 break; 990 break;
1006
1007 case kExprI64LoadMem8S: 991 case kExprI64LoadMem8S:
1008 len = DecodeLoadMem(kAstI64, MachineType::Int8()); 992 len = DecodeLoadMem(kAstI64, MachineType::Int8());
1009 break; 993 break;
1010 case kExprI64LoadMem8U: 994 case kExprI64LoadMem8U:
1011 len = DecodeLoadMem(kAstI64, MachineType::Uint8()); 995 len = DecodeLoadMem(kAstI64, MachineType::Uint8());
1012 break; 996 break;
1013 case kExprI64LoadMem16S: 997 case kExprI64LoadMem16S:
1014 len = DecodeLoadMem(kAstI64, MachineType::Int16()); 998 len = DecodeLoadMem(kAstI64, MachineType::Int16());
1015 break; 999 break;
1016 case kExprI64LoadMem16U: 1000 case kExprI64LoadMem16U:
(...skipping 43 matching lines...)
1060 break; 1044 break;
1061 case kExprGrowMemory: 1045 case kExprGrowMemory:
1062 if (module_->origin != kAsmJsOrigin) { 1046 if (module_->origin != kAsmJsOrigin) {
1063 Value val = Pop(0, kAstI32); 1047 Value val = Pop(0, kAstI32);
1064 Push(kAstI32, BUILD(GrowMemory, val.node)); 1048 Push(kAstI32, BUILD(GrowMemory, val.node));
1065 } else { 1049 } else {
1066 error("grow_memory is not supported for asmjs modules"); 1050 error("grow_memory is not supported for asmjs modules");
1067 } 1051 }
1068 break; 1052 break;
1069 case kExprMemorySize: 1053 case kExprMemorySize:
1070 Push(kAstI32, BUILD(MemSize, 0)); 1054 Push(kAstI32, BUILD(CurrentMemoryPages));
1071 break; 1055 break;
1072 case kExprCallFunction: { 1056 case kExprCallFunction: {
1073 CallFunctionOperand operand(this, pc_); 1057 CallFunctionOperand operand(this, pc_);
1074 if (Validate(pc_, operand)) { 1058 if (Validate(pc_, operand)) {
1075 TFNode** buffer = PopArgs(operand.sig); 1059 TFNode** buffer = PopArgs(operand.sig);
1076 TFNode* call = 1060 TFNode** rets =
1077 BUILD(CallDirect, operand.index, buffer, position()); 1061 BUILD(CallDirect, operand.index, buffer, position());
1078 Push(GetReturnType(operand.sig), call); 1062 PushReturns(operand.sig, rets);
1079 } 1063 }
1080 len = 1 + operand.length; 1064 len = 1 + operand.length;
1081 break; 1065 break;
1082 } 1066 }
1083 case kExprCallIndirect: { 1067 case kExprCallIndirect: {
1084 CallIndirectOperand operand(this, pc_); 1068 CallIndirectOperand operand(this, pc_);
1085 if (Validate(pc_, operand)) { 1069 if (Validate(pc_, operand)) {
1070 Value index = Pop(0, kAstI32);
1086 TFNode** buffer = PopArgs(operand.sig); 1071 TFNode** buffer = PopArgs(operand.sig);
1087 Value index = Pop(0, kAstI32);
1088 if (buffer) buffer[0] = index.node; 1072 if (buffer) buffer[0] = index.node;
1089 TFNode* call = 1073 TFNode** rets =
1090 BUILD(CallIndirect, operand.index, buffer, position()); 1074 BUILD(CallIndirect, operand.index, buffer, position());
1091 Push(GetReturnType(operand.sig), call); 1075 PushReturns(operand.sig, rets);
1092 } 1076 }
1093 len = 1 + operand.length; 1077 len = 1 + operand.length;
1094 break; 1078 break;
1095 }
1096 case kExprCallImport: {
1097 CallImportOperand operand(this, pc_);
1098 if (Validate(pc_, operand)) {
1099 TFNode** buffer = PopArgs(operand.sig);
1100 TFNode* call =
1101 BUILD(CallImport, operand.index, buffer, position());
1102 Push(GetReturnType(operand.sig), call);
1103 }
1104 len = 1 + operand.length;
1105 break;
1106 } 1079 }
1107 case kSimdPrefix: { 1080 case kSimdPrefix: {
1108 CHECK_PROTOTYPE_OPCODE(wasm_simd_prototype); 1081 CHECK_PROTOTYPE_OPCODE(wasm_simd_prototype);
1109 len++; 1082 len++;
1110 byte simd_index = *(pc_ + 1); 1083 byte simd_index = *(pc_ + 1);
1111 opcode = static_cast<WasmOpcode>(opcode << 8 | simd_index); 1084 opcode = static_cast<WasmOpcode>(opcode << 8 | simd_index);
1112 TRACE(" @%-4d #%02x #%02x:%-20s|", startrel(pc_), kSimdPrefix, 1085 TRACE(" @%-4d #%02x #%02x:%-20s|", startrel(pc_), kSimdPrefix,
1113 simd_index, WasmOpcodes::ShortOpcodeName(opcode)); 1086 simd_index, WasmOpcodes::ShortOpcodeName(opcode));
1114 len += DecodeSimdOpcode(opcode); 1087 len += DecodeSimdOpcode(opcode);
1115 break; 1088 break;
1116 } 1089 }
1117 default: 1090 default: {
1118 // Deal with special asmjs opcodes. 1091 // Deal with special asmjs opcodes.
1119 if (module_->origin == kAsmJsOrigin) { 1092 if (module_ && module_->origin == kAsmJsOrigin) {
1120 sig = WasmOpcodes::AsmjsSignature(opcode); 1093 sig = WasmOpcodes::AsmjsSignature(opcode);
1121 if (sig) { 1094 if (sig) {
1122 BuildSimpleOperator(opcode, sig); 1095 BuildSimpleOperator(opcode, sig);
1123 } 1096 }
1124 } else { 1097 } else {
1125 error("Invalid opcode"); 1098 error("Invalid opcode");
1126 return; 1099 return;
1127 } 1100 }
1101 }
1128 } 1102 }
1129 } // end complex bytecode 1103 }
1130 1104
1131 #if DEBUG 1105 #if DEBUG
1132 if (FLAG_trace_wasm_decoder) { 1106 if (FLAG_trace_wasm_decoder) {
1133 for (size_t i = 0; i < stack_.size(); ++i) { 1107 for (size_t i = 0; i < stack_.size(); ++i) {
1134 Value& val = stack_[i]; 1108 Value& val = stack_[i];
1135 WasmOpcode opcode = static_cast<WasmOpcode>(*val.pc); 1109 WasmOpcode opcode = static_cast<WasmOpcode>(*val.pc);
1136 if (WasmOpcodes::IsPrefixOpcode(opcode)) { 1110 if (WasmOpcodes::IsPrefixOpcode(opcode)) {
1137 opcode = static_cast<WasmOpcode>(opcode << 8 | *(val.pc + 1)); 1111 opcode = static_cast<WasmOpcode>(opcode << 8 | *(val.pc + 1));
1138 } 1112 }
1139 PrintF(" %c@%d:%s", WasmOpcodes::ShortNameOf(val.type), 1113 PrintF(" %c@%d:%s", WasmOpcodes::ShortNameOf(val.type),
1140 static_cast<int>(val.pc - start_), 1114 static_cast<int>(val.pc - start_),
1141 WasmOpcodes::ShortOpcodeName(opcode)); 1115 WasmOpcodes::ShortOpcodeName(opcode));
1142 switch (opcode) { 1116 switch (opcode) {
1143 case kExprI32Const: { 1117 case kExprI32Const: {
1144 ImmI32Operand operand(this, val.pc); 1118 ImmI32Operand operand(this, val.pc);
1145 PrintF("[%d]", operand.value); 1119 PrintF("[%d]", operand.value);
1146 break; 1120 break;
1147 } 1121 }
1148 case kExprGetLocal: { 1122 case kExprGetLocal: {
1149 LocalIndexOperand operand(this, val.pc); 1123 LocalIndexOperand operand(this, val.pc);
1150 PrintF("[%u]", operand.index); 1124 PrintF("[%u]", operand.index);
1151 break; 1125 break;
1152 } 1126 }
1153 case kExprSetLocal: { 1127 case kExprSetLocal: // fallthru
1128 case kExprTeeLocal: {
1154 LocalIndexOperand operand(this, val.pc); 1129 LocalIndexOperand operand(this, val.pc);
1155 PrintF("[%u]", operand.index); 1130 PrintF("[%u]", operand.index);
1156 break; 1131 break;
1157 } 1132 }
1158 default: 1133 default:
1159 break; 1134 break;
1160 } 1135 }
1161 } 1136 }
1162 PrintF("\n"); 1137 PrintF("\n");
1163 } 1138 }
1164 #endif 1139 #endif
1165 pc_ += len; 1140 pc_ += len;
1166 if (pc_ >= limit_) { 1141 if (pc_ >= limit_) {
1167 // End of code reached or exceeded. 1142 // End of code reached or exceeded.
1168 if (pc_ > limit_ && ok()) error("Beyond end of code"); 1143 if (pc_ > limit_ && ok()) error("Beyond end of code");
1169 return; 1144 return;
1170 } 1145 }
1171 } // end decode loop 1146 } // end decode loop
1172 } // end DecodeFunctionBody() 1147 }
1148
1149 void EndControl() { ssa_env_->Kill(SsaEnv::kControlEnd); }
1150
1151 void SetBlockType(Control* c, BlockTypeOperand& operand) {
1152 c->merge.arity = operand.arity;
1153 if (c->merge.arity == 1) {
1154 c->merge.vals.first = {pc_, nullptr, operand.read_entry(0)};
1155 } else if (c->merge.arity > 1) {
1156 c->merge.vals.array = zone_->NewArray<Value>(c->merge.arity);
1157 for (unsigned i = 0; i < c->merge.arity; i++) {
1158 c->merge.vals.array[i] = {pc_, nullptr, operand.read_entry(i)};
1159 }
1160 }
1161 }
1173 1162
1174 TFNode** PopArgs(FunctionSig* sig) { 1163 TFNode** PopArgs(FunctionSig* sig) {
1175 if (build()) { 1164 if (build()) {
1176 int count = static_cast<int>(sig->parameter_count()); 1165 int count = static_cast<int>(sig->parameter_count());
1177 TFNode** buffer = builder_->Buffer(count + 1); 1166 TFNode** buffer = builder_->Buffer(count + 1);
1178 buffer[0] = nullptr; // reserved for code object or function index. 1167 buffer[0] = nullptr; // reserved for code object or function index.
1179 for (int i = count - 1; i >= 0; i--) { 1168 for (int i = count - 1; i >= 0; i--) {
1180 buffer[i + 1] = Pop(i, sig->GetParam(i)).node; 1169 buffer[i + 1] = Pop(i, sig->GetParam(i)).node;
1181 } 1170 }
1182 return buffer; 1171 return buffer;
(...skipping 43 matching lines...)
1226 return 1 + operand.length; 1215 return 1 + operand.length;
1227 } 1216 }
1228 1217
1229 int DecodeStoreMem(LocalType type, MachineType mem_type) { 1218 int DecodeStoreMem(LocalType type, MachineType mem_type) {
1230 MemoryAccessOperand operand(this, pc_, 1219 MemoryAccessOperand operand(this, pc_,
1231 ElementSizeLog2Of(mem_type.representation())); 1220 ElementSizeLog2Of(mem_type.representation()));
1232 Value val = Pop(1, type); 1221 Value val = Pop(1, type);
1233 Value index = Pop(0, kAstI32); 1222 Value index = Pop(0, kAstI32);
1234 BUILD(StoreMem, mem_type, index.node, operand.offset, operand.alignment, 1223 BUILD(StoreMem, mem_type, index.node, operand.offset, operand.alignment,
1235 val.node, position()); 1224 val.node, position());
1236 Push(type, val.node);
1237 return 1 + operand.length; 1225 return 1 + operand.length;
1238 } 1226 }
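Note the 0xC semantic change visible here and at kExprSetLocal above: stores (and set_local) no longer leave their value on the stack, and the new tee_local/drop opcodes express "keep the value" / "discard the value" explicitly. A comment-only sketch of the difference:

    // pre-0xC: BUILD(StoreMem, ...); Push(type, val.node);   // store yielded its value
    // 0xC:     BUILD(StoreMem, ...);                         // store is void
    //          kExprTeeLocal keeps a value, kExprDrop discards one (see above)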
1239 1227
1240 unsigned DecodeSimdOpcode(WasmOpcode opcode) { 1228 unsigned DecodeSimdOpcode(WasmOpcode opcode) {
1241 unsigned len = 0; 1229 unsigned len = 0;
1242 switch (opcode) { 1230 switch (opcode) {
1243 case kExprI32x4ExtractLane: { 1231 case kExprI32x4ExtractLane: {
1244 uint8_t lane = this->checked_read_u8(pc_, 2, "lane number"); 1232 uint8_t lane = this->checked_read_u8(pc_, 2, "lane number");
1245 if (lane < 0 || lane > 3) { 1233 if (lane < 0 || lane > 3) {
1246 error(pc_, pc_ + 2, "invalid extract lane value"); 1234 error(pc_, pc_ + 2, "invalid extract lane value");
1247 } 1235 }
1248 TFNode* input = Pop(0, LocalType::kSimd128).node; 1236 TFNode* input = Pop(0, LocalType::kSimd128).node;
1249 TFNode* node = BUILD(SimdExtractLane, opcode, lane, input); 1237 TFNode* node = BUILD(SimdExtractLane, opcode, lane, input);
1250 Push(LocalType::kWord32, node); 1238 Push(LocalType::kWord32, node);
1251 len++; 1239 len++;
1252 break; 1240 break;
1253 } 1241 }
1254 default: { 1242 default: {
1255 FunctionSig* sig = WasmOpcodes::Signature(opcode); 1243 FunctionSig* sig = WasmOpcodes::Signature(opcode);
1256 if (sig != nullptr) { 1244 if (sig != nullptr) {
1257 compiler::NodeVector inputs(sig->parameter_count(), zone_); 1245 compiler::NodeVector inputs(sig->parameter_count(), zone_);
1258 for (size_t i = sig->parameter_count(); i > 0; i--) { 1246 for (size_t i = sig->parameter_count(); i > 0; i--) {
1259 Value val = Pop(static_cast<int>(i - 1), sig->GetParam(i - 1)); 1247 Value val = Pop(static_cast<int>(i - 1), sig->GetParam(i - 1));
1260 inputs[i - 1] = val.node; 1248 inputs[i - 1] = val.node;
1261 } 1249 }
1262 TFNode* node = BUILD(SimdOp, opcode, inputs); 1250 TFNode* node = BUILD(SimdOp, opcode, inputs);
1263 Push(GetReturnType(sig), node); 1251 Push(GetReturnType(sig), node);
1264 } else { 1252 } else {
1265 error(pc_, pc_, "invalid simd opcode"); 1253 error("invalid simd opcode");
1266 } 1254 }
1267 } 1255 }
1268 } 1256 }
1269 return len; 1257 return len;
1270 } 1258 }
1271 1259
1272 void DoReturn() { 1260 void DoReturn() {
1273 int count = static_cast<int>(sig_->return_count()); 1261 int count = static_cast<int>(sig_->return_count());
1274 TFNode** buffer = nullptr; 1262 TFNode** buffer = nullptr;
1275 if (build()) buffer = builder_->Buffer(count); 1263 if (build()) buffer = builder_->Buffer(count);
1276 1264
1277 // Pop return values off the stack in reverse order. 1265 // Pop return values off the stack in reverse order.
1278 for (int i = count - 1; i >= 0; i--) { 1266 for (int i = count - 1; i >= 0; i--) {
1279 Value val = Pop(i, sig_->GetReturn(i)); 1267 Value val = Pop(i, sig_->GetReturn(i));
1280 if (buffer) buffer[i] = val.node; 1268 if (buffer) buffer[i] = val.node;
1281 } 1269 }
1282 1270
1283 Push(kAstEnd, BUILD(Return, count, buffer)); 1271 BUILD(Return, count, buffer);
1284 ssa_env_->Kill(SsaEnv::kControlEnd); 1272 EndControl();
1285 } 1273 }
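
DoReturn pops the return values in reverse so that buffer[i] lines up with sig_->GetReturn(i). A toy illustration of that ordering (plain ints standing in for nodes; not V8 code):

    // Popping multiple return values in reverse order so that buffer[i]
    // ends up holding the value for return i.
    #include <cstdio>
    #include <vector>

    int main() {
      std::vector<int> stack = {10, 20, 30};  // values for returns 0, 1, 2
      const int count = 3;
      std::vector<int> buffer(count);
      for (int i = count - 1; i >= 0; i--) {  // pop in reverse
        buffer[i] = stack.back();
        stack.pop_back();
      }
      for (int i = 0; i < count; i++) std::printf("return[%d] = %d\n", i, buffer[i]);
    }
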
1286 1274
1287 void Push(LocalType type, TFNode* node) { 1275 void Push(LocalType type, TFNode* node) {
1288 stack_.push_back({pc_, node, type}); 1276 if (type != kAstStmt && type != kAstEnd) {
1277 stack_.push_back({pc_, node, type});
1278 }
1279 }
1280
1281 void PushReturns(FunctionSig* sig, TFNode** rets) {
1282 for (size_t i = 0; i < sig->return_count(); i++) {
1283 // When verifying only, then {rets} will be null, so push null.
1284 Push(sig->GetReturn(i), rets ? rets[i] : nullptr);
1285 }
1289 } 1286 }
1290 1287
1291 const char* SafeOpcodeNameAt(const byte* pc) { 1288 const char* SafeOpcodeNameAt(const byte* pc) {
1292 if (pc >= end_) return "<end>"; 1289 if (pc >= end_) return "<end>";
1293 return WasmOpcodes::ShortOpcodeName(static_cast<WasmOpcode>(*pc)); 1290 return WasmOpcodes::ShortOpcodeName(static_cast<WasmOpcode>(*pc));
1294 } 1291 }
1295 1292
1296 Value Pop(int index, LocalType expected) { 1293 Value Pop(int index, LocalType expected) {
1294 if (!ssa_env_->go()) {
1295 // Unreachable code is essentially not typechecked.
1296 return {pc_, nullptr, expected};
1297 }
1297 Value val = Pop(); 1298 Value val = Pop();
1298 if (val.type != expected) { 1299 if (val.type != expected) {
1299 if (val.type != kAstEnd) { 1300 if (val.type != kAstEnd) {
1300 error(pc_, val.pc, "%s[%d] expected type %s, found %s of type %s", 1301 error(pc_, val.pc, "%s[%d] expected type %s, found %s of type %s",
1301 SafeOpcodeNameAt(pc_), index, WasmOpcodes::TypeName(expected), 1302 SafeOpcodeNameAt(pc_), index, WasmOpcodes::TypeName(expected),
1302 SafeOpcodeNameAt(val.pc), WasmOpcodes::TypeName(val.type)); 1303 SafeOpcodeNameAt(val.pc), WasmOpcodes::TypeName(val.type));
1303 } 1304 }
1304 } 1305 }
1305 return val; 1306 return val;
1306 } 1307 }
1307 1308
1308 Value Pop() { 1309 Value Pop() {
1310 if (!ssa_env_->go()) {
1311 // Unreachable code is essentially not typechecked.
1312 return {pc_, nullptr, kAstEnd};
1313 }
1309 size_t limit = control_.empty() ? 0 : control_.back().stack_depth; 1314 size_t limit = control_.empty() ? 0 : control_.back().stack_depth;
1310 if (stack_.size() <= limit) { 1315 if (stack_.size() <= limit) {
1311 Value val = {pc_, nullptr, kAstStmt}; 1316 Value val = {pc_, nullptr, kAstStmt};
1312 error(pc_, pc_, "%s found empty stack", SafeOpcodeNameAt(pc_)); 1317 error(pc_, pc_, "%s found empty stack", SafeOpcodeNameAt(pc_));
1313 return val; 1318 return val;
1314 } 1319 }
1315 Value val = stack_.back(); 1320 Value val = stack_.back();
1316 stack_.pop_back(); 1321 stack_.pop_back();
1317 return val; 1322 return val;
1318 } 1323 }
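
The new early-outs in Pop(index, expected), Pop(), and PopUpTo implement the rule stated in the comment: once the environment is unreachable, operands are synthesized rather than typechecked. A small standalone model of that rule (illustrative names, not the real decoder):

    // "Unreachable code is essentially not typechecked": when control is dead,
    // Pop returns a value of the expected type instead of checking the stack.
    #include <cstdio>
    #include <vector>

    enum class Ty { I32, F64, Bottom };

    struct Checker {
      bool reachable = true;
      std::vector<Ty> stack;

      Ty Pop(Ty expected) {
        if (!reachable) return expected;  // no check in dead code
        if (stack.empty()) { std::puts("error: empty stack"); return Ty::Bottom; }
        Ty got = stack.back();
        stack.pop_back();
        if (got != expected) std::puts("error: type mismatch");
        return got;
      }
    };

    int main() {
      Checker c;
      c.reachable = false;  // e.g. after an unconditional br or return
      c.Pop(Ty::F64);       // accepted without touching the stack
      std::puts("ok");
    }
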
1319 1324
1320 Value PopUpTo(int stack_depth) { 1325 Value PopUpTo(int stack_depth) {
1326 if (!ssa_env_->go()) {
1327 // Unreachable code is essentially not typechecked.
1328 return {pc_, nullptr, kAstEnd};
1329 }
1321 if (stack_depth == stack_.size()) { 1330 if (stack_depth == stack_.size()) {
1322 Value val = {pc_, nullptr, kAstStmt}; 1331 Value val = {pc_, nullptr, kAstStmt};
1323 return val; 1332 return val;
1324 } else { 1333 } else {
1325 DCHECK_LE(stack_depth, static_cast<int>(stack_.size())); 1334 DCHECK_LE(stack_depth, static_cast<int>(stack_.size()));
1326 Value val = Pop(); 1335 Value val = Pop();
1327 stack_.resize(stack_depth); 1336 stack_.resize(stack_depth);
1328 return val; 1337 return val;
1329 } 1338 }
1330 } 1339 }
1331 1340
1332 int baserel(const byte* ptr) { 1341 int baserel(const byte* ptr) {
1333 return base_ ? static_cast<int>(ptr - base_) : 0; 1342 return base_ ? static_cast<int>(ptr - base_) : 0;
1334 } 1343 }
1335 1344
1336 int startrel(const byte* ptr) { return static_cast<int>(ptr - start_); } 1345 int startrel(const byte* ptr) { return static_cast<int>(ptr - start_); }
1337 1346
1338 void BreakTo(Control* block, const Value& val) { 1347 void BreakTo(unsigned depth) {
1339 if (block->is_loop) { 1348 if (!ssa_env_->go()) return;
1349 Control* c = &control_[control_.size() - depth - 1];
1350 if (c->is_loop()) {
1340 // This is the inner loop block, which does not have a value. 1351 // This is the inner loop block, which does not have a value.
1341 Goto(ssa_env_, block->end_env); 1352 Goto(ssa_env_, c->end_env);
1342 } else { 1353 } else {
1343 // Merge the value into the production for the block. 1354 // Merge the value(s) into the end of the block.
1344 MergeInto(block->end_env, &block->node, &block->type, val); 1355 if (static_cast<size_t>(c->stack_depth + c->merge.arity) >
1356 stack_.size()) {
1357 error(
1358 pc_, pc_,
1359 "expected at least %d values on the stack for br to @%d, found %d",
1360 c->merge.arity, startrel(c->pc),
1361 static_cast<int>(stack_.size() - c->stack_depth));
1362 return;
1363 }
1364 MergeValuesInto(c);
1345 } 1365 }
1346 } 1366 }
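
BreakTo now takes a relative depth instead of a precomputed block pointer: depth 0 is the innermost construct, so the target is control_[control_.size() - depth - 1]. A tiny sketch of that indexing (the example labels are assumed, not V8 code):

    // A br depth is relative to the innermost enclosing construct: depth 0
    // targets the top of the control stack, depth d targets size - d - 1.
    #include <cassert>
    #include <string>
    #include <vector>

    int main() {
      std::vector<std::string> control = {"outer block", "loop", "inner block"};
      unsigned depth = 1;  // br 1 issued from inside the inner block
      const std::string& target = control[control.size() - depth - 1];
      assert(target == "loop");
    }
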
1347 1367
1348 void MergeInto(SsaEnv* target, TFNode** node, LocalType* type, 1368 void FallThruTo(Control* c) {
1349 const Value& val) {
1350 if (!ssa_env_->go()) return; 1369 if (!ssa_env_->go()) return;
1351 DCHECK_NE(kAstEnd, val.type); 1370 // Merge the value(s) into the end of the block.
1371 int arity = static_cast<int>(c->merge.arity);
1372 if (c->stack_depth + arity != stack_.size()) {
1373 error(pc_, pc_, "expected %d elements on the stack for fallthru to @%d",
1374 arity, startrel(c->pc));
1375 return;
1376 }
1377 MergeValuesInto(c);
1378 }
1352 1379
1380 inline Value& GetMergeValueFromStack(Control* c, int i) {
1381 return stack_[stack_.size() - c->merge.arity + i];
1382 }
1383
1384 void TypeCheckLoopFallThru(Control* c) {
1385 if (!ssa_env_->go()) return;
1386 // Fallthru must match arity exactly.
1387 int arity = static_cast<int>(c->merge.arity);
1388 if (c->stack_depth + arity != stack_.size()) {
1389 error(pc_, pc_, "expected %d elements on the stack for fallthru to @%d",
1390 arity, startrel(c->pc));
1391 return;
1392 }
1393 // Typecheck the values left on the stack.
1394 for (unsigned i = 0; i < c->merge.arity; i++) {
1395 Value& val = GetMergeValueFromStack(c, i);
1396 Value& old =
1397 c->merge.arity == 1 ? c->merge.vals.first : c->merge.vals.array[i];
1398 if (val.type != old.type) {
1399 error(pc_, pc_, "type error in merge[%d] (expected %s, got %s)", i,
1400 WasmOpcodes::TypeName(old.type), WasmOpcodes::TypeName(val.type));
1401 return;
1402 }
1403 }
1404 }
1405
1406 void MergeValuesInto(Control* c) {
1407 SsaEnv* target = c->end_env;
1353 bool first = target->state == SsaEnv::kUnreachable; 1408 bool first = target->state == SsaEnv::kUnreachable;
1354 Goto(ssa_env_, target); 1409 Goto(ssa_env_, target);
1355 1410
1356 if (first) { 1411 for (unsigned i = 0; i < c->merge.arity; i++) {
1357 // first merge to this environment; set the type and the node. 1412 Value& val = GetMergeValueFromStack(c, i);
1358 *type = val.type; 1413 Value& old =
1359 *node = val.node; 1414 c->merge.arity == 1 ? c->merge.vals.first : c->merge.vals.array[i];
1360 } else if (val.type == *type && val.type != kAstStmt) { 1415 if (val.type != old.type) {
1361 // merge with the existing value for this block. 1416 error(pc_, pc_, "type error in merge[%d] (expected %s, got %s)", i,
1362 *node = CreateOrMergeIntoPhi(*type, target->control, *node, val.node); 1417 WasmOpcodes::TypeName(old.type), WasmOpcodes::TypeName(val.type));
1363 } else { 1418 return;
1364 // types don't match, or block is already a stmt. 1419 }
1365 *type = kAstStmt; 1420 old.node =
1366 *node = nullptr; 1421 first ? val.node : CreateOrMergeIntoPhi(old.type, target->control,
1422 old.node, val.node);
1367 } 1423 }
1368 } 1424 }
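
MergeValuesInto checks each value left on the stack against the block's declared result type and either records it (first arrival) or merges it with the previously recorded value via CreateOrMergeIntoPhi. A simplified standalone model of that merge discipline (strings stand in for SSA nodes; this is not the real builder API):

    // First arrival records the value; later arrivals must match the declared
    // type and are combined (standing in for phi creation at the block end).
    #include <cstddef>
    #include <cstdio>
    #include <optional>
    #include <string>
    #include <utility>
    #include <vector>

    enum class Ty { I32, F64 };

    struct Slot {
      Ty type;
      std::optional<std::string> value;  // stands in for a TFNode*
    };

    bool MergeArrival(std::vector<Slot>* slots,
                      const std::vector<std::pair<Ty, std::string>>& incoming) {
      if (incoming.size() != slots->size()) return false;  // arity mismatch
      for (std::size_t i = 0; i < slots->size(); i++) {
        Slot& s = (*slots)[i];
        if (incoming[i].first != s.type) return false;  // type error in merge[i]
        s.value = s.value ? "phi(" + *s.value + ", " + incoming[i].second + ")"
                          : incoming[i].second;
      }
      return true;
    }

    int main() {
      std::vector<Slot> slots = {{Ty::I32, std::nullopt}};
      MergeArrival(&slots, {{Ty::I32, "a"}});  // first branch into the block end
      MergeArrival(&slots, {{Ty::I32, "b"}});  // second branch in
      std::printf("%s\n", slots[0].value->c_str());  // prints phi(a, b)
    }
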
1369 1425
1370 void SetEnv(const char* reason, SsaEnv* env) { 1426 void SetEnv(const char* reason, SsaEnv* env) {
1371 #if DEBUG 1427 #if DEBUG
1372 if (FLAG_trace_wasm_decoder) { 1428 if (FLAG_trace_wasm_decoder) {
1373 char state = 'X'; 1429 char state = 'X';
1374 if (env) { 1430 if (env) {
1375 switch (env->state) { 1431 switch (env->state) {
1376 case SsaEnv::kReached: 1432 case SsaEnv::kReached:
(...skipping 208 matching lines...)
1585 int depth = 0; 1641 int depth = 0;
1586 // Iteratively process all AST nodes nested inside the loop. 1642 // Iteratively process all AST nodes nested inside the loop.
1587 while (pc < limit_ && ok()) { 1643 while (pc < limit_ && ok()) {
1588 WasmOpcode opcode = static_cast<WasmOpcode>(*pc); 1644 WasmOpcode opcode = static_cast<WasmOpcode>(*pc);
1589 unsigned length = 1; 1645 unsigned length = 1;
1590 switch (opcode) { 1646 switch (opcode) {
1591 case kExprLoop: 1647 case kExprLoop:
1592 case kExprIf: 1648 case kExprIf:
1593 case kExprBlock: 1649 case kExprBlock:
1594 case kExprTry: 1650 case kExprTry:
1651 length = OpcodeLength(pc);
1595 depth++; 1652 depth++;
1596 DCHECK_EQ(1, OpcodeLength(pc));
1597 break; 1653 break;
1598 case kExprSetLocal: { 1654 case kExprSetLocal: // fallthru
1655 case kExprTeeLocal: {
1599 LocalIndexOperand operand(this, pc); 1656 LocalIndexOperand operand(this, pc);
1600 if (assigned->length() > 0 && 1657 if (assigned->length() > 0 &&
1601 operand.index < static_cast<uint32_t>(assigned->length())) { 1658 operand.index < static_cast<uint32_t>(assigned->length())) {
1602 // Unverified code might have an out-of-bounds index. 1659 // Unverified code might have an out-of-bounds index.
1603 assigned->Add(operand.index); 1660 assigned->Add(operand.index);
1604 } 1661 }
1605 length = 1 + operand.length; 1662 length = 1 + operand.length;
1606 break; 1663 break;
1607 } 1664 }
1608 case kExprEnd: 1665 case kExprEnd:
(...skipping 72 matching lines...)
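
The analysis above records which locals are written by set_local or tee_local inside a loop body; presumably only those locals then need to be treated as loop-carried (e.g. given phis at the loop header). A self-contained sketch of that scan (toy opcode enum, not V8's):

    // Scan a loop body for set_local / tee_local and collect the assigned
    // local indices.
    #include <cstdio>
    #include <set>
    #include <utility>
    #include <vector>

    enum class Op { SetLocal, TeeLocal, GetLocal, Other };

    std::set<unsigned> AssignedLocals(const std::vector<std::pair<Op, unsigned>>& body) {
      std::set<unsigned> assigned;
      for (const auto& insn : body) {
        if (insn.first == Op::SetLocal || insn.first == Op::TeeLocal) {
          assigned.insert(insn.second);  // local index operand
        }
      }
      return assigned;
    }

    int main() {
      std::vector<std::pair<Op, unsigned>> body = {
          {Op::GetLocal, 0}, {Op::SetLocal, 1}, {Op::TeeLocal, 2}, {Op::Other, 0}};
      for (unsigned idx : AssignedLocals(body)) std::printf("local %u assigned\n", idx);
    }
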
1681 WasmFullDecoder decoder(&zone, builder, body); 1738 WasmFullDecoder decoder(&zone, builder, body);
1682 decoder.Decode(); 1739 decoder.Decode();
1683 return decoder.toResult<DecodeStruct*>(nullptr); 1740 return decoder.toResult<DecodeStruct*>(nullptr);
1684 } 1741 }
1685 1742
1686 unsigned OpcodeLength(const byte* pc, const byte* end) { 1743 unsigned OpcodeLength(const byte* pc, const byte* end) {
1687 WasmDecoder decoder(nullptr, nullptr, pc, end); 1744 WasmDecoder decoder(nullptr, nullptr, pc, end);
1688 return decoder.OpcodeLength(pc); 1745 return decoder.OpcodeLength(pc);
1689 } 1746 }
1690 1747
1691 unsigned OpcodeArity(const byte* pc, const byte* end) {
1692 WasmDecoder decoder(nullptr, nullptr, pc, end);
1693 return decoder.OpcodeArity(pc);
1694 }
1695
1696 void PrintAstForDebugging(const byte* start, const byte* end) { 1748 void PrintAstForDebugging(const byte* start, const byte* end) {
1697 AccountingAllocator allocator; 1749 AccountingAllocator allocator;
1698 OFStream os(stdout); 1750 OFStream os(stdout);
1699 PrintAst(&allocator, FunctionBodyForTesting(start, end), os, nullptr); 1751 PrintAst(&allocator, FunctionBodyForTesting(start, end), os, nullptr);
1700 } 1752 }
1701 1753
1702 bool PrintAst(AccountingAllocator* allocator, const FunctionBody& body, 1754 bool PrintAst(AccountingAllocator* allocator, const FunctionBody& body,
1703 std::ostream& os, 1755 std::ostream& os,
1704 std::vector<std::tuple<uint32_t, int, int>>* offset_table) { 1756 std::vector<std::tuple<uint32_t, int, int>>* offset_table) {
1705 Zone zone(allocator); 1757 Zone zone(allocator);
(...skipping 45 matching lines...)
1751 const char* padding = 1803 const char* padding =
1752 " "; 1804 " ";
1753 os.write(padding, num_whitespaces); 1805 os.write(padding, num_whitespaces);
1754 os << "k" << WasmOpcodes::OpcodeName(opcode) << ","; 1806 os << "k" << WasmOpcodes::OpcodeName(opcode) << ",";
1755 1807
1756 for (size_t j = 1; j < length; ++j) { 1808 for (size_t j = 1; j < length; ++j) {
1757 os << " " << AsHex(i.pc()[j], 2) << ","; 1809 os << " " << AsHex(i.pc()[j], 2) << ",";
1758 } 1810 }
1759 1811
1760 switch (opcode) { 1812 switch (opcode) {
1761 case kExprIf:
1762 case kExprElse: 1813 case kExprElse:
1763 case kExprLoop:
1764 case kExprBlock:
1765 case kExprTry:
1766 os << " // @" << i.pc_offset(); 1814 os << " // @" << i.pc_offset();
1767 control_depth++; 1815 control_depth++;
1768 break; 1816 break;
1817 case kExprLoop:
1818 case kExprIf:
1819 case kExprBlock:
1820 case kExprTry: {
1821 BlockTypeOperand operand(&i, i.pc());
1822 os << " // @" << i.pc_offset();
1823 for (unsigned i = 0; i < operand.arity; i++) {
1824 os << " " << WasmOpcodes::TypeName(operand.read_entry(i));
1825 }
1826 control_depth++;
1827 break;
1828 }
1769 case kExprEnd: 1829 case kExprEnd:
1770 os << " // @" << i.pc_offset(); 1830 os << " // @" << i.pc_offset();
1771 control_depth--; 1831 control_depth--;
1772 break; 1832 break;
1773 case kExprBr: { 1833 case kExprBr: {
1774 BreakDepthOperand operand(&i, i.pc()); 1834 BreakDepthOperand operand(&i, i.pc());
1775 os << " // arity=" << operand.arity << " depth=" << operand.depth; 1835 os << " // depth=" << operand.depth;
1776 break; 1836 break;
1777 } 1837 }
1778 case kExprBrIf: { 1838 case kExprBrIf: {
1779 BreakDepthOperand operand(&i, i.pc()); 1839 BreakDepthOperand operand(&i, i.pc());
1780 os << " // arity=" << operand.arity << " depth" << operand.depth; 1840 os << " // depth=" << operand.depth;
1781 break; 1841 break;
1782 } 1842 }
1783 case kExprBrTable: { 1843 case kExprBrTable: {
1784 BranchTableOperand operand(&i, i.pc()); 1844 BranchTableOperand operand(&i, i.pc());
1785 os << " // arity=" << operand.arity 1845 os << " // entries=" << operand.table_count;
1786 << " entries=" << operand.table_count;
1787 break; 1846 break;
1788 } 1847 }
1789 case kExprCallIndirect: { 1848 case kExprCallIndirect: {
1790 CallIndirectOperand operand(&i, i.pc()); 1849 CallIndirectOperand operand(&i, i.pc());
1850 os << " // sig #" << operand.index;
1791 if (decoder.Complete(i.pc(), operand)) { 1851 if (decoder.Complete(i.pc(), operand)) {
1792 os << " // sig #" << operand.index << ": " << *operand.sig; 1852 os << ": " << *operand.sig;
1793 } else {
1794 os << " // arity=" << operand.arity << " sig #" << operand.index;
1795 }
1796 break;
1797 }
1798 case kExprCallImport: {
1799 CallImportOperand operand(&i, i.pc());
1800 if (decoder.Complete(i.pc(), operand)) {
1801 os << " // import #" << operand.index << ": " << *operand.sig;
1802 } else {
1803 os << " // arity=" << operand.arity << " import #" << operand.index;
1804 } 1853 }
1805 break; 1854 break;
1806 } 1855 }
1807 case kExprCallFunction: { 1856 case kExprCallFunction: {
1808 CallFunctionOperand operand(&i, i.pc()); 1857 CallFunctionOperand operand(&i, i.pc());
1858 os << " // function #" << operand.index;
1809 if (decoder.Complete(i.pc(), operand)) { 1859 if (decoder.Complete(i.pc(), operand)) {
1810 os << " // function #" << operand.index << ": " << *operand.sig; 1860 os << ": " << *operand.sig;
1811 } else {
1812 os << " // arity=" << operand.arity << " function #" << operand.index;
1813 } 1861 }
1814 break; 1862 break;
1815 } 1863 }
1816 case kExprReturn: {
1817 ReturnArityOperand operand(&i, i.pc());
1818 os << " // arity=" << operand.arity;
1819 break;
1820 }
1821 default: 1864 default:
1822 break; 1865 break;
1823 } 1866 }
1824 os << std::endl; 1867 os << std::endl;
1825 ++line_nr; 1868 ++line_nr;
1826 } 1869 }
1827 1870
1828 return decoder.ok(); 1871 return decoder.ok();
1829 } 1872 }
1830 1873
1831 BitVector* AnalyzeLoopAssignmentForTesting(Zone* zone, size_t num_locals, 1874 BitVector* AnalyzeLoopAssignmentForTesting(Zone* zone, size_t num_locals,
1832 const byte* start, const byte* end) { 1875 const byte* start, const byte* end) {
1833 FunctionBody body = {nullptr, nullptr, nullptr, start, end}; 1876 FunctionBody body = {nullptr, nullptr, nullptr, start, end};
1834 WasmFullDecoder decoder(zone, nullptr, body); 1877 WasmFullDecoder decoder(zone, nullptr, body);
1835 return decoder.AnalyzeLoopAssignmentForTesting(start, num_locals); 1878 return decoder.AnalyzeLoopAssignmentForTesting(start, num_locals);
1836 } 1879 }
1837 1880
1838 } // namespace wasm 1881 } // namespace wasm
1839 } // namespace internal 1882 } // namespace internal
1840 } // namespace v8 1883 } // namespace v8