Chromium Code Reviews

Side by Side Diff: src/wasm/ast-decoder.cc

Issue 2361053004: Revert of [wasm] Master CL for Binary 0xC changes. (Closed)
Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Created 4 years, 2 months ago
OLD | NEW
1 // Copyright 2015 the V8 project authors. All rights reserved. 1 // Copyright 2015 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "src/signature.h" 5 #include "src/signature.h"
6 6
7 #include "src/bit-vector.h" 7 #include "src/bit-vector.h"
8 #include "src/flags.h" 8 #include "src/flags.h"
9 #include "src/handles.h" 9 #include "src/handles.h"
10 #include "src/zone/zone-containers.h" 10 #include "src/zone/zone-containers.h"
(...skipping 18 matching lines...)
29 } while (false) 29 } while (false)
30 #else 30 #else
31 #define TRACE(...) 31 #define TRACE(...)
32 #endif 32 #endif
33 33
34 #define CHECK_PROTOTYPE_OPCODE(flag) \ 34 #define CHECK_PROTOTYPE_OPCODE(flag) \
35 if (!FLAG_##flag) { \ 35 if (!FLAG_##flag) { \
36 error("Invalid opcode (enable with --" #flag ")"); \ 36 error("Invalid opcode (enable with --" #flag ")"); \
37 break; \ 37 break; \
38 } 38 }
39 // TODO(titzer): this is only for intermediate migration.
40 #define IMPLICIT_FUNCTION_END 1
41 39
42 // An SsaEnv environment carries the current local variable renaming 40 // An SsaEnv environment carries the current local variable renaming
43 // as well as the current effect and control dependency in the TF graph. 41 // as well as the current effect and control dependency in the TF graph.
44 // It maintains a control state that tracks whether the environment 42 // It maintains a control state that tracks whether the environment
45 // is reachable, has reached a control end, or has been merged. 43 // is reachable, has reached a control end, or has been merged.
46 struct SsaEnv { 44 struct SsaEnv {
47 enum State { kControlEnd, kUnreachable, kReached, kMerged }; 45 enum State { kControlEnd, kUnreachable, kReached, kMerged };
48 46
49 State state; 47 State state;
50 TFNode* control; 48 TFNode* control;
(...skipping 14 matching lines...)
65 63
66 // An entry on the value stack. 64 // An entry on the value stack.
67 struct Value { 65 struct Value {
68 const byte* pc; 66 const byte* pc;
69 TFNode* node; 67 TFNode* node;
70 LocalType type; 68 LocalType type;
71 }; 69 };
72 70
73 struct Control; 71 struct Control;
74 72
75 struct MergeValues { 73 // An entry on the control stack (i.e. if, block, loop, try).
76 uint32_t arity;
77 union {
78 Value* array;
79 Value first;
80 } vals; // Either multiple values or a single value.
81
82 Value& first() {
83 DCHECK_GT(arity, 0u);
84 return arity == 1 ? vals.first : vals.array[0];
85 }
86 };
87
88 // IncomingBranch is used by exception handling code for managing finally's.
89 struct IncomingBranch {
90 int32_t token_value;
91 Control* target;
92 MergeValues merge;
93 };
94
95 static Value* NO_VALUE = nullptr;
96
97 enum ControlKind { kControlIf, kControlBlock, kControlLoop, kControlTry };
98
99 // An entry on the control stack (i.e. if, block, loop).
100 struct Control { 74 struct Control {
101 const byte* pc; 75 const byte* pc;
102 ControlKind kind;
103 int stack_depth; // stack height at the beginning of the construct. 76 int stack_depth; // stack height at the beginning of the construct.
104 SsaEnv* end_env; // end environment for the construct. 77 SsaEnv* end_env; // end environment for the construct.
105 SsaEnv* false_env; // false environment (only for if). 78 SsaEnv* false_env; // false environment (only for if).
106 SsaEnv* catch_env; // catch environment (only for try). 79 SsaEnv* catch_env; // catch environment (only for try).
80 TFNode* node; // result node for the construct.
81 LocalType type; // result type for the construct.
82 bool is_loop; // true if this is the inner label of a loop.
107 83
108 // Values merged into the end of this control construct. 84 bool is_if() const { return *pc == kExprIf; }
109 MergeValues merge;
110 85
111 inline bool is_if() const { return kind == kControlIf; } 86 bool is_try() const { return *pc == kExprTry; }
112 inline bool is_block() const { return kind == kControlBlock; }
113 inline bool is_loop() const { return kind == kControlLoop; }
114 inline bool is_try() const { return kind == kControlTry; }
115 87
116 // Named constructors. 88 // Named constructors.
117 static Control Block(const byte* pc, int stack_depth, SsaEnv* end_env) { 89 static Control Block(const byte* pc, int stack_depth, SsaEnv* end_env) {
118 return {pc, kControlBlock, stack_depth, end_env, 90 return {pc, stack_depth, end_env, nullptr,
119 nullptr, nullptr, {0, {NO_VALUE}}}; 91 nullptr, nullptr, kAstEnd, false};
120 } 92 }
121 93
122 static Control If(const byte* pc, int stack_depth, SsaEnv* end_env, 94 static Control If(const byte* pc, int stack_depth, SsaEnv* end_env,
123 SsaEnv* false_env) { 95 SsaEnv* false_env) {
124 return {pc, kControlIf, stack_depth, end_env, 96 return {pc, stack_depth, end_env, false_env,
125 false_env, nullptr, {0, {NO_VALUE}}}; 97 nullptr, nullptr, kAstStmt, false};
126 } 98 }
127 99
128 static Control Loop(const byte* pc, int stack_depth, SsaEnv* end_env) { 100 static Control Loop(const byte* pc, int stack_depth, SsaEnv* end_env) {
129 return {pc, kControlLoop, stack_depth, end_env, 101 return {pc, stack_depth, end_env, nullptr, nullptr, nullptr, kAstEnd, true};
130 nullptr, nullptr, {0, {NO_VALUE}}};
131 } 102 }
132 103
133 static Control Try(const byte* pc, int stack_depth, SsaEnv* end_env, 104 static Control Try(const byte* pc, int stack_depth, SsaEnv* end_env,
134 SsaEnv* catch_env) { 105 SsaEnv* catch_env) {
135 return {pc, kControlTry, stack_depth, end_env, 106 return {pc, stack_depth, end_env, nullptr,
136 nullptr, catch_env, {0, {NO_VALUE}}}; 107 catch_env, nullptr, kAstEnd, false};
137 } 108 }
138 }; 109 };
139 110
140 // Macros that build nodes only if there is a graph and the current SSA 111 // Macros that build nodes only if there is a graph and the current SSA
141 // environment is reachable from start. This avoids problems with malformed 112 // environment is reachable from start. This avoids problems with malformed
142 // TF graphs when decoding inputs that have unreachable code. 113 // TF graphs when decoding inputs that have unreachable code.
143 #define BUILD(func, ...) (build() ? builder_->func(__VA_ARGS__) : nullptr) 114 #define BUILD(func, ...) (build() ? builder_->func(__VA_ARGS__) : nullptr)
144 #define BUILD0(func) (build() ? builder_->func() : nullptr) 115 #define BUILD0(func) (build() ? builder_->func() : nullptr)
145 116
146 // Generic Wasm bytecode decoder with utilities for decoding operands, 117 // Generic Wasm bytecode decoder with utilities for decoding operands,
(...skipping 14 matching lines...)
161 132
162 inline bool Validate(const byte* pc, LocalIndexOperand& operand) { 133 inline bool Validate(const byte* pc, LocalIndexOperand& operand) {
163 if (operand.index < total_locals_) { 134 if (operand.index < total_locals_) {
164 if (local_types_) { 135 if (local_types_) {
165 operand.type = local_types_->at(operand.index); 136 operand.type = local_types_->at(operand.index);
166 } else { 137 } else {
167 operand.type = kAstStmt; 138 operand.type = kAstStmt;
168 } 139 }
169 return true; 140 return true;
170 } 141 }
171 error(pc, pc + 1, "invalid local index: %u", operand.index); 142 error(pc, pc + 1, "invalid local index");
172 return false; 143 return false;
173 } 144 }
174 145
175 inline bool Validate(const byte* pc, GlobalIndexOperand& operand) { 146 inline bool Validate(const byte* pc, GlobalIndexOperand& operand) {
176 ModuleEnv* m = module_; 147 ModuleEnv* m = module_;
177 if (m && m->module && operand.index < m->module->globals.size()) { 148 if (m && m->module && operand.index < m->module->globals.size()) {
178 operand.global = &m->module->globals[operand.index]; 149 operand.type = m->module->globals[operand.index].type;
179 operand.type = operand.global->type;
180 return true; 150 return true;
181 } 151 }
182 error(pc, pc + 1, "invalid global index: %u", operand.index); 152 error(pc, pc + 1, "invalid global index");
183 return false; 153 return false;
184 } 154 }
185 155
186 inline bool Complete(const byte* pc, CallFunctionOperand& operand) { 156 inline bool Complete(const byte* pc, CallFunctionOperand& operand) {
187 ModuleEnv* m = module_; 157 ModuleEnv* m = module_;
188 if (m && m->module && operand.index < m->module->functions.size()) { 158 if (m && m->module && operand.index < m->module->functions.size()) {
189 operand.sig = m->module->functions[operand.index].sig; 159 operand.sig = m->module->functions[operand.index].sig;
190 return true; 160 return true;
191 } 161 }
192 return false; 162 return false;
193 } 163 }
194 164
195 inline bool Validate(const byte* pc, CallFunctionOperand& operand) { 165 inline bool Validate(const byte* pc, CallFunctionOperand& operand) {
196 if (Complete(pc, operand)) { 166 if (Complete(pc, operand)) {
167 uint32_t expected = static_cast<uint32_t>(operand.sig->parameter_count());
168 if (operand.arity != expected) {
169 error(pc, pc + 1,
170 "arity mismatch in direct function call (expected %u, got %u)",
171 expected, operand.arity);
172 return false;
173 }
197 return true; 174 return true;
198 } 175 }
199 error(pc, pc + 1, "invalid function index: %u", operand.index); 176 error(pc, pc + 1, "invalid function index");
200 return false; 177 return false;
201 } 178 }
202 179
203 inline bool Complete(const byte* pc, CallIndirectOperand& operand) { 180 inline bool Complete(const byte* pc, CallIndirectOperand& operand) {
204 ModuleEnv* m = module_; 181 ModuleEnv* m = module_;
205 if (m && m->module && operand.index < m->module->signatures.size()) { 182 if (m && m->module && operand.index < m->module->signatures.size()) {
206 operand.sig = m->module->signatures[operand.index]; 183 operand.sig = m->module->signatures[operand.index];
207 return true; 184 return true;
208 } 185 }
209 return false; 186 return false;
210 } 187 }
211 188
212 inline bool Validate(const byte* pc, CallIndirectOperand& operand) { 189 inline bool Validate(const byte* pc, CallIndirectOperand& operand) {
213 if (Complete(pc, operand)) { 190 if (Complete(pc, operand)) {
191 uint32_t expected = static_cast<uint32_t>(operand.sig->parameter_count());
192 if (operand.arity != expected) {
193 error(pc, pc + 1,
194 "arity mismatch in indirect function call (expected %u, got %u)",
195 expected, operand.arity);
196 return false;
197 }
214 return true; 198 return true;
215 } 199 }
216 error(pc, pc + 1, "invalid signature index: #%u", operand.index); 200 error(pc, pc + 1, "invalid signature index");
201 return false;
202 }
203
204 inline bool Complete(const byte* pc, CallImportOperand& operand) {
205 ModuleEnv* m = module_;
206 if (m && m->module && operand.index < m->module->import_table.size()) {
207 operand.sig = m->module->import_table[operand.index].sig;
208 return true;
209 }
210 return false;
211 }
212
213 inline bool Validate(const byte* pc, CallImportOperand& operand) {
214 if (Complete(pc, operand)) {
215 uint32_t expected = static_cast<uint32_t>(operand.sig->parameter_count());
216 if (operand.arity != expected) {
217 error(pc, pc + 1, "arity mismatch in import call (expected %u, got %u)",
218 expected, operand.arity);
219 return false;
220 }
221 return true;
222 }
223 error(pc, pc + 1, "invalid signature index");
217 return false; 224 return false;
218 } 225 }
219 226
220 inline bool Validate(const byte* pc, BreakDepthOperand& operand, 227 inline bool Validate(const byte* pc, BreakDepthOperand& operand,
221 ZoneVector<Control>& control) { 228 ZoneVector<Control>& control) {
229 if (operand.arity > 1) {
230 error(pc, pc + 1, "invalid arity for br or br_if");
231 return false;
232 }
222 if (operand.depth < control.size()) { 233 if (operand.depth < control.size()) {
223 operand.target = &control[control.size() - operand.depth - 1]; 234 operand.target = &control[control.size() - operand.depth - 1];
224 return true; 235 return true;
225 } 236 }
226 error(pc, pc + 1, "invalid break depth: %u", operand.depth); 237 error(pc, pc + 1, "invalid break depth");
227 return false; 238 return false;
228 } 239 }
229 240
230 bool Validate(const byte* pc, BranchTableOperand& operand, 241 bool Validate(const byte* pc, BranchTableOperand& operand,
231 size_t block_depth) { 242 size_t block_depth) {
232 // TODO(titzer): add extra redundant validation for br_table here? 243 if (operand.arity > 1) {
244 error(pc, pc + 1, "invalid arity for break");
245 return false;
246 }
247 // Verify table.
248 for (uint32_t i = 0; i < operand.table_count + 1; ++i) {
249 uint32_t target = operand.read_entry(this, i);
250 if (target >= block_depth) {
251 error(operand.table + i * 2, "improper branch in br_table");
252 return false;
253 }
254 }
233 return true; 255 return true;
234 } 256 }
235 257
258 unsigned OpcodeArity(const byte* pc) {
259 #define DECLARE_ARITY(name, ...) \
260 static const LocalType kTypes_##name[] = {__VA_ARGS__}; \
261 static const int kArity_##name = \
262 static_cast<int>(arraysize(kTypes_##name) - 1);
263
264 FOREACH_SIGNATURE(DECLARE_ARITY);
265 #undef DECLARE_ARITY
266
267 switch (static_cast<WasmOpcode>(*pc)) {
268 case kExprI8Const:
269 case kExprI32Const:
270 case kExprI64Const:
271 case kExprF64Const:
272 case kExprF32Const:
273 case kExprGetLocal:
274 case kExprGetGlobal:
275 case kExprNop:
276 case kExprUnreachable:
277 case kExprEnd:
278 case kExprBlock:
279 case kExprThrow:
280 case kExprTry:
281 case kExprLoop:
282 return 0;
283
284 case kExprSetGlobal:
285 case kExprSetLocal:
286 case kExprElse:
287 case kExprCatch:
288 return 1;
289
290 case kExprBr: {
291 BreakDepthOperand operand(this, pc);
292 return operand.arity;
293 }
294 case kExprBrIf: {
295 BreakDepthOperand operand(this, pc);
296 return 1 + operand.arity;
297 }
298 case kExprBrTable: {
299 BranchTableOperand operand(this, pc);
300 return 1 + operand.arity;
301 }
302
303 case kExprIf:
304 return 1;
305 case kExprSelect:
306 return 3;
307
308 case kExprCallFunction: {
309 CallFunctionOperand operand(this, pc);
310 return operand.arity;
311 }
312 case kExprCallIndirect: {
313 CallIndirectOperand operand(this, pc);
314 return 1 + operand.arity;
315 }
316 case kExprCallImport: {
317 CallImportOperand operand(this, pc);
318 return operand.arity;
319 }
320 case kExprReturn: {
321 ReturnArityOperand operand(this, pc);
322 return operand.arity;
323 }
324
325 #define DECLARE_OPCODE_CASE(name, opcode, sig) \
326 case kExpr##name: \
327 return kArity_##sig;
328
329 FOREACH_LOAD_MEM_OPCODE(DECLARE_OPCODE_CASE)
330 FOREACH_STORE_MEM_OPCODE(DECLARE_OPCODE_CASE)
331 FOREACH_MISC_MEM_OPCODE(DECLARE_OPCODE_CASE)
332 FOREACH_SIMPLE_OPCODE(DECLARE_OPCODE_CASE)
333 FOREACH_SIMPLE_MEM_OPCODE(DECLARE_OPCODE_CASE)
334 FOREACH_ASMJS_COMPAT_OPCODE(DECLARE_OPCODE_CASE)
335 FOREACH_SIMD_0_OPERAND_OPCODE(DECLARE_OPCODE_CASE)
336 #undef DECLARE_OPCODE_CASE
337 #define DECLARE_OPCODE_CASE(name, opcode, sig) case kExpr##name:
338 FOREACH_SIMD_1_OPERAND_OPCODE(DECLARE_OPCODE_CASE)
339 #undef DECLARE_OPCODE_CASE
340 return 1;
341 default:
342 UNREACHABLE();
343 return 0;
344 }
345 }
346
236 unsigned OpcodeLength(const byte* pc) { 347 unsigned OpcodeLength(const byte* pc) {
237 switch (static_cast<WasmOpcode>(*pc)) { 348 switch (static_cast<WasmOpcode>(*pc)) {
238 #define DECLARE_OPCODE_CASE(name, opcode, sig) case kExpr##name: 349 #define DECLARE_OPCODE_CASE(name, opcode, sig) case kExpr##name:
239 FOREACH_LOAD_MEM_OPCODE(DECLARE_OPCODE_CASE) 350 FOREACH_LOAD_MEM_OPCODE(DECLARE_OPCODE_CASE)
240 FOREACH_STORE_MEM_OPCODE(DECLARE_OPCODE_CASE) 351 FOREACH_STORE_MEM_OPCODE(DECLARE_OPCODE_CASE)
241 #undef DECLARE_OPCODE_CASE 352 #undef DECLARE_OPCODE_CASE
242 { 353 {
243 MemoryAccessOperand operand(this, pc, UINT32_MAX); 354 MemoryAccessOperand operand(this, pc, UINT32_MAX);
244 return 1 + operand.length; 355 return 1 + operand.length;
245 } 356 }
246 case kExprBr: 357 case kExprBr:
247 case kExprBrIf: { 358 case kExprBrIf: {
248 BreakDepthOperand operand(this, pc); 359 BreakDepthOperand operand(this, pc);
249 return 1 + operand.length; 360 return 1 + operand.length;
250 } 361 }
251 case kExprSetGlobal: 362 case kExprSetGlobal:
252 case kExprGetGlobal: { 363 case kExprGetGlobal: {
253 GlobalIndexOperand operand(this, pc); 364 GlobalIndexOperand operand(this, pc);
254 return 1 + operand.length; 365 return 1 + operand.length;
255 } 366 }
256 367
257 case kExprCallFunction: { 368 case kExprCallFunction: {
258 CallFunctionOperand operand(this, pc); 369 CallFunctionOperand operand(this, pc);
259 return 1 + operand.length; 370 return 1 + operand.length;
260 } 371 }
261 case kExprCallIndirect: { 372 case kExprCallIndirect: {
262 CallIndirectOperand operand(this, pc); 373 CallIndirectOperand operand(this, pc);
263 return 1 + operand.length; 374 return 1 + operand.length;
264 } 375 }
265 376 case kExprCallImport: {
266 case kExprTry: 377 CallImportOperand operand(this, pc);
267 case kExprIf: // fall thru
268 case kExprLoop:
269 case kExprBlock: {
270 BlockTypeOperand operand(this, pc);
271 return 1 + operand.length; 378 return 1 + operand.length;
272 } 379 }
273 380
274 case kExprSetLocal: 381 case kExprSetLocal:
275 case kExprTeeLocal:
276 case kExprGetLocal: 382 case kExprGetLocal:
277 case kExprCatch: { 383 case kExprCatch: {
278 LocalIndexOperand operand(this, pc); 384 LocalIndexOperand operand(this, pc);
279 return 1 + operand.length; 385 return 1 + operand.length;
280 } 386 }
281 case kExprBrTable: { 387 case kExprBrTable: {
282 BranchTableOperand operand(this, pc); 388 BranchTableOperand operand(this, pc);
283 BranchTableIterator iterator(this, operand); 389 return 1 + operand.length;
284 return 1 + iterator.length();
285 } 390 }
286 case kExprI32Const: { 391 case kExprI32Const: {
287 ImmI32Operand operand(this, pc); 392 ImmI32Operand operand(this, pc);
288 return 1 + operand.length; 393 return 1 + operand.length;
289 } 394 }
290 case kExprI64Const: { 395 case kExprI64Const: {
291 ImmI64Operand operand(this, pc); 396 ImmI64Operand operand(this, pc);
292 return 1 + operand.length; 397 return 1 + operand.length;
293 } 398 }
294 case kExprI8Const: 399 case kExprI8Const:
295 return 2; 400 return 2;
296 case kExprF32Const: 401 case kExprF32Const:
297 return 5; 402 return 5;
298 case kExprF64Const: 403 case kExprF64Const:
299 return 9; 404 return 9;
405 case kExprReturn: {
406 ReturnArityOperand operand(this, pc);
407 return 1 + operand.length;
408 }
409 #define DECLARE_OPCODE_CASE(name, opcode, sig) case kExpr##name:
410 FOREACH_SIMD_0_OPERAND_OPCODE(DECLARE_OPCODE_CASE) { return 2; }
411 FOREACH_SIMD_1_OPERAND_OPCODE(DECLARE_OPCODE_CASE) { return 3; }
412 #undef DECLARE_OPCODE_CASE
300 default: 413 default:
301 return 1; 414 return 1;
302 } 415 }
303 } 416 }
304 }; 417 };
305 418
306 // The full WASM decoder for bytecode. Both verifies bytecode and generates 419 // The full WASM decoder for bytecode. Both verifies bytecode and generates
307 // a TurboFan IR graph. 420 // a TurboFan IR graph.
308 class WasmFullDecoder : public WasmDecoder { 421 class WasmFullDecoder : public WasmDecoder {
309 public: 422 public:
310 WasmFullDecoder(Zone* zone, TFBuilder* builder, const FunctionBody& body) 423 WasmFullDecoder(Zone* zone, TFBuilder* builder, const FunctionBody& body)
311 : WasmDecoder(body.module, body.sig, body.start, body.end), 424 : WasmDecoder(body.module, body.sig, body.start, body.end),
312 zone_(zone), 425 zone_(zone),
313 builder_(builder), 426 builder_(builder),
314 base_(body.base), 427 base_(body.base),
315 local_type_vec_(zone), 428 local_type_vec_(zone),
316 stack_(zone), 429 stack_(zone),
317 control_(zone), 430 control_(zone) {
318 last_end_found_(false) {
319 local_types_ = &local_type_vec_; 431 local_types_ = &local_type_vec_;
320 } 432 }
321 433
322 bool Decode() { 434 bool Decode() {
323 base::ElapsedTimer decode_timer; 435 base::ElapsedTimer decode_timer;
324 if (FLAG_trace_wasm_decode_time) { 436 if (FLAG_trace_wasm_decode_time) {
325 decode_timer.Start(); 437 decode_timer.Start();
326 } 438 }
327 stack_.clear(); 439 stack_.clear();
328 control_.clear(); 440 control_.clear();
329 441
330 if (end_ < pc_) { 442 if (end_ < pc_) {
331 error("function body end < start"); 443 error(pc_, "function body end < start");
332 return false; 444 return false;
333 } 445 }
334 446
335 DecodeLocalDecls(); 447 DecodeLocalDecls();
336 InitSsaEnv(); 448 InitSsaEnv();
337 DecodeFunctionBody(); 449 DecodeFunctionBody();
338 450
339 if (failed()) return TraceFailed(); 451 if (failed()) return TraceFailed();
340 452
341 #if IMPLICIT_FUNCTION_END
342 // With implicit end support (old style), the function block
343 // remains on the stack. Other control blocks are an error.
344 if (control_.size() > 1) {
345 error(pc_, control_.back().pc, "unterminated control structure");
346 return TraceFailed();
347 }
348
349 // Assume an implicit end to the function body block.
350 if (control_.size() == 1) {
351 Control* c = &control_.back();
352 if (ssa_env_->go()) {
353 FallThruTo(c);
354 }
355
356 if (c->end_env->go()) {
357 // Push the end values onto the stack.
358 stack_.resize(c->stack_depth);
359 if (c->merge.arity == 1) {
360 stack_.push_back(c->merge.vals.first);
361 } else {
362 for (unsigned i = 0; i < c->merge.arity; i++) {
363 stack_.push_back(c->merge.vals.array[i]);
364 }
365 }
366
367 TRACE(" @%-8d #xx:%-20s|", startrel(pc_), "ImplicitReturn");
368 SetEnv("function:end", c->end_env);
369 DoReturn();
370 TRACE("\n");
371 }
372 }
373 #else
374 if (!control_.empty()) { 453 if (!control_.empty()) {
375 error(pc_, control_.back().pc, "unterminated control structure"); 454 error(pc_, control_.back().pc, "unterminated control structure");
376 return TraceFailed(); 455 return TraceFailed();
377 } 456 }
378 457
379 if (!last_end_found_) { 458 if (ssa_env_->go()) {
380 error("function body must end with \"end\" opcode."); 459 TRACE(" @%-8d #xx:%-20s|", startrel(pc_), "ImplicitReturn");
381 return false; 460 DoReturn();
461 if (failed()) return TraceFailed();
462 TRACE("\n");
382 } 463 }
383 #endif
384 464
385 if (FLAG_trace_wasm_decode_time) { 465 if (FLAG_trace_wasm_decode_time) {
386 double ms = decode_timer.Elapsed().InMillisecondsF(); 466 double ms = decode_timer.Elapsed().InMillisecondsF();
387 PrintF("wasm-decode %s (%0.3f ms)\n\n", ok() ? "ok" : "failed", ms); 467 PrintF("wasm-decode ok (%0.3f ms)\n\n", ms);
388 } else { 468 } else {
389 TRACE("wasm-decode %s\n\n", ok() ? "ok" : "failed"); 469 TRACE("wasm-decode ok\n\n");
390 } 470 }
391 471
392 return true; 472 return true;
393 } 473 }
394 474
395 bool TraceFailed() { 475 bool TraceFailed() {
396 TRACE("wasm-error module+%-6d func+%d: %s\n\n", baserel(error_pc_), 476 TRACE("wasm-error module+%-6d func+%d: %s\n\n", baserel(error_pc_),
397 startrel(error_pc_), error_msg_.get()); 477 startrel(error_pc_), error_msg_.get());
398 return false; 478 return false;
399 } 479 }
(...skipping 32 matching lines...)
432 512
433 Zone* zone_; 513 Zone* zone_;
434 TFBuilder* builder_; 514 TFBuilder* builder_;
435 const byte* base_; 515 const byte* base_;
436 516
437 SsaEnv* ssa_env_; 517 SsaEnv* ssa_env_;
438 518
439 ZoneVector<LocalType> local_type_vec_; // types of local variables. 519 ZoneVector<LocalType> local_type_vec_; // types of local variables.
440 ZoneVector<Value> stack_; // stack of values. 520 ZoneVector<Value> stack_; // stack of values.
441 ZoneVector<Control> control_; // stack of blocks, loops, and ifs. 521 ZoneVector<Control> control_; // stack of blocks, loops, and ifs.
442 bool last_end_found_;
443 522
444 inline bool build() { return builder_ && ssa_env_->go(); } 523 inline bool build() { return builder_ && ssa_env_->go(); }
445 524
446 void InitSsaEnv() { 525 void InitSsaEnv() {
447 TFNode* start = nullptr; 526 TFNode* start = nullptr;
448 SsaEnv* ssa_env = reinterpret_cast<SsaEnv*>(zone_->New(sizeof(SsaEnv))); 527 SsaEnv* ssa_env = reinterpret_cast<SsaEnv*>(zone_->New(sizeof(SsaEnv)));
449 size_t size = sizeof(TFNode*) * EnvironmentCount(); 528 size_t size = sizeof(TFNode*) * EnvironmentCount();
450 ssa_env->state = SsaEnv::kReached; 529 ssa_env->state = SsaEnv::kReached;
451 ssa_env->locals = 530 ssa_env->locals =
452 size > 0 ? reinterpret_cast<TFNode**>(zone_->New(size)) : nullptr; 531 size > 0 ? reinterpret_cast<TFNode**>(zone_->New(size)) : nullptr;
(...skipping 100 matching lines...)
553 total_locals_ = local_type_vec_.size(); 632 total_locals_ = local_type_vec_.size();
554 } 633 }
555 634
556 // Decodes the body of a function. 635 // Decodes the body of a function.
557 void DecodeFunctionBody() { 636 void DecodeFunctionBody() {
558 TRACE("wasm-decode %p...%p (module+%d, %d bytes) %s\n", 637 TRACE("wasm-decode %p...%p (module+%d, %d bytes) %s\n",
559 reinterpret_cast<const void*>(start_), 638 reinterpret_cast<const void*>(start_),
560 reinterpret_cast<const void*>(limit_), baserel(pc_), 639 reinterpret_cast<const void*>(limit_), baserel(pc_),
561 static_cast<int>(limit_ - start_), builder_ ? "graph building" : ""); 640 static_cast<int>(limit_ - start_), builder_ ? "graph building" : "");
562 641
563 {
564 // Set up initial function block.
565 SsaEnv* break_env = ssa_env_;
566 SetEnv("initial env", Steal(break_env));
567 PushBlock(break_env);
568 Control* c = &control_.back();
569 c->merge.arity = static_cast<uint32_t>(sig_->return_count());
570
571 if (c->merge.arity == 1) {
572 c->merge.vals.first = {pc_, nullptr, sig_->GetReturn(0)};
573 } else if (c->merge.arity > 1) {
574 c->merge.vals.array = zone_->NewArray<Value>(c->merge.arity);
575 for (unsigned i = 0; i < c->merge.arity; i++) {
576 c->merge.vals.array[i] = {pc_, nullptr, sig_->GetReturn(i)};
577 }
578 }
579 }
580
581 if (pc_ >= limit_) return; // Nothing to do. 642 if (pc_ >= limit_) return; // Nothing to do.
582 643
583 while (true) { // decoding loop. 644 while (true) { // decoding loop.
584 unsigned len = 1; 645 unsigned len = 1;
585 WasmOpcode opcode = static_cast<WasmOpcode>(*pc_); 646 WasmOpcode opcode = static_cast<WasmOpcode>(*pc_);
586 if (!WasmOpcodes::IsPrefixOpcode(opcode)) { 647 if (!WasmOpcodes::IsPrefixOpcode(opcode)) {
587 TRACE(" @%-8d #%02x:%-20s|", startrel(pc_), opcode, 648 TRACE(" @%-8d #%02x:%-20s|", startrel(pc_), opcode,
588 WasmOpcodes::ShortOpcodeName(opcode)); 649 WasmOpcodes::ShortOpcodeName(opcode));
589 } 650 }
590 651
591 FunctionSig* sig = WasmOpcodes::Signature(opcode); 652 FunctionSig* sig = WasmOpcodes::Signature(opcode);
592 if (sig) { 653 if (sig) {
593 BuildSimpleOperator(opcode, sig); 654 BuildSimpleOperator(opcode, sig);
594 } else { 655 } else {
595 // Complex bytecode. 656 // Complex bytecode.
596 switch (opcode) { 657 switch (opcode) {
597 case kExprNop: 658 case kExprNop:
659 Push(kAstStmt, nullptr);
598 break; 660 break;
599 case kExprBlock: { 661 case kExprBlock: {
600 // The break environment is the outer environment. 662 // The break environment is the outer environment.
601 BlockTypeOperand operand(this, pc_);
602 SsaEnv* break_env = ssa_env_; 663 SsaEnv* break_env = ssa_env_;
603 PushBlock(break_env); 664 PushBlock(break_env);
604 SetEnv("block:start", Steal(break_env)); 665 SetEnv("block:start", Steal(break_env));
605 SetBlockType(&control_.back(), operand);
606 len = 1 + operand.length;
607 break; 666 break;
608 } 667 }
609 case kExprThrow: { 668 case kExprThrow: {
610 CHECK_PROTOTYPE_OPCODE(wasm_eh_prototype); 669 CHECK_PROTOTYPE_OPCODE(wasm_eh_prototype);
611 Value value = Pop(0, kAstI32); 670 Value value = Pop(0, kAstI32);
612 BUILD(Throw, value.node); 671 BUILD(Throw, value.node);
613 break; 672 break;
614 } 673 }
615 case kExprTry: { 674 case kExprTry: {
616 CHECK_PROTOTYPE_OPCODE(wasm_eh_prototype); 675 CHECK_PROTOTYPE_OPCODE(wasm_eh_prototype);
617 BlockTypeOperand operand(this, pc_);
618 SsaEnv* outer_env = ssa_env_; 676 SsaEnv* outer_env = ssa_env_;
619 SsaEnv* try_env = Steal(outer_env); 677 SsaEnv* try_env = Steal(outer_env);
620 SsaEnv* catch_env = Split(try_env); 678 SsaEnv* catch_env = Split(try_env);
621 PushTry(outer_env, catch_env); 679 PushTry(outer_env, catch_env);
622 SetEnv("try_catch:start", try_env); 680 SetEnv("try:start", try_env);
623 SetBlockType(&control_.back(), operand);
624 len = 1 + operand.length;
625 break; 681 break;
626 } 682 }
627 case kExprCatch: { 683 case kExprCatch: {
628 CHECK_PROTOTYPE_OPCODE(wasm_eh_prototype); 684 CHECK_PROTOTYPE_OPCODE(wasm_eh_prototype);
629 LocalIndexOperand operand(this, pc_); 685 LocalIndexOperand operand(this, pc_);
630 len = 1 + operand.length; 686 len = 1 + operand.length;
631 687
632 if (control_.empty()) { 688 if (control_.empty()) {
633 error("catch does not match any try"); 689 error(pc_, "catch does not match a any try");
634 break; 690 break;
635 } 691 }
636 692
637 Control* c = &control_.back(); 693 Control* c = &control_.back();
638 if (!c->is_try()) { 694 if (!c->is_try()) {
639 error("catch does not match any try"); 695 error(pc_, "catch does not match a try");
640 break; 696 break;
641 } 697 }
642 698
643 if (c->catch_env == nullptr) { 699 if (c->catch_env == nullptr) {
644 error("catch already present for try with catch"); 700 error(pc_, "catch already present for try with catch");
645 break; 701 break;
646 } 702 }
647 703
648 Goto(ssa_env_, c->end_env); 704 Goto(ssa_env_, c->end_env);
649 705
650 SsaEnv* catch_env = c->catch_env; 706 SsaEnv* catch_env = c->catch_env;
651 c->catch_env = nullptr; 707 c->catch_env = nullptr;
652 SetEnv("catch:begin", catch_env); 708 SetEnv("catch:begin", catch_env);
653 709
654 if (Validate(pc_, operand)) { 710 if (Validate(pc_, operand)) {
655 // TODO(jpp): figure out how thrown value is propagated. It is 711 // TODO(jpp): figure out how thrown value is propagated. It is
656 // unlikely to be a value on the stack. 712 // unlikely to be a value on the stack.
657 if (ssa_env_->locals) { 713 if (ssa_env_->locals) {
658 ssa_env_->locals[operand.index] = nullptr; 714 ssa_env_->locals[operand.index] = nullptr;
659 } 715 }
660 } 716 }
661 717
662 PopUpTo(c->stack_depth); 718 PopUpTo(c->stack_depth);
719
663 break; 720 break;
664 } 721 }
665 case kExprLoop: { 722 case kExprLoop: {
666 BlockTypeOperand operand(this, pc_); 723 // The break environment is the outer environment.
667 SsaEnv* finish_try_env = Steal(ssa_env_); 724 SsaEnv* break_env = ssa_env_;
725 PushBlock(break_env);
726 SsaEnv* finish_try_env = Steal(break_env);
668 // The continue environment is the inner environment. 727 // The continue environment is the inner environment.
669 PrepareForLoop(pc_, finish_try_env); 728 PrepareForLoop(pc_, finish_try_env);
670 SetEnv("loop:start", Split(finish_try_env)); 729 SetEnv("loop:start", Split(finish_try_env));
671 ssa_env_->SetNotMerged(); 730 ssa_env_->SetNotMerged();
672 PushLoop(finish_try_env); 731 PushLoop(finish_try_env);
673 SetBlockType(&control_.back(), operand);
674 len = 1 + operand.length;
675 break; 732 break;
676 } 733 }
677 case kExprIf: { 734 case kExprIf: {
678 // Condition on top of stack. Split environments for branches. 735 // Condition on top of stack. Split environments for branches.
679 BlockTypeOperand operand(this, pc_);
680 Value cond = Pop(0, kAstI32); 736 Value cond = Pop(0, kAstI32);
681 TFNode* if_true = nullptr; 737 TFNode* if_true = nullptr;
682 TFNode* if_false = nullptr; 738 TFNode* if_false = nullptr;
683 BUILD(Branch, cond.node, &if_true, &if_false); 739 BUILD(Branch, cond.node, &if_true, &if_false);
684 SsaEnv* end_env = ssa_env_; 740 SsaEnv* end_env = ssa_env_;
685 SsaEnv* false_env = Split(ssa_env_); 741 SsaEnv* false_env = Split(ssa_env_);
686 false_env->control = if_false; 742 false_env->control = if_false;
687 SsaEnv* true_env = Steal(ssa_env_); 743 SsaEnv* true_env = Steal(ssa_env_);
688 true_env->control = if_true; 744 true_env->control = if_true;
689 PushIf(end_env, false_env); 745 PushIf(end_env, false_env);
690 SetEnv("if:true", true_env); 746 SetEnv("if:true", true_env);
691 SetBlockType(&control_.back(), operand);
692 len = 1 + operand.length;
693 break; 747 break;
694 } 748 }
695 case kExprElse: { 749 case kExprElse: {
696 if (control_.empty()) { 750 if (control_.empty()) {
697 error("else does not match any if"); 751 error(pc_, "else does not match any if");
698 break; 752 break;
699 } 753 }
700 Control* c = &control_.back(); 754 Control* c = &control_.back();
701 if (!c->is_if()) { 755 if (!c->is_if()) {
702 error(pc_, c->pc, "else does not match an if"); 756 error(pc_, c->pc, "else does not match an if");
703 break; 757 break;
704 } 758 }
705 if (c->false_env == nullptr) { 759 if (c->false_env == nullptr) {
706 error(pc_, c->pc, "else already present for if"); 760 error(pc_, c->pc, "else already present for if");
707 break; 761 break;
708 } 762 }
709 FallThruTo(c); 763 Value val = PopUpTo(c->stack_depth);
764 MergeInto(c->end_env, &c->node, &c->type, val);
710 // Switch to environment for false branch. 765 // Switch to environment for false branch.
711 stack_.resize(c->stack_depth);
712 SetEnv("if_else:false", c->false_env); 766 SetEnv("if_else:false", c->false_env);
713 c->false_env = nullptr; // record that an else is already seen 767 c->false_env = nullptr; // record that an else is already seen
714 break; 768 break;
715 } 769 }
716 case kExprEnd: { 770 case kExprEnd: {
717 if (control_.empty()) { 771 if (control_.empty()) {
718 error("end does not match any if, try, or block"); 772 error(pc_, "end does not match any if, try, or block");
719 return; 773 break;
720 } 774 }
721 const char* name = "block:end"; 775 const char* name = "block:end";
722 Control* c = &control_.back(); 776 Control* c = &control_.back();
723 if (c->is_loop()) { 777 Value val = PopUpTo(c->stack_depth);
724 // A loop just leaves the values on the stack. 778 if (c->is_loop) {
725 TypeCheckLoopFallThru(c); 779 // Loops always push control in pairs.
726 PopControl(); 780 PopControl();
727 SetEnv("loop:end", ssa_env_); 781 c = &control_.back();
728 break; 782 name = "loop:end";
729 } 783 } else if (c->is_if()) {
730 if (c->is_if()) {
731 if (c->false_env != nullptr) { 784 if (c->false_env != nullptr) {
732 // End the true branch of a one-armed if. 785 // End the true branch of a one-armed if.
733 Goto(c->false_env, c->end_env); 786 Goto(c->false_env, c->end_env);
734 if (ssa_env_->go() && stack_.size() != c->stack_depth) { 787 val = {val.pc, nullptr, kAstStmt};
735 error("end of if expected empty stack");
736 stack_.resize(c->stack_depth);
737 }
738 if (c->merge.arity > 0) {
739 error("non-void one-armed if");
740 }
741 name = "if:merge"; 788 name = "if:merge";
742 } else { 789 } else {
743 // End the false branch of a two-armed if. 790 // End the false branch of a two-armed if.
744 name = "if_else:merge"; 791 name = "if_else:merge";
745 } 792 }
746 } else if (c->is_try()) { 793 } else if (c->is_try()) {
747 name = "try:end"; 794 name = "try:end";
748 795
749 // validate that catch was seen. 796 // validate that catch was seen.
750 if (c->catch_env != nullptr) { 797 if (c->catch_env != nullptr) {
751 error("missing catch in try"); 798 error(pc_, "missing catch in try");
752 break; 799 break;
753 } 800 }
754 } 801 }
755 FallThruTo(c); 802
803 if (ssa_env_->go()) {
804 // Adds a fallthrough edge to the next control block.
805 MergeInto(c->end_env, &c->node, &c->type, val);
806 }
756 SetEnv(name, c->end_env); 807 SetEnv(name, c->end_env);
757
758 // Push the end values onto the stack.
759 stack_.resize(c->stack_depth); 808 stack_.resize(c->stack_depth);
760 if (c->merge.arity == 1) { 809 Push(c->type, c->node);
761 stack_.push_back(c->merge.vals.first);
762 } else {
763 for (unsigned i = 0; i < c->merge.arity; i++) {
764 stack_.push_back(c->merge.vals.array[i]);
765 }
766 }
767
768 PopControl(); 810 PopControl();
769
770 if (control_.empty()) {
771 // If the last (implicit) control was popped, check we are at end.
772 if (pc_ + 1 != end_) {
773 error(pc_, pc_ + 1, "trailing code after function end");
774 }
775 last_end_found_ = true;
776 if (ssa_env_->go()) {
777 // The result of the block is the return value.
778 TRACE(" @%-8d #xx:%-20s|", startrel(pc_), "ImplicitReturn");
779 DoReturn();
780 TRACE("\n");
781 }
782 return;
783 }
784 break; 811 break;
785 } 812 }
786 case kExprSelect: { 813 case kExprSelect: {
787 Value cond = Pop(2, kAstI32); 814 Value cond = Pop(2, kAstI32);
788 Value fval = Pop(); 815 Value fval = Pop();
789 Value tval = Pop(); 816 Value tval = Pop();
790 if (tval.type == kAstStmt || tval.type != fval.type) { 817 if (tval.type == kAstStmt || tval.type != fval.type) {
791 if (tval.type != kAstEnd && fval.type != kAstEnd) { 818 if (tval.type != kAstEnd && fval.type != kAstEnd) {
792 error("type mismatch in select"); 819 error(pc_, "type mismatch in select");
793 break; 820 break;
794 } 821 }
795 } 822 }
796 if (build()) { 823 if (build()) {
797 DCHECK(tval.type != kAstEnd); 824 DCHECK(tval.type != kAstEnd);
798 DCHECK(fval.type != kAstEnd); 825 DCHECK(fval.type != kAstEnd);
799 DCHECK(cond.type != kAstEnd); 826 DCHECK(cond.type != kAstEnd);
800 TFNode* controls[2]; 827 TFNode* controls[2];
801 builder_->Branch(cond.node, &controls[0], &controls[1]); 828 builder_->Branch(cond.node, &controls[0], &controls[1]);
802 TFNode* merge = builder_->Merge(2, controls); 829 TFNode* merge = builder_->Merge(2, controls);
803 TFNode* vals[2] = {tval.node, fval.node}; 830 TFNode* vals[2] = {tval.node, fval.node};
804 TFNode* phi = builder_->Phi(tval.type, 2, vals, merge); 831 TFNode* phi = builder_->Phi(tval.type, 2, vals, merge);
805 Push(tval.type, phi); 832 Push(tval.type, phi);
806 ssa_env_->control = merge; 833 ssa_env_->control = merge;
807 } else { 834 } else {
808 Push(tval.type, nullptr); 835 Push(tval.type, nullptr);
809 } 836 }
810 break; 837 break;
811 } 838 }
812 case kExprBr: { 839 case kExprBr: {
813 BreakDepthOperand operand(this, pc_); 840 BreakDepthOperand operand(this, pc_);
841 Value val = {pc_, nullptr, kAstStmt};
842 if (operand.arity) val = Pop();
814 if (Validate(pc_, operand, control_)) { 843 if (Validate(pc_, operand, control_)) {
815 BreakTo(operand.depth); 844 BreakTo(operand.target, val);
816 } 845 }
817 len = 1 + operand.length; 846 len = 1 + operand.length;
818 EndControl(); 847 Push(kAstEnd, nullptr);
819 break; 848 break;
820 } 849 }
821 case kExprBrIf: { 850 case kExprBrIf: {
822 BreakDepthOperand operand(this, pc_); 851 BreakDepthOperand operand(this, pc_);
823 Value cond = Pop(0, kAstI32); 852 Value cond = Pop(operand.arity, kAstI32);
853 Value val = {pc_, nullptr, kAstStmt};
854 if (operand.arity == 1) val = Pop();
824 if (ok() && Validate(pc_, operand, control_)) { 855 if (ok() && Validate(pc_, operand, control_)) {
825 SsaEnv* fenv = ssa_env_; 856 SsaEnv* fenv = ssa_env_;
826 SsaEnv* tenv = Split(fenv); 857 SsaEnv* tenv = Split(fenv);
827 fenv->SetNotMerged(); 858 fenv->SetNotMerged();
828 BUILD(Branch, cond.node, &tenv->control, &fenv->control); 859 BUILD(Branch, cond.node, &tenv->control, &fenv->control);
829 ssa_env_ = tenv; 860 ssa_env_ = tenv;
830 BreakTo(operand.depth); 861 BreakTo(operand.target, val);
831 ssa_env_ = fenv; 862 ssa_env_ = fenv;
832 } 863 }
833 len = 1 + operand.length; 864 len = 1 + operand.length;
865 Push(kAstStmt, nullptr);
834 break; 866 break;
835 } 867 }
836 case kExprBrTable: { 868 case kExprBrTable: {
837 BranchTableOperand operand(this, pc_); 869 BranchTableOperand operand(this, pc_);
838 BranchTableIterator iterator(this, operand);
839 if (Validate(pc_, operand, control_.size())) { 870 if (Validate(pc_, operand, control_.size())) {
840 Value key = Pop(0, kAstI32); 871 Value key = Pop(operand.arity, kAstI32);
872 Value val = {pc_, nullptr, kAstStmt};
873 if (operand.arity == 1) val = Pop();
841 if (failed()) break; 874 if (failed()) break;
842 875
843 SsaEnv* break_env = ssa_env_; 876 SsaEnv* break_env = ssa_env_;
844 if (operand.table_count > 0) { 877 if (operand.table_count > 0) {
845 // Build branches to the various blocks based on the table. 878 // Build branches to the various blocks based on the table.
846 TFNode* sw = BUILD(Switch, operand.table_count + 1, key.node); 879 TFNode* sw = BUILD(Switch, operand.table_count + 1, key.node);
847 880
848 SsaEnv* copy = Steal(break_env); 881 SsaEnv* copy = Steal(break_env);
849 ssa_env_ = copy; 882 ssa_env_ = copy;
850 while (iterator.has_next()) { 883 for (uint32_t i = 0; i < operand.table_count + 1; ++i) {
851 uint32_t i = iterator.cur_index(); 884 uint16_t target = operand.read_entry(this, i);
852 const byte* pos = iterator.pc();
853 uint32_t target = iterator.next();
854 if (target >= control_.size()) {
855 error(pos, "improper branch in br_table");
856 break;
857 }
858 ssa_env_ = Split(copy); 885 ssa_env_ = Split(copy);
859 ssa_env_->control = (i == operand.table_count) 886 ssa_env_->control = (i == operand.table_count)
860 ? BUILD(IfDefault, sw) 887 ? BUILD(IfDefault, sw)
861 : BUILD(IfValue, i, sw); 888 : BUILD(IfValue, i, sw);
862 BreakTo(target); 889 int depth = target;
890 Control* c = &control_[control_.size() - depth - 1];
891 MergeInto(c->end_env, &c->node, &c->type, val);
863 } 892 }
864 } else { 893 } else {
865 // Only a default target. Do the equivalent of br. 894 // Only a default target. Do the equivalent of br.
866 const byte* pos = iterator.pc(); 895 uint16_t target = operand.read_entry(this, 0);
867 uint32_t target = iterator.next(); 896 int depth = target;
868 if (target >= control_.size()) { 897 Control* c = &control_[control_.size() - depth - 1];
869 error(pos, "improper branch in br_table"); 898 MergeInto(c->end_env, &c->node, &c->type, val);
870 break;
871 }
872 BreakTo(target);
873 } 899 }
874 // br_table ends the control flow like br. 900 // br_table ends the control flow like br.
875 ssa_env_ = break_env; 901 ssa_env_ = break_env;
902 Push(kAstStmt, nullptr);
876 } 903 }
877 len = 1 + iterator.length(); 904 len = 1 + operand.length;
878 break; 905 break;
879 } 906 }
880 case kExprReturn: { 907 case kExprReturn: {
908 ReturnArityOperand operand(this, pc_);
909 if (operand.arity != sig_->return_count()) {
910 error(pc_, pc_ + 1, "arity mismatch in return");
911 }
881 DoReturn(); 912 DoReturn();
913 len = 1 + operand.length;
882 break; 914 break;
883 } 915 }
884 case kExprUnreachable: { 916 case kExprUnreachable: {
885 BUILD(Unreachable, position()); 917 Push(kAstEnd, BUILD(Unreachable, position()));
886 EndControl(); 918 ssa_env_->Kill(SsaEnv::kControlEnd);
887 break; 919 break;
888 } 920 }
889 case kExprI8Const: { 921 case kExprI8Const: {
890 ImmI8Operand operand(this, pc_); 922 ImmI8Operand operand(this, pc_);
891 Push(kAstI32, BUILD(Int32Constant, operand.value)); 923 Push(kAstI32, BUILD(Int32Constant, operand.value));
892 len = 1 + operand.length; 924 len = 1 + operand.length;
893 break; 925 break;
894 } 926 }
895 case kExprI32Const: { 927 case kExprI32Const: {
896 ImmI32Operand operand(this, pc_); 928 ImmI32Operand operand(this, pc_);
(...skipping 29 matching lines...)
926 } 958 }
927 } 959 }
928 len = 1 + operand.length; 960 len = 1 + operand.length;
929 break; 961 break;
930 } 962 }
931 case kExprSetLocal: { 963 case kExprSetLocal: {
932 LocalIndexOperand operand(this, pc_); 964 LocalIndexOperand operand(this, pc_);
933 if (Validate(pc_, operand)) { 965 if (Validate(pc_, operand)) {
934 Value val = Pop(0, local_type_vec_[operand.index]); 966 Value val = Pop(0, local_type_vec_[operand.index]);
935 if (ssa_env_->locals) ssa_env_->locals[operand.index] = val.node; 967 if (ssa_env_->locals) ssa_env_->locals[operand.index] = val.node;
936 }
937 len = 1 + operand.length;
938 break;
939 }
940 case kExprTeeLocal: {
941 LocalIndexOperand operand(this, pc_);
942 if (Validate(pc_, operand)) {
943 Value val = Pop(0, local_type_vec_[operand.index]);
944 if (ssa_env_->locals) ssa_env_->locals[operand.index] = val.node;
945 Push(val.type, val.node); 968 Push(val.type, val.node);
946 } 969 }
947 len = 1 + operand.length; 970 len = 1 + operand.length;
948 break; 971 break;
949 } 972 }
950 case kExprDrop: {
951 Pop();
952 break;
953 }
954 case kExprGetGlobal: { 973 case kExprGetGlobal: {
955 GlobalIndexOperand operand(this, pc_); 974 GlobalIndexOperand operand(this, pc_);
956 if (Validate(pc_, operand)) { 975 if (Validate(pc_, operand)) {
957 Push(operand.type, BUILD(GetGlobal, operand.index)); 976 Push(operand.type, BUILD(GetGlobal, operand.index));
958 } 977 }
959 len = 1 + operand.length; 978 len = 1 + operand.length;
960 break; 979 break;
961 } 980 }
962 case kExprSetGlobal: { 981 case kExprSetGlobal: {
963 GlobalIndexOperand operand(this, pc_); 982 GlobalIndexOperand operand(this, pc_);
964 if (Validate(pc_, operand)) { 983 if (Validate(pc_, operand)) {
965 if (operand.global->mutability) { 984 Value val = Pop(0, operand.type);
966 Value val = Pop(0, operand.type); 985 BUILD(SetGlobal, operand.index, val.node);
967 BUILD(SetGlobal, operand.index, val.node); 986 Push(val.type, val.node);
968 } else {
969 error(pc_, pc_ + 1, "immutable global #%u cannot be assigned",
970 operand.index);
971 }
972 } 987 }
973 len = 1 + operand.length; 988 len = 1 + operand.length;
974 break; 989 break;
975 } 990 }
976 case kExprI32LoadMem8S: 991 case kExprI32LoadMem8S:
977 len = DecodeLoadMem(kAstI32, MachineType::Int8()); 992 len = DecodeLoadMem(kAstI32, MachineType::Int8());
978 break; 993 break;
979 case kExprI32LoadMem8U: 994 case kExprI32LoadMem8U:
980 len = DecodeLoadMem(kAstI32, MachineType::Uint8()); 995 len = DecodeLoadMem(kAstI32, MachineType::Uint8());
981 break; 996 break;
982 case kExprI32LoadMem16S: 997 case kExprI32LoadMem16S:
983 len = DecodeLoadMem(kAstI32, MachineType::Int16()); 998 len = DecodeLoadMem(kAstI32, MachineType::Int16());
984 break; 999 break;
985 case kExprI32LoadMem16U: 1000 case kExprI32LoadMem16U:
986 len = DecodeLoadMem(kAstI32, MachineType::Uint16()); 1001 len = DecodeLoadMem(kAstI32, MachineType::Uint16());
987 break; 1002 break;
988 case kExprI32LoadMem: 1003 case kExprI32LoadMem:
989 len = DecodeLoadMem(kAstI32, MachineType::Int32()); 1004 len = DecodeLoadMem(kAstI32, MachineType::Int32());
990 break; 1005 break;
1006
991 case kExprI64LoadMem8S: 1007 case kExprI64LoadMem8S:
992 len = DecodeLoadMem(kAstI64, MachineType::Int8()); 1008 len = DecodeLoadMem(kAstI64, MachineType::Int8());
993 break; 1009 break;
994 case kExprI64LoadMem8U: 1010 case kExprI64LoadMem8U:
995 len = DecodeLoadMem(kAstI64, MachineType::Uint8()); 1011 len = DecodeLoadMem(kAstI64, MachineType::Uint8());
996 break; 1012 break;
997 case kExprI64LoadMem16S: 1013 case kExprI64LoadMem16S:
998 len = DecodeLoadMem(kAstI64, MachineType::Int16()); 1014 len = DecodeLoadMem(kAstI64, MachineType::Int16());
999 break; 1015 break;
1000 case kExprI64LoadMem16U: 1016 case kExprI64LoadMem16U:
(...skipping 43 matching lines...)
1044 break; 1060 break;
1045 case kExprGrowMemory: 1061 case kExprGrowMemory:
1046 if (module_->origin != kAsmJsOrigin) { 1062 if (module_->origin != kAsmJsOrigin) {
1047 Value val = Pop(0, kAstI32); 1063 Value val = Pop(0, kAstI32);
1048 Push(kAstI32, BUILD(GrowMemory, val.node)); 1064 Push(kAstI32, BUILD(GrowMemory, val.node));
1049 } else { 1065 } else {
1050 error("grow_memory is not supported for asmjs modules"); 1066 error("grow_memory is not supported for asmjs modules");
1051 } 1067 }
1052 break; 1068 break;
1053 case kExprMemorySize: 1069 case kExprMemorySize:
1054 Push(kAstI32, BUILD(CurrentMemoryPages)); 1070 Push(kAstI32, BUILD(MemSize, 0));
1055 break; 1071 break;
1056 case kExprCallFunction: { 1072 case kExprCallFunction: {
1057 CallFunctionOperand operand(this, pc_); 1073 CallFunctionOperand operand(this, pc_);
1058 if (Validate(pc_, operand)) { 1074 if (Validate(pc_, operand)) {
1059 TFNode** buffer = PopArgs(operand.sig); 1075 TFNode** buffer = PopArgs(operand.sig);
1060 TFNode** rets = 1076 TFNode* call =
1061 BUILD(CallDirect, operand.index, buffer, position()); 1077 BUILD(CallDirect, operand.index, buffer, position());
1062 PushReturns(operand.sig, rets); 1078 Push(GetReturnType(operand.sig), call);
1063 } 1079 }
1064 len = 1 + operand.length; 1080 len = 1 + operand.length;
1065 break; 1081 break;
1066 } 1082 }
1067 case kExprCallIndirect: { 1083 case kExprCallIndirect: {
1068 CallIndirectOperand operand(this, pc_); 1084 CallIndirectOperand operand(this, pc_);
1069 if (Validate(pc_, operand)) { 1085 if (Validate(pc_, operand)) {
1086 TFNode** buffer = PopArgs(operand.sig);
1070 Value index = Pop(0, kAstI32); 1087 Value index = Pop(0, kAstI32);
1071 TFNode** buffer = PopArgs(operand.sig);
1072 if (buffer) buffer[0] = index.node; 1088 if (buffer) buffer[0] = index.node;
1073 TFNode** rets = 1089 TFNode* call =
1074 BUILD(CallIndirect, operand.index, buffer, position()); 1090 BUILD(CallIndirect, operand.index, buffer, position());
1075 PushReturns(operand.sig, rets); 1091 Push(GetReturnType(operand.sig), call);
1076 } 1092 }
1077 len = 1 + operand.length; 1093 len = 1 + operand.length;
1078 break; 1094 break;
1095 }
1096 case kExprCallImport: {
1097 CallImportOperand operand(this, pc_);
1098 if (Validate(pc_, operand)) {
1099 TFNode** buffer = PopArgs(operand.sig);
1100 TFNode* call =
1101 BUILD(CallImport, operand.index, buffer, position());
1102 Push(GetReturnType(operand.sig), call);
1103 }
1104 len = 1 + operand.length;
1105 break;
1079 } 1106 }
1080 case kSimdPrefix: { 1107 case kSimdPrefix: {
1081 CHECK_PROTOTYPE_OPCODE(wasm_simd_prototype); 1108 CHECK_PROTOTYPE_OPCODE(wasm_simd_prototype);
1082 len++; 1109 len++;
1083 byte simd_index = *(pc_ + 1); 1110 byte simd_index = *(pc_ + 1);
1084 opcode = static_cast<WasmOpcode>(opcode << 8 | simd_index); 1111 opcode = static_cast<WasmOpcode>(opcode << 8 | simd_index);
1085 TRACE(" @%-4d #%02x #%02x:%-20s|", startrel(pc_), kSimdPrefix, 1112 TRACE(" @%-4d #%02x #%02x:%-20s|", startrel(pc_), kSimdPrefix,
1086 simd_index, WasmOpcodes::ShortOpcodeName(opcode)); 1113 simd_index, WasmOpcodes::ShortOpcodeName(opcode));
1087 len += DecodeSimdOpcode(opcode); 1114 len += DecodeSimdOpcode(opcode);
1088 break; 1115 break;
1089 } 1116 }
1090 default: { 1117 default:
1091 // Deal with special asmjs opcodes. 1118 // Deal with special asmjs opcodes.
1092 if (module_ && module_->origin == kAsmJsOrigin) { 1119 if (module_->origin == kAsmJsOrigin) {
1093 sig = WasmOpcodes::AsmjsSignature(opcode); 1120 sig = WasmOpcodes::AsmjsSignature(opcode);
1094 if (sig) { 1121 if (sig) {
1095 BuildSimpleOperator(opcode, sig); 1122 BuildSimpleOperator(opcode, sig);
1096 } 1123 }
1097 } else { 1124 } else {
1098 error("Invalid opcode"); 1125 error("Invalid opcode");
1099 return; 1126 return;
1100 } 1127 }
1101 }
1102 } 1128 }
1103 } 1129 } // end complex bytecode
1104 1130
1105 #if DEBUG 1131 #if DEBUG
1106 if (FLAG_trace_wasm_decoder) { 1132 if (FLAG_trace_wasm_decoder) {
1107 for (size_t i = 0; i < stack_.size(); ++i) { 1133 for (size_t i = 0; i < stack_.size(); ++i) {
1108 Value& val = stack_[i]; 1134 Value& val = stack_[i];
1109 WasmOpcode opcode = static_cast<WasmOpcode>(*val.pc); 1135 WasmOpcode opcode = static_cast<WasmOpcode>(*val.pc);
1110 if (WasmOpcodes::IsPrefixOpcode(opcode)) { 1136 if (WasmOpcodes::IsPrefixOpcode(opcode)) {
1111 opcode = static_cast<WasmOpcode>(opcode << 8 | *(val.pc + 1)); 1137 opcode = static_cast<WasmOpcode>(opcode << 8 | *(val.pc + 1));
1112 } 1138 }
1113 PrintF(" %c@%d:%s", WasmOpcodes::ShortNameOf(val.type), 1139 PrintF(" %c@%d:%s", WasmOpcodes::ShortNameOf(val.type),
1114 static_cast<int>(val.pc - start_), 1140 static_cast<int>(val.pc - start_),
1115 WasmOpcodes::ShortOpcodeName(opcode)); 1141 WasmOpcodes::ShortOpcodeName(opcode));
1116 switch (opcode) { 1142 switch (opcode) {
1117 case kExprI32Const: { 1143 case kExprI32Const: {
1118 ImmI32Operand operand(this, val.pc); 1144 ImmI32Operand operand(this, val.pc);
1119 PrintF("[%d]", operand.value); 1145 PrintF("[%d]", operand.value);
1120 break; 1146 break;
1121 } 1147 }
1122 case kExprGetLocal: { 1148 case kExprGetLocal: {
1123 LocalIndexOperand operand(this, val.pc); 1149 LocalIndexOperand operand(this, val.pc);
1124 PrintF("[%u]", operand.index); 1150 PrintF("[%u]", operand.index);
1125 break; 1151 break;
1126 } 1152 }
1127 case kExprSetLocal: // fallthru 1153 case kExprSetLocal: {
1128 case kExprTeeLocal: {
1129 LocalIndexOperand operand(this, val.pc); 1154 LocalIndexOperand operand(this, val.pc);
1130 PrintF("[%u]", operand.index); 1155 PrintF("[%u]", operand.index);
1131 break; 1156 break;
1132 } 1157 }
1133 default: 1158 default:
1134 break; 1159 break;
1135 } 1160 }
1136 } 1161 }
1137 PrintF("\n"); 1162 PrintF("\n");
1138 } 1163 }
1139 #endif 1164 #endif
1140 pc_ += len; 1165 pc_ += len;
1141 if (pc_ >= limit_) { 1166 if (pc_ >= limit_) {
1142 // End of code reached or exceeded. 1167 // End of code reached or exceeded.
1143 if (pc_ > limit_ && ok()) error("Beyond end of code"); 1168 if (pc_ > limit_ && ok()) error("Beyond end of code");
1144 return; 1169 return;
1145 } 1170 }
1146 } // end decode loop 1171 } // end decode loop
1147 } 1172 } // end DecodeFunctionBody()
1148
1149 void EndControl() { ssa_env_->Kill(SsaEnv::kControlEnd); }
1150
1151 void SetBlockType(Control* c, BlockTypeOperand& operand) {
1152 c->merge.arity = operand.arity;
1153 if (c->merge.arity == 1) {
1154 c->merge.vals.first = {pc_, nullptr, operand.read_entry(0)};
1155 } else if (c->merge.arity > 1) {
1156 c->merge.vals.array = zone_->NewArray<Value>(c->merge.arity);
1157 for (unsigned i = 0; i < c->merge.arity; i++) {
1158 c->merge.vals.array[i] = {pc_, nullptr, operand.read_entry(i)};
1159 }
1160 }
1161 }
1162 1173
1163 TFNode** PopArgs(FunctionSig* sig) { 1174 TFNode** PopArgs(FunctionSig* sig) {
1164 if (build()) { 1175 if (build()) {
1165 int count = static_cast<int>(sig->parameter_count()); 1176 int count = static_cast<int>(sig->parameter_count());
1166 TFNode** buffer = builder_->Buffer(count + 1); 1177 TFNode** buffer = builder_->Buffer(count + 1);
1167 buffer[0] = nullptr; // reserved for code object or function index. 1178 buffer[0] = nullptr; // reserved for code object or function index.
1168 for (int i = count - 1; i >= 0; i--) { 1179 for (int i = count - 1; i >= 0; i--) {
1169 buffer[i + 1] = Pop(i, sig->GetParam(i)).node; 1180 buffer[i + 1] = Pop(i, sig->GetParam(i)).node;
1170 } 1181 }
1171 return buffer; 1182 return buffer;
(...skipping 43 matching lines...)
1215 return 1 + operand.length; 1226 return 1 + operand.length;
1216 } 1227 }
1217 1228
1218 int DecodeStoreMem(LocalType type, MachineType mem_type) { 1229 int DecodeStoreMem(LocalType type, MachineType mem_type) {
1219 MemoryAccessOperand operand(this, pc_, 1230 MemoryAccessOperand operand(this, pc_,
1220 ElementSizeLog2Of(mem_type.representation())); 1231 ElementSizeLog2Of(mem_type.representation()));
1221 Value val = Pop(1, type); 1232 Value val = Pop(1, type);
1222 Value index = Pop(0, kAstI32); 1233 Value index = Pop(0, kAstI32);
1223 BUILD(StoreMem, mem_type, index.node, operand.offset, operand.alignment, 1234 BUILD(StoreMem, mem_type, index.node, operand.offset, operand.alignment,
1224 val.node, position()); 1235 val.node, position());
1236 Push(type, val.node);
1225 return 1 + operand.length; 1237 return 1 + operand.length;
1226 } 1238 }
1227 1239
1228 unsigned DecodeSimdOpcode(WasmOpcode opcode) { 1240 unsigned DecodeSimdOpcode(WasmOpcode opcode) {
1229 unsigned len = 0; 1241 unsigned len = 0;
1230 switch (opcode) { 1242 switch (opcode) {
1231 case kExprI32x4ExtractLane: { 1243 case kExprI32x4ExtractLane: {
1232 uint8_t lane = this->checked_read_u8(pc_, 2, "lane number"); 1244 uint8_t lane = this->checked_read_u8(pc_, 2, "lane number");
1233 if (lane < 0 || lane > 3) { 1245 if (lane < 0 || lane > 3) {
1234 error(pc_, pc_ + 2, "invalid extract lane value"); 1246 error(pc_, pc_ + 2, "invalid extract lane value");
1235 } 1247 }
1236 TFNode* input = Pop(0, LocalType::kSimd128).node; 1248 TFNode* input = Pop(0, LocalType::kSimd128).node;
1237 TFNode* node = BUILD(SimdExtractLane, opcode, lane, input); 1249 TFNode* node = BUILD(SimdExtractLane, opcode, lane, input);
1238 Push(LocalType::kWord32, node); 1250 Push(LocalType::kWord32, node);
1239 len++; 1251 len++;
1240 break; 1252 break;
1241 } 1253 }
1242 default: { 1254 default: {
1243 FunctionSig* sig = WasmOpcodes::Signature(opcode); 1255 FunctionSig* sig = WasmOpcodes::Signature(opcode);
1244 if (sig != nullptr) { 1256 if (sig != nullptr) {
1245 compiler::NodeVector inputs(sig->parameter_count(), zone_); 1257 compiler::NodeVector inputs(sig->parameter_count(), zone_);
1246 for (size_t i = sig->parameter_count(); i > 0; i--) { 1258 for (size_t i = sig->parameter_count(); i > 0; i--) {
1247 Value val = Pop(static_cast<int>(i - 1), sig->GetParam(i - 1)); 1259 Value val = Pop(static_cast<int>(i - 1), sig->GetParam(i - 1));
1248 inputs[i - 1] = val.node; 1260 inputs[i - 1] = val.node;
1249 } 1261 }
1250 TFNode* node = BUILD(SimdOp, opcode, inputs); 1262 TFNode* node = BUILD(SimdOp, opcode, inputs);
1251 Push(GetReturnType(sig), node); 1263 Push(GetReturnType(sig), node);
1252 } else { 1264 } else {
1253 error("invalid simd opcode"); 1265 error(pc_, pc_, "invalid simd opcode");
1254 } 1266 }
1255 } 1267 }
1256 } 1268 }
1257 return len; 1269 return len;
1258 } 1270 }
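  // DecodeSimdOpcode treats extract-lane specially: the lane index is a
  // one-byte immediate that must lie in [0, 3] for I32x4. Every other SIMD
  // opcode is driven by its signature: parameters are popped in reverse,
  // handed to the SimdOp builder, and the signature's return type is pushed.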
1259 1271
1260 void DoReturn() { 1272 void DoReturn() {
1261 int count = static_cast<int>(sig_->return_count()); 1273 int count = static_cast<int>(sig_->return_count());
1262 TFNode** buffer = nullptr; 1274 TFNode** buffer = nullptr;
1263 if (build()) buffer = builder_->Buffer(count); 1275 if (build()) buffer = builder_->Buffer(count);
1264 1276
1265 // Pop return values off the stack in reverse order. 1277 // Pop return values off the stack in reverse order.
1266 for (int i = count - 1; i >= 0; i--) { 1278 for (int i = count - 1; i >= 0; i--) {
1267 Value val = Pop(i, sig_->GetReturn(i)); 1279 Value val = Pop(i, sig_->GetReturn(i));
1268 if (buffer) buffer[i] = val.node; 1280 if (buffer) buffer[i] = val.node;
1269 } 1281 }
1270 1282
1271 BUILD(Return, count, buffer); 1283 Push(kAstEnd, BUILD(Return, count, buffer));
1272 EndControl(); 1284 ssa_env_->Kill(SsaEnv::kControlEnd);
1273 } 1285 }
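  // DoReturn pops the return values in reverse so that buffer[i] lines up
  // with sig_->GetReturn(i), emits the Return node, and then kills the SSA
  // environment, since code after an unconditional return is unreachable.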
1274 1286
1275 void Push(LocalType type, TFNode* node) { 1287 void Push(LocalType type, TFNode* node) {
1276 if (type != kAstStmt && type != kAstEnd) { 1288 stack_.push_back({pc_, node, type});
1277 stack_.push_back({pc_, node, type});
1278 }
1279 }
1280
1281 void PushReturns(FunctionSig* sig, TFNode** rets) {
1282 for (size_t i = 0; i < sig->return_count(); i++) {
1283 // When verifying only, then {rets} will be null, so push null.
1284 Push(sig->GetReturn(i), rets ? rets[i] : nullptr);
1285 }
1286 } 1289 }
1287 1290
1288 const char* SafeOpcodeNameAt(const byte* pc) { 1291 const char* SafeOpcodeNameAt(const byte* pc) {
1289 if (pc >= end_) return "<end>"; 1292 if (pc >= end_) return "<end>";
1290 return WasmOpcodes::ShortOpcodeName(static_cast<WasmOpcode>(*pc)); 1293 return WasmOpcodes::ShortOpcodeName(static_cast<WasmOpcode>(*pc));
1291 } 1294 }
1292 1295
1293 Value Pop(int index, LocalType expected) { 1296 Value Pop(int index, LocalType expected) {
1294 if (!ssa_env_->go()) {
1295 // Unreachable code is essentially not typechecked.
1296 return {pc_, nullptr, expected};
1297 }
1298 Value val = Pop(); 1297 Value val = Pop();
1299 if (val.type != expected) { 1298 if (val.type != expected) {
1300 if (val.type != kAstEnd) { 1299 if (val.type != kAstEnd) {
1301 error(pc_, val.pc, "%s[%d] expected type %s, found %s of type %s", 1300 error(pc_, val.pc, "%s[%d] expected type %s, found %s of type %s",
1302 SafeOpcodeNameAt(pc_), index, WasmOpcodes::TypeName(expected), 1301 SafeOpcodeNameAt(pc_), index, WasmOpcodes::TypeName(expected),
1303 SafeOpcodeNameAt(val.pc), WasmOpcodes::TypeName(val.type)); 1302 SafeOpcodeNameAt(val.pc), WasmOpcodes::TypeName(val.type));
1304 } 1303 }
1305 } 1304 }
1306 return val; 1305 return val;
1307 } 1306 }
1308 1307
1309 Value Pop() { 1308 Value Pop() {
1310 if (!ssa_env_->go()) {
1311 // Unreachable code is essentially not typechecked.
1312 return {pc_, nullptr, kAstEnd};
1313 }
1314 size_t limit = control_.empty() ? 0 : control_.back().stack_depth; 1309 size_t limit = control_.empty() ? 0 : control_.back().stack_depth;
1315 if (stack_.size() <= limit) { 1310 if (stack_.size() <= limit) {
1316 Value val = {pc_, nullptr, kAstStmt}; 1311 Value val = {pc_, nullptr, kAstStmt};
1317 error(pc_, pc_, "%s found empty stack", SafeOpcodeNameAt(pc_)); 1312 error(pc_, pc_, "%s found empty stack", SafeOpcodeNameAt(pc_));
1318 return val; 1313 return val;
1319 } 1314 }
1320 Value val = stack_.back(); 1315 Value val = stack_.back();
1321 stack_.pop_back(); 1316 stack_.pop_back();
1322 return val; 1317 return val;
1323 } 1318 }
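  // Pop never reaches below the stack_depth recorded by the innermost control
  // entry; hitting that limit reports "found empty stack" rather than
  // consuming operands that belong to an enclosing block.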
1324 1319
1325 Value PopUpTo(int stack_depth) { 1320 Value PopUpTo(int stack_depth) {
1326 if (!ssa_env_->go()) {
1327 // Unreachable code is essentially not typechecked.
1328 return {pc_, nullptr, kAstEnd};
1329 }
1330 if (stack_depth == stack_.size()) { 1321 if (stack_depth == stack_.size()) {
1331 Value val = {pc_, nullptr, kAstStmt}; 1322 Value val = {pc_, nullptr, kAstStmt};
1332 return val; 1323 return val;
1333 } else { 1324 } else {
1334 DCHECK_LE(stack_depth, static_cast<int>(stack_.size())); 1325 DCHECK_LE(stack_depth, static_cast<int>(stack_.size()));
1335 Value val = Pop(); 1326 Value val = Pop();
1336 stack_.resize(stack_depth); 1327 stack_.resize(stack_depth);
1337 return val; 1328 return val;
1338 } 1329 }
1339 } 1330 }
1340 1331
1341 int baserel(const byte* ptr) { 1332 int baserel(const byte* ptr) {
1342 return base_ ? static_cast<int>(ptr - base_) : 0; 1333 return base_ ? static_cast<int>(ptr - base_) : 0;
1343 } 1334 }
1344 1335
1345 int startrel(const byte* ptr) { return static_cast<int>(ptr - start_); } 1336 int startrel(const byte* ptr) { return static_cast<int>(ptr - start_); }
1346 1337
1347 void BreakTo(unsigned depth) { 1338 void BreakTo(Control* block, const Value& val) {
1348 if (!ssa_env_->go()) return; 1339 if (block->is_loop) {
1349 Control* c = &control_[control_.size() - depth - 1];
1350 if (c->is_loop()) {
1351 // This is the inner loop block, which does not have a value. 1340 // This is the inner loop block, which does not have a value.
1352 Goto(ssa_env_, c->end_env); 1341 Goto(ssa_env_, block->end_env);
1353 } else { 1342 } else {
1354 // Merge the value(s) into the end of the block. 1343 // Merge the value into the production for the block.
1355 if (static_cast<size_t>(c->stack_depth + c->merge.arity) > 1344 MergeInto(block->end_env, &block->node, &block->type, val);
1356 stack_.size()) {
1357 error(
1358 pc_, pc_,
1359 "expected at least %d values on the stack for br to @%d, found %d",
1360 c->merge.arity, startrel(c->pc),
1361 static_cast<int>(stack_.size() - c->stack_depth));
1362 return;
1363 }
1364 MergeValuesInto(c);
1365 } 1345 }
1366 } 1346 }
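  // BreakTo distinguishes its targets: a branch to a loop is a backedge that
  // carries no values, so only the control and effect state are merged, while
  // a branch to a block or if merges the branch value(s) into the target's
  // end environment.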
1367 1347
1368 void FallThruTo(Control* c) { 1348 void MergeInto(SsaEnv* target, TFNode** node, LocalType* type,
1349 const Value& val) {
1369 if (!ssa_env_->go()) return; 1350 if (!ssa_env_->go()) return;
1370 // Merge the value(s) into the end of the block. 1351 DCHECK_NE(kAstEnd, val.type);
1371 int arity = static_cast<int>(c->merge.arity);
1372 if (c->stack_depth + arity != stack_.size()) {
1373 error(pc_, pc_, "expected %d elements on the stack for fallthru to @%d",
1374 arity, startrel(c->pc));
1375 return;
1376 }
1377 MergeValuesInto(c);
1378 }
1379 1352
1380 inline Value& GetMergeValueFromStack(Control* c, int i) { 1353 bool first = target->state == SsaEnv::kUnreachable;
1381 return stack_[stack_.size() - c->merge.arity + i]; 1354 Goto(ssa_env_, target);
1382 }
1383 1355
1384 void TypeCheckLoopFallThru(Control* c) { 1356 if (first) {
1385 if (!ssa_env_->go()) return; 1357 // first merge to this environment; set the type and the node.
1386 // Fallthru must match arity exactly. 1358 *type = val.type;
1387 int arity = static_cast<int>(c->merge.arity); 1359 *node = val.node;
1388 if (c->stack_depth + arity != stack_.size()) { 1360 } else if (val.type == *type && val.type != kAstStmt) {
1389 error(pc_, pc_, "expected %d elements on the stack for fallthru to @%d", 1361 // merge with the existing value for this block.
1390 arity, startrel(c->pc)); 1362 *node = CreateOrMergeIntoPhi(*type, target->control, *node, val.node);
1391 return; 1363 } else {
1392 } 1364 // types don't match, or block is already a stmt.
1393 // Typecheck the values left on the stack. 1365 *type = kAstStmt;
1394 for (unsigned i = 0; i < c->merge.arity; i++) { 1366 *node = nullptr;
1395 Value& val = GetMergeValueFromStack(c, i);
1396 Value& old =
1397 c->merge.arity == 1 ? c->merge.vals.first : c->merge.vals.array[i];
1398 if (val.type != old.type) {
1399 error(pc_, pc_, "type error in merge[%d] (expected %s, got %s)", i,
1400 WasmOpcodes::TypeName(old.type), WasmOpcodes::TypeName(val.type));
1401 return;
1402 }
1403 } 1367 }
1404 } 1368 }
1405 1369
1406 void MergeValuesInto(Control* c) {
1407 SsaEnv* target = c->end_env;
1408 bool first = target->state == SsaEnv::kUnreachable;
1409 Goto(ssa_env_, target);
1410
1411 for (unsigned i = 0; i < c->merge.arity; i++) {
1412 Value& val = GetMergeValueFromStack(c, i);
1413 Value& old =
1414 c->merge.arity == 1 ? c->merge.vals.first : c->merge.vals.array[i];
1415 if (val.type != old.type) {
1416 error(pc_, pc_, "type error in merge[%d] (expected %s, got %s)", i,
1417 WasmOpcodes::TypeName(old.type), WasmOpcodes::TypeName(val.type));
1418 return;
1419 }
1420 old.node =
1421 first ? val.node : CreateOrMergeIntoPhi(old.type, target->control,
1422 old.node, val.node);
1423 }
1424 }
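  // The multi-value MergeValuesInto above type-checks each of the merge.arity
  // stack values against the block's declared result types, seeding the
  // target environment on the first merge and folding later merges into phis;
  // the single-value MergeInto on the right instead degrades a type mismatch
  // to kAstStmt rather than reporting an error.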
1425
1426 void SetEnv(const char* reason, SsaEnv* env) { 1370 void SetEnv(const char* reason, SsaEnv* env) {
1427 #if DEBUG 1371 #if DEBUG
1428 if (FLAG_trace_wasm_decoder) { 1372 if (FLAG_trace_wasm_decoder) {
1429 char state = 'X'; 1373 char state = 'X';
1430 if (env) { 1374 if (env) {
1431 switch (env->state) { 1375 switch (env->state) {
1432 case SsaEnv::kReached: 1376 case SsaEnv::kReached:
1433 state = 'R'; 1377 state = 'R';
1434 break; 1378 break;
1435 case SsaEnv::kUnreachable: 1379 case SsaEnv::kUnreachable:
(...skipping 205 matching lines...)
1641 int depth = 0; 1585 int depth = 0;
1642 // Iteratively process all AST nodes nested inside the loop. 1586 // Iteratively process all AST nodes nested inside the loop.
1643 while (pc < limit_ && ok()) { 1587 while (pc < limit_ && ok()) {
1644 WasmOpcode opcode = static_cast<WasmOpcode>(*pc); 1588 WasmOpcode opcode = static_cast<WasmOpcode>(*pc);
1645 unsigned length = 1; 1589 unsigned length = 1;
1646 switch (opcode) { 1590 switch (opcode) {
1647 case kExprLoop: 1591 case kExprLoop:
1648 case kExprIf: 1592 case kExprIf:
1649 case kExprBlock: 1593 case kExprBlock:
1650 case kExprTry: 1594 case kExprTry:
1651 length = OpcodeLength(pc);
1652 depth++; 1595 depth++;
1596 DCHECK_EQ(1, OpcodeLength(pc));
1653 break; 1597 break;
1654 case kExprSetLocal: // fallthru 1598 case kExprSetLocal: {
1655 case kExprTeeLocal: {
1656 LocalIndexOperand operand(this, pc); 1599 LocalIndexOperand operand(this, pc);
1657 if (assigned->length() > 0 && 1600 if (assigned->length() > 0 &&
1658 operand.index < static_cast<uint32_t>(assigned->length())) { 1601 operand.index < static_cast<uint32_t>(assigned->length())) {
1659 // Unverified code might have an out-of-bounds index. 1602 // Unverified code might have an out-of-bounds index.
1660 assigned->Add(operand.index); 1603 assigned->Add(operand.index);
1661 } 1604 }
1662 length = 1 + operand.length; 1605 length = 1 + operand.length;
1663 break; 1606 break;
1664 } 1607 }
1665 case kExprEnd: 1608 case kExprEnd:
(...skipping 72 matching lines...)
1738 WasmFullDecoder decoder(&zone, builder, body); 1681 WasmFullDecoder decoder(&zone, builder, body);
1739 decoder.Decode(); 1682 decoder.Decode();
1740 return decoder.toResult<DecodeStruct*>(nullptr); 1683 return decoder.toResult<DecodeStruct*>(nullptr);
1741 } 1684 }
1742 1685
1743 unsigned OpcodeLength(const byte* pc, const byte* end) { 1686 unsigned OpcodeLength(const byte* pc, const byte* end) {
1744 WasmDecoder decoder(nullptr, nullptr, pc, end); 1687 WasmDecoder decoder(nullptr, nullptr, pc, end);
1745 return decoder.OpcodeLength(pc); 1688 return decoder.OpcodeLength(pc);
1746 } 1689 }
1747 1690
1691 unsigned OpcodeArity(const byte* pc, const byte* end) {
1692 WasmDecoder decoder(nullptr, nullptr, pc, end);
1693 return decoder.OpcodeArity(pc);
1694 }
1695
1748 void PrintAstForDebugging(const byte* start, const byte* end) { 1696 void PrintAstForDebugging(const byte* start, const byte* end) {
1749 AccountingAllocator allocator; 1697 AccountingAllocator allocator;
1750 OFStream os(stdout); 1698 OFStream os(stdout);
1751 PrintAst(&allocator, FunctionBodyForTesting(start, end), os, nullptr); 1699 PrintAst(&allocator, FunctionBodyForTesting(start, end), os, nullptr);
1752 } 1700 }
1753 1701
1754 bool PrintAst(AccountingAllocator* allocator, const FunctionBody& body, 1702 bool PrintAst(AccountingAllocator* allocator, const FunctionBody& body,
1755 std::ostream& os, 1703 std::ostream& os,
1756 std::vector<std::tuple<uint32_t, int, int>>* offset_table) { 1704 std::vector<std::tuple<uint32_t, int, int>>* offset_table) {
1757 Zone zone(allocator); 1705 Zone zone(allocator);
(...skipping 45 matching lines...)
1803 const char* padding = 1751 const char* padding =
1804 " "; 1752 " ";
1805 os.write(padding, num_whitespaces); 1753 os.write(padding, num_whitespaces);
1806 os << "k" << WasmOpcodes::OpcodeName(opcode) << ","; 1754 os << "k" << WasmOpcodes::OpcodeName(opcode) << ",";
1807 1755
1808 for (size_t j = 1; j < length; ++j) { 1756 for (size_t j = 1; j < length; ++j) {
1809 os << " " << AsHex(i.pc()[j], 2) << ","; 1757 os << " " << AsHex(i.pc()[j], 2) << ",";
1810 } 1758 }
1811 1759
1812 switch (opcode) { 1760 switch (opcode) {
1761 case kExprIf:
1813 case kExprElse: 1762 case kExprElse:
1763 case kExprLoop:
1764 case kExprBlock:
1765 case kExprTry:
1814 os << " // @" << i.pc_offset(); 1766 os << " // @" << i.pc_offset();
1815 control_depth++; 1767 control_depth++;
1816 break; 1768 break;
1817 case kExprLoop:
1818 case kExprIf:
1819 case kExprBlock:
1820 case kExprTry: {
1821 BlockTypeOperand operand(&i, i.pc());
1822 os << " // @" << i.pc_offset();
1823 for (unsigned i = 0; i < operand.arity; i++) {
1824 os << " " << WasmOpcodes::TypeName(operand.read_entry(i));
1825 }
1826 control_depth++;
1827 break;
1828 }
1829 case kExprEnd: 1769 case kExprEnd:
1830 os << " // @" << i.pc_offset(); 1770 os << " // @" << i.pc_offset();
1831 control_depth--; 1771 control_depth--;
1832 break; 1772 break;
1833 case kExprBr: { 1773 case kExprBr: {
1834 BreakDepthOperand operand(&i, i.pc()); 1774 BreakDepthOperand operand(&i, i.pc());
1835 os << " // depth=" << operand.depth; 1775 os << " // arity=" << operand.arity << " depth=" << operand.depth;
1836 break; 1776 break;
1837 } 1777 }
1838 case kExprBrIf: { 1778 case kExprBrIf: {
1839 BreakDepthOperand operand(&i, i.pc()); 1779 BreakDepthOperand operand(&i, i.pc());
1840 os << " // depth=" << operand.depth; 1780 os << " // arity=" << operand.arity << " depth=" << operand.depth;
1841 break; 1781 break;
1842 } 1782 }
1843 case kExprBrTable: { 1783 case kExprBrTable: {
1844 BranchTableOperand operand(&i, i.pc()); 1784 BranchTableOperand operand(&i, i.pc());
1845 os << " // entries=" << operand.table_count; 1785 os << " // arity=" << operand.arity
1786 << " entries=" << operand.table_count;
1846 break; 1787 break;
1847 } 1788 }
1848 case kExprCallIndirect: { 1789 case kExprCallIndirect: {
1849 CallIndirectOperand operand(&i, i.pc()); 1790 CallIndirectOperand operand(&i, i.pc());
1850 os << " // sig #" << operand.index;
1851 if (decoder.Complete(i.pc(), operand)) { 1791 if (decoder.Complete(i.pc(), operand)) {
1852 os << ": " << *operand.sig; 1792 os << " // sig #" << operand.index << ": " << *operand.sig;
1793 } else {
1794 os << " // arity=" << operand.arity << " sig #" << operand.index;
1795 }
1796 break;
1797 }
1798 case kExprCallImport: {
1799 CallImportOperand operand(&i, i.pc());
1800 if (decoder.Complete(i.pc(), operand)) {
1801 os << " // import #" << operand.index << ": " << *operand.sig;
1802 } else {
1803 os << " // arity=" << operand.arity << " import #" << operand.index;
1853 } 1804 }
1854 break; 1805 break;
1855 } 1806 }
1856 case kExprCallFunction: { 1807 case kExprCallFunction: {
1857 CallFunctionOperand operand(&i, i.pc()); 1808 CallFunctionOperand operand(&i, i.pc());
1858 os << " // function #" << operand.index;
1859 if (decoder.Complete(i.pc(), operand)) { 1809 if (decoder.Complete(i.pc(), operand)) {
1860 os << ": " << *operand.sig; 1810 os << " // function #" << operand.index << ": " << *operand.sig;
1811 } else {
1812 os << " // arity=" << operand.arity << " function #" << operand.index;
1861 } 1813 }
1862 break; 1814 break;
1863 } 1815 }
1816 case kExprReturn: {
1817 ReturnArityOperand operand(&i, i.pc());
1818 os << " // arity=" << operand.arity;
1819 break;
1820 }
1864 default: 1821 default:
1865 break; 1822 break;
1866 } 1823 }
1867 os << std::endl; 1824 os << std::endl;
1868 ++line_nr; 1825 ++line_nr;
1869 } 1826 }
1870 1827
1871 return decoder.ok(); 1828 return decoder.ok();
1872 } 1829 }
1873 1830
1874 BitVector* AnalyzeLoopAssignmentForTesting(Zone* zone, size_t num_locals, 1831 BitVector* AnalyzeLoopAssignmentForTesting(Zone* zone, size_t num_locals,
1875 const byte* start, const byte* end) { 1832 const byte* start, const byte* end) {
1876 FunctionBody body = {nullptr, nullptr, nullptr, start, end}; 1833 FunctionBody body = {nullptr, nullptr, nullptr, start, end};
1877 WasmFullDecoder decoder(zone, nullptr, body); 1834 WasmFullDecoder decoder(zone, nullptr, body);
1878 return decoder.AnalyzeLoopAssignmentForTesting(start, num_locals); 1835 return decoder.AnalyzeLoopAssignmentForTesting(start, num_locals);
1879 } 1836 }
1880 1837
1881 } // namespace wasm 1838 } // namespace wasm
1882 } // namespace internal 1839 } // namespace internal
1883 } // namespace v8 1840 } // namespace v8