Chromium Code Reviews

Side by Side Diff: src/wasm/ast-decoder.cc

Issue 1664883002: [wasm] Refactor handling of operands to bytecodes. (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Created 4 years, 10 months ago
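This CL replaces the ad-hoc operand helpers on WasmDecoder (ByteOperand, UnsignedLEB128Operand, LocalOperand, GlobalOperand, ...) with small operand structs that decode themselves from the bytes following the opcode, paired with Validate() overloads that range-check them against the module. The structs themselves live in src/wasm/ast-decoder.h; the sketch below only illustrates the shape implied by how they are used in this file, so treat the constructor and field details as assumptions.

// Illustrative sketch only -- the real definition is in src/wasm/ast-decoder.h.
struct LocalIndexOperand {
  uint32_t index;  // LEB128-encoded local index that follows the opcode byte
  LocalType type;  // filled in by WasmDecoder::Validate() via FunctionEnv::GetLocalType()
  int length;      // number of bytes the operand occupied

  inline LocalIndexOperand(Decoder* decoder, const byte* pc) {
    // Offset 1 skips the opcode byte itself; length receives the varint size.
    index = decoder->checked_read_u32v(pc, 1, &length);
    type = kAstStmt;
  }
};

Each opcode handler then follows the same pattern: construct the operand, Validate() it, and advance pc_ by 1 + operand.length.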
1 // Copyright 2015 the V8 project authors. All rights reserved. 1 // Copyright 2015 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "src/base/platform/elapsed-timer.h" 5 #include "src/base/platform/elapsed-timer.h"
6 #include "src/signature.h" 6 #include "src/signature.h"
7 7
8 #include "src/bit-vector.h" 8 #include "src/bit-vector.h"
9 #include "src/flags.h" 9 #include "src/flags.h"
10 #include "src/handles.h" 10 #include "src/handles.h"
(...skipping 23 matching lines...)
34 struct Tree { 34 struct Tree {
35 LocalType type; // tree type. 35 LocalType type; // tree type.
36 uint32_t count; // number of children. 36 uint32_t count; // number of children.
37 const byte* pc; // start of the syntax tree. 37 const byte* pc; // start of the syntax tree.
38 TFNode* node; // node in the TurboFan graph. 38 TFNode* node; // node in the TurboFan graph.
39 Tree* children[1]; // pointers to children. 39 Tree* children[1]; // pointers to children.
40 40
41 WasmOpcode opcode() const { return static_cast<WasmOpcode>(*pc); } 41 WasmOpcode opcode() const { return static_cast<WasmOpcode>(*pc); }
42 }; 42 };
43 43
44
45 // A production represents an incomplete decoded tree in the LR decoder. 44 // A production represents an incomplete decoded tree in the LR decoder.
46 struct Production { 45 struct Production {
47 Tree* tree; // the root of the syntax tree. 46 Tree* tree; // the root of the syntax tree.
48 int index; // the current index into the children of the tree. 47 int index; // the current index into the children of the tree.
49 48
50 WasmOpcode opcode() const { return static_cast<WasmOpcode>(*pc()); } 49 WasmOpcode opcode() const { return static_cast<WasmOpcode>(*pc()); }
51 const byte* pc() const { return tree->pc; } 50 const byte* pc() const { return tree->pc; }
52 bool done() const { return index >= static_cast<int>(tree->count); } 51 bool done() const { return index >= static_cast<int>(tree->count); }
53 Tree* last() const { return index > 0 ? tree->children[index - 1] : nullptr; } 52 Tree* last() const { return index > 0 ? tree->children[index - 1] : nullptr; }
54 }; 53 };
(...skipping 41 matching lines...)
96 // TF graphs when decoding inputs that have unreachable code. 95 // TF graphs when decoding inputs that have unreachable code.
97 #define BUILD(func, ...) (build() ? builder_->func(__VA_ARGS__) : nullptr) 96 #define BUILD(func, ...) (build() ? builder_->func(__VA_ARGS__) : nullptr)
98 #define BUILD0(func) (build() ? builder_->func() : nullptr) 97 #define BUILD0(func) (build() ? builder_->func() : nullptr)
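Concretely, when build() is false (for example while decoding unreachable code) the builder_ call is skipped entirely and the expression evaluates to nullptr:

// Illustrative use of the macros above; Int32Constant and Unreachable are builder
// methods used later in this file.
TFNode* k   = BUILD(Int32Constant, 7);  // nullptr when not building the TF graph
TFNode* end = BUILD0(Unreachable);      // same, for zero-argument builder calls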
99 98
100 99
101 // Generic Wasm bytecode decoder with utilities for decoding operands, 100 // Generic Wasm bytecode decoder with utilities for decoding operands,
102 // lengths, etc. 101 // lengths, etc.
103 class WasmDecoder : public Decoder { 102 class WasmDecoder : public Decoder {
104 public: 103 public:
105 WasmDecoder() : Decoder(nullptr, nullptr), function_env_(nullptr) {} 104 WasmDecoder() : Decoder(nullptr, nullptr), function_env_(nullptr) {}
106 105 WasmDecoder(FunctionEnv* env, const byte* start, const byte* end)
107 protected: 106 : Decoder(start, end), function_env_(env) {}
108 FunctionEnv* function_env_; 107 FunctionEnv* function_env_;
109 108
110 void Reset(FunctionEnv* function_env, const byte* start, const byte* end) { 109 void Reset(FunctionEnv* function_env, const byte* start, const byte* end) {
111 Decoder::Reset(start, end); 110 Decoder::Reset(start, end);
112 function_env_ = function_env; 111 function_env_ = function_env;
113 } 112 }
114 113
115 byte ByteOperand(const byte* pc, const char* msg = "missing 1-byte operand") { 114 byte ByteOperand(const byte* pc, const char* msg = "missing 1-byte operand") {
116 if ((pc + sizeof(byte)) >= limit_) { 115 if ((pc + sizeof(byte)) >= limit_) {
117 error(pc, msg); 116 error(pc, msg);
(...skipping 11 matching lines...)
129 } 128 }
130 129
131 uint64_t Uint64Operand(const byte* pc) { 130 uint64_t Uint64Operand(const byte* pc) {
132 if ((pc + sizeof(uint64_t)) >= limit_) { 131 if ((pc + sizeof(uint64_t)) >= limit_) {
133 error(pc, "missing 8-byte operand"); 132 error(pc, "missing 8-byte operand");
134 return 0; 133 return 0;
135 } 134 }
136 return read_u64(pc + 1); 135 return read_u64(pc + 1);
137 } 136 }
138 137
139 LocalType LocalOperand(const byte* pc, uint32_t* index, int* length) { 138 inline bool Validate(const byte* pc, LocalIndexOperand& operand) {
140 *index = UnsignedLEB128Operand(pc, length); 139 if (operand.index < function_env_->total_locals) {
141 if (function_env_->IsValidLocal(*index)) { 140 operand.type = function_env_->GetLocalType(operand.index);
142 return function_env_->GetLocalType(*index); 141 return true;
143 } 142 }
144 error(pc, "invalid local variable index"); 143 error(pc, pc + 1, "invalid local index");
145 return kAstStmt; 144 return false;
146 } 145 }
147 146
148 LocalType GlobalOperand(const byte* pc, uint32_t* index, int* length) { 147 inline bool Validate(const byte* pc, GlobalIndexOperand& operand) {
149 *index = UnsignedLEB128Operand(pc, length); 148 ModuleEnv* m = function_env_->module;
150 if (function_env_->module->IsValidGlobal(*index)) { 149 if (m && m->module && operand.index < m->module->globals->size()) {
151 return WasmOpcodes::LocalTypeFor( 150 operand.machine_type = m->module->globals->at(operand.index).type;
152 function_env_->module->GetGlobalType(*index)); 151 operand.type = WasmOpcodes::LocalTypeFor(operand.machine_type);
153 } 152 return true;
154 error(pc, "invalid global variable index"); 153 }
155 return kAstStmt; 154 error(pc, pc + 1, "invalid global index");
156 } 155 return false;
157 156 }
158 FunctionSig* FunctionSigOperand(const byte* pc, uint32_t* index, 157
159 int* length) { 158 inline bool Validate(const byte* pc, FunctionIndexOperand& operand) {
160 *index = UnsignedLEB128Operand(pc, length); 159 ModuleEnv* m = function_env_->module;
161 if (function_env_->module->IsValidFunction(*index)) { 160 if (m && m->module && operand.index < m->module->functions->size()) {
162 return function_env_->module->GetFunctionSignature(*index); 161 operand.sig = m->module->functions->at(operand.index).sig;
163 } 162 return true;
164 error(pc, "invalid function index"); 163 }
165 return nullptr; 164 error(pc, pc + 1, "invalid function index");
166 } 165 return false;
167 166 }
168 FunctionSig* SigOperand(const byte* pc, uint32_t* index, int* length) { 167
169 *index = UnsignedLEB128Operand(pc, length); 168 inline bool Validate(const byte* pc, SignatureIndexOperand& operand) {
170 if (function_env_->module->IsValidSignature(*index)) { 169 ModuleEnv* m = function_env_->module;
171 return function_env_->module->GetSignature(*index); 170 if (m && m->module && operand.index < m->module->signatures->size()) {
172 } 171 operand.sig = m->module->signatures->at(operand.index);
173 error(pc, "invalid signature index"); 172 return true;
174 return nullptr; 173 }
175 } 174 error(pc, pc + 1, "invalid signature index");
176 175 return false;
177 uint32_t UnsignedLEB128Operand(const byte* pc, int* length) { 176 }
178 uint32_t result = 0; 177
179 ReadUnsignedLEB128ErrorCode error_code = 178 inline bool Validate(const byte* pc, BreakDepthOperand& operand,
180 ReadUnsignedLEB128Operand(pc + 1, limit_, length, &result); 179 ZoneVector<Block>& blocks) {
181 if (error_code == kInvalidLEB128) error(pc, "invalid LEB128 varint"); 180 if (operand.depth < blocks.size()) {
182 if (error_code == kMissingLEB128) error(pc, "expected LEB128 varint"); 181 operand.target = &blocks[blocks.size() - operand.depth - 1];
183 (*length)++; 182 return true;
184 return result; 183 }
185 } 184 error(pc, pc + 1, "invalid break depth");
186 185 return false;
187 void MemoryAccessOperand(const byte* pc, int* length, uint32_t* offset) { 186 }
188 byte bitfield = ByteOperand(pc, "missing memory access operand"); 187
189 if (MemoryAccess::OffsetField::decode(bitfield)) { 188 bool Validate(const byte* pc, TableSwitchOperand& operand,
190 *offset = UnsignedLEB128Operand(pc + 1, length); 189 size_t block_depth) {
191 (*length)++; // to account for the memory access byte 190 if (operand.table_count == 0) {
192 } else { 191 error(pc, "tableswitch with 0 entries");
193 *offset = 0; 192 return false;
194 *length = 2; 193 }
194 // Verify table.
195 for (int i = 0; i < operand.table_count; i++) {
196 uint16_t target = operand.read_entry(this, i);
197 if (target >= 0x8000) {
198 size_t depth = target - 0x8000;
199 if (depth > block_depth) {
200 error(operand.table + i * 2, "improper branch in tableswitch");
201 return false;
202 }
203 } else {
204 if (target >= operand.case_count) {
205 error(operand.table + i * 2, "invalid case target in tableswitch");
206 return false;
207 }
208 }
209 }
210 return true;
211 }
212
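The check above pins down the encoding of tableswitch table entries: each entry is a 16-bit value, entries at or above 0x8000 branch out of the switch to an enclosing block (depth = entry - 0x8000), and smaller entries select a case. Illustrative helpers, not part of the CL:

// Sketch of the entry encoding implied by Validate() above; assumptions, not CL code.
inline bool IsBlockTarget(uint16_t entry) { return entry >= 0x8000; }
inline size_t BlockDepthOf(uint16_t entry) { return entry - 0x8000; }  // 0x8002 -> two blocks out
inline uint16_t CaseIndexOf(uint16_t entry) { return entry; }          // 0x0001 -> case 1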
213 int OpcodeArity(const byte* pc) {
214 #define DECLARE_ARITY(name, ...) \
215 static const LocalType kTypes_##name[] = {__VA_ARGS__}; \
216 static const int kArity_##name = \
217 static_cast<int>(arraysize(kTypes_##name) - 1);
218
219 FOREACH_SIGNATURE(DECLARE_ARITY);
220 #undef DECLARE_ARITY
221
222 switch (static_cast<WasmOpcode>(*pc)) {
223 case kExprI8Const:
224 case kExprI32Const:
225 case kExprI64Const:
226 case kExprF64Const:
227 case kExprF32Const:
228 case kExprGetLocal:
229 case kExprLoadGlobal:
230 case kExprNop:
231 case kExprUnreachable:
232 return 0;
233
234 case kExprBr:
235 case kExprStoreGlobal:
236 case kExprSetLocal:
237 return 1;
238
239 case kExprIf:
240 case kExprBrIf:
241 return 2;
242 case kExprIfElse:
243 case kExprSelect:
244 return 3;
245
246 case kExprBlock:
247 case kExprLoop: {
248 BlockCountOperand operand(this, pc);
249 return operand.count;
250 }
251
252 case kExprCallFunction: {
253 FunctionIndexOperand operand(this, pc);
254 return function_env_->module->GetFunctionSignature(operand.index)
255 ->parameter_count();
256 }
257 case kExprCallIndirect: {
258 SignatureIndexOperand operand(this, pc);
259 return 1 + static_cast<int>(
260 function_env_->module->GetSignature(operand.index)
261 ->parameter_count());
262 }
263 case kExprReturn: {
264 return static_cast<int>(function_env_->sig->return_count());
265 }
266 case kExprTableSwitch: {
267 TableSwitchOperand operand(this, pc);
268 return 1 + operand.case_count;
269 }
270
271 #define DECLARE_OPCODE_CASE(name, opcode, sig) \
272 case kExpr##name: \
273 return kArity_##sig;
274
275 FOREACH_LOAD_MEM_OPCODE(DECLARE_OPCODE_CASE)
276 FOREACH_STORE_MEM_OPCODE(DECLARE_OPCODE_CASE)
277 FOREACH_MISC_MEM_OPCODE(DECLARE_OPCODE_CASE)
278 FOREACH_SIMPLE_OPCODE(DECLARE_OPCODE_CASE)
279 #undef DECLARE_OPCODE_CASE
280 }
281 UNREACHABLE();
282 return 0;
283 }
284
285 int OpcodeLength(const byte* pc) {
286 switch (static_cast<WasmOpcode>(*pc)) {
287 #define DECLARE_OPCODE_CASE(name, opcode, sig) case kExpr##name:
288 FOREACH_LOAD_MEM_OPCODE(DECLARE_OPCODE_CASE)
289 FOREACH_STORE_MEM_OPCODE(DECLARE_OPCODE_CASE)
290 #undef DECLARE_OPCODE_CASE
291 {
292 MemoryAccessOperand operand(this, pc);
293 return 1 + operand.length;
294 }
295 case kExprBlock:
296 case kExprLoop: {
297 BlockCountOperand operand(this, pc);
298 return 1 + operand.length;
299 }
300 case kExprBr:
301 case kExprBrIf: {
302 BreakDepthOperand operand(this, pc);
303 return 1 + operand.length;
304 }
305 case kExprStoreGlobal:
306 case kExprLoadGlobal: {
307 GlobalIndexOperand operand(this, pc);
308 return 1 + operand.length;
309 }
310
311 case kExprCallFunction: {
312 FunctionIndexOperand operand(this, pc);
313 return 1 + operand.length;
314 }
315 case kExprCallIndirect: {
316 SignatureIndexOperand operand(this, pc);
317 return 1 + operand.length;
318 }
319
320 case kExprSetLocal:
321 case kExprGetLocal: {
322 LocalIndexOperand operand(this, pc);
323 return 1 + operand.length;
324 }
325 case kExprTableSwitch: {
326 TableSwitchOperand operand(this, pc);
327 return 1 + operand.length;
328 }
329 case kExprI8Const:
330 return 2;
331 case kExprI32Const:
332 case kExprF32Const:
333 return 5;
334 case kExprI64Const:
335 case kExprF64Const:
336 return 9;
337
338 default:
339 return 1;
195 } 340 }
196 } 341 }
197 }; 342 };
198 343
199 344
200 // A shift-reduce-parser strategy for decoding Wasm code that uses an explicit 345 // A shift-reduce-parser strategy for decoding Wasm code that uses an explicit
201 // shift-reduce strategy with multiple internal stacks. 346 // shift-reduce strategy with multiple internal stacks.
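In concrete terms, the decode loop below shifts a Production expecting N children for each non-leaf bytecode and emits a completed Leaf for zero-arity ones; whenever the top production has all of its children it is reduced and attached as the next child of the production beneath it. A simplified outline (illustrative only, pseudocode in comments):

// while (pc_ < limit_) {
//   WasmOpcode opcode = static_cast<WasmOpcode>(*pc_);
//   if (arity(opcode) == 0) Leaf(type_of(opcode));   // push a finished tree
//   else Shift(type_of(opcode), arity(opcode));      // push a production expecting children
//   pc_ += OpcodeLength(pc_);                        // skip the opcode and its operands
//   // When the top production is done(), reduce it and hand the finished tree
//   // to the production below it (or record it as a top-level tree).
// }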
202 class LR_WasmDecoder : public WasmDecoder { 347 class LR_WasmDecoder : public WasmDecoder {
203 public: 348 public:
204 LR_WasmDecoder(Zone* zone, TFBuilder* builder) 349 LR_WasmDecoder(Zone* zone, TFBuilder* builder)
(...skipping 219 matching lines...)
424 return; 569 return;
425 } 570 }
426 continue; // back to decoding loop. 571 continue; // back to decoding loop.
427 } 572 }
428 573
429 switch (opcode) { 574 switch (opcode) {
430 case kExprNop: 575 case kExprNop:
431 Leaf(kAstStmt); 576 Leaf(kAstStmt);
432 break; 577 break;
433 case kExprBlock: { 578 case kExprBlock: {
434 int length = ByteOperand(pc_); 579 BlockCountOperand operand(this, pc_);
435 if (length < 1) { 580 if (operand.count < 1) {
436 Leaf(kAstStmt); 581 Leaf(kAstStmt);
437 } else { 582 } else {
438 Shift(kAstEnd, length); 583 Shift(kAstEnd, operand.count);
439 // The break environment is the outer environment. 584 // The break environment is the outer environment.
440 SsaEnv* break_env = ssa_env_; 585 SsaEnv* break_env = ssa_env_;
441 PushBlock(break_env); 586 PushBlock(break_env);
442 SetEnv("block:start", Steal(break_env)); 587 SetEnv("block:start", Steal(break_env));
443 } 588 }
444 len = 2; 589 len = 1 + operand.length;
445 break; 590 break;
446 } 591 }
447 case kExprLoop: { 592 case kExprLoop: {
448 int length = ByteOperand(pc_); 593 BlockCountOperand operand(this, pc_);
449 if (length < 1) { 594 if (operand.count < 1) {
450 Leaf(kAstStmt); 595 Leaf(kAstStmt);
451 } else { 596 } else {
452 Shift(kAstEnd, length); 597 Shift(kAstEnd, operand.count);
453 // The break environment is the outer environment. 598 // The break environment is the outer environment.
454 SsaEnv* break_env = ssa_env_; 599 SsaEnv* break_env = ssa_env_;
455 PushBlock(break_env); 600 PushBlock(break_env);
456 SsaEnv* cont_env = Steal(break_env); 601 SsaEnv* cont_env = Steal(break_env);
457 // The continue environment is the inner environment. 602 // The continue environment is the inner environment.
458 PrepareForLoop(cont_env); 603 PrepareForLoop(cont_env);
459 SetEnv("loop:start", Split(cont_env)); 604 SetEnv("loop:start", Split(cont_env));
460 if (ssa_env_->go()) ssa_env_->state = SsaEnv::kReached; 605 if (ssa_env_->go()) ssa_env_->state = SsaEnv::kReached;
461 PushBlock(cont_env); 606 PushBlock(cont_env);
462 blocks_.back().stack_depth = -1; // no production for inner block. 607 blocks_.back().stack_depth = -1; // no production for inner block.
463 } 608 }
464 len = 2; 609 len = 1 + operand.length;
465 break; 610 break;
466 } 611 }
467 case kExprIf: 612 case kExprIf:
468 Shift(kAstStmt, 2); 613 Shift(kAstStmt, 2);
469 break; 614 break;
470 case kExprIfElse: 615 case kExprIfElse:
471 Shift(kAstEnd, 3); // Result type is typeof(x) in {c ? x : y}. 616 Shift(kAstEnd, 3); // Result type is typeof(x) in {c ? x : y}.
472 break; 617 break;
473 case kExprSelect: 618 case kExprSelect:
474 Shift(kAstStmt, 3); // Result type is typeof(x) in {c ? x : y}. 619 Shift(kAstStmt, 3); // Result type is typeof(x) in {c ? x : y}.
475 break; 620 break;
476 case kExprBr: { 621 case kExprBr: {
477 uint32_t depth = ByteOperand(pc_); 622 BreakDepthOperand operand(this, pc_);
478 Shift(kAstEnd, 1); 623 if (Validate(pc_, operand, blocks_)) {
479 if (depth >= blocks_.size()) { 624 Shift(kAstEnd, 1);
480 error("improperly nested branch");
481 } 625 }
482 len = 2; 626 len = 1 + operand.length;
483 break; 627 break;
484 } 628 }
485 case kExprBrIf: { 629 case kExprBrIf: {
486 uint32_t depth = ByteOperand(pc_); 630 BreakDepthOperand operand(this, pc_);
487 Shift(kAstStmt, 2); 631 if (Validate(pc_, operand, blocks_)) {
488 if (depth >= blocks_.size()) { 632 Shift(kAstStmt, 2);
489 error("improperly nested conditional branch");
490 } 633 }
491 len = 2; 634 len = 1 + operand.length;
492 break; 635 break;
493 } 636 }
494 case kExprTableSwitch: { 637 case kExprTableSwitch: {
495 if (!checkAvailable(5)) { 638 TableSwitchOperand operand(this, pc_);
496 error("expected #tableswitch <cases> <table>, fell off end"); 639 if (Validate(pc_, operand, blocks_.size())) {
497 break; 640 Shift(kAstEnd, 1 + operand.case_count);
498 } 641 }
499 uint16_t case_count = read_u16(pc_ + 1); 642 len = 1 + operand.length;
500 uint16_t table_count = read_u16(pc_ + 3);
501 len = 5 + table_count * 2;
502
503 if (table_count == 0) {
504 error("tableswitch with 0 entries");
505 break;
506 }
507
508 if (!checkAvailable(len)) {
509 error("expected #tableswitch <cases> <table>, fell off end");
510 break;
511 }
512
513 Shift(kAstEnd, 1 + case_count);
514
515 // Verify table.
516 for (int i = 0; i < table_count; i++) {
517 uint16_t target = read_u16(pc_ + 5 + i * 2);
518 if (target >= 0x8000) {
519 size_t depth = target - 0x8000;
520 if (depth > blocks_.size()) {
521 error(pc_ + 5 + i * 2, "improper branch in tableswitch");
522 }
523 } else {
524 if (target >= case_count) {
525 error(pc_ + 5 + i * 2, "invalid case target in tableswitch");
526 }
527 }
528 }
529 break; 643 break;
530 } 644 }
531 case kExprReturn: { 645 case kExprReturn: {
532 int count = static_cast<int>(function_env_->sig->return_count()); 646 int count = static_cast<int>(function_env_->sig->return_count());
533 if (count == 0) { 647 if (count == 0) {
534 BUILD(Return, 0, builder_->Buffer(0)); 648 BUILD(Return, 0, builder_->Buffer(0));
535 ssa_env_->Kill(); 649 ssa_env_->Kill();
536 Leaf(kAstEnd); 650 Leaf(kAstEnd);
537 } else { 651 } else {
538 Shift(kAstEnd, count); 652 Shift(kAstEnd, count);
539 } 653 }
540 break; 654 break;
541 } 655 }
542 case kExprUnreachable: { 656 case kExprUnreachable: {
543 BUILD0(Unreachable); 657 BUILD0(Unreachable);
544 ssa_env_->Kill(SsaEnv::kControlEnd); 658 ssa_env_->Kill(SsaEnv::kControlEnd);
545 Leaf(kAstEnd, nullptr); 659 Leaf(kAstEnd, nullptr);
546 break; 660 break;
547 } 661 }
548 case kExprI8Const: { 662 case kExprI8Const: {
549 int32_t value = bit_cast<int8_t>(ByteOperand(pc_)); 663 ImmI8Operand operand(this, pc_);
550 Leaf(kAstI32, BUILD(Int32Constant, value)); 664 Leaf(kAstI32, BUILD(Int32Constant, operand.value));
551 len = 2; 665 len = 1 + operand.length;
bradnelson 2016/02/03 19:44:47 FWIW, the bookkeeping on these lengths is getting
titzer 2016/02/04 09:34:08 I was thinking of renaming {len} to {total_len} pa
552 break; 666 break;
553 } 667 }
554 case kExprI32Const: { 668 case kExprI32Const: {
555 uint32_t value = Uint32Operand(pc_); 669 ImmI32Operand operand(this, pc_);
556 Leaf(kAstI32, BUILD(Int32Constant, value)); 670 Leaf(kAstI32, BUILD(Int32Constant, operand.value));
557 len = 5; 671 len = 1 + operand.length;
558 break; 672 break;
559 } 673 }
560 case kExprI64Const: { 674 case kExprI64Const: {
561 uint64_t value = Uint64Operand(pc_); 675 ImmI64Operand operand(this, pc_);
562 Leaf(kAstI64, BUILD(Int64Constant, value)); 676 Leaf(kAstI64, BUILD(Int64Constant, operand.value));
563 len = 9; 677 len = 1 + operand.length;
564 break; 678 break;
565 } 679 }
566 case kExprF32Const: { 680 case kExprF32Const: {
567 float value = bit_cast<float>(Uint32Operand(pc_)); 681 ImmF32Operand operand(this, pc_);
568 Leaf(kAstF32, BUILD(Float32Constant, value)); 682 Leaf(kAstF32, BUILD(Float32Constant, operand.value));
569 len = 5; 683 len = 1 + operand.length;
570 break; 684 break;
571 } 685 }
572 case kExprF64Const: { 686 case kExprF64Const: {
573 double value = bit_cast<double>(Uint64Operand(pc_)); 687 ImmF64Operand operand(this, pc_);
574 Leaf(kAstF64, BUILD(Float64Constant, value)); 688 Leaf(kAstF64, BUILD(Float64Constant, operand.value));
575 len = 9; 689 len = 1 + operand.length;
576 break; 690 break;
577 } 691 }
578 case kExprGetLocal: { 692 case kExprGetLocal: {
579 uint32_t index; 693 LocalIndexOperand operand(this, pc_);
580 LocalType type = LocalOperand(pc_, &index, &len); 694 if (Validate(pc_, operand)) {
581 TFNode* val = 695 TFNode* val = build() ? ssa_env_->locals[operand.index] : nullptr;
582 build() && type != kAstStmt ? ssa_env_->locals[index] : nullptr; 696 Leaf(operand.type, val);
583 Leaf(type, val); 697 }
698 len = 1 + operand.length;
584 break; 699 break;
585 } 700 }
586 case kExprSetLocal: { 701 case kExprSetLocal: {
587 uint32_t index; 702 LocalIndexOperand operand(this, pc_);
588 LocalType type = LocalOperand(pc_, &index, &len); 703 if (Validate(pc_, operand)) {
589 Shift(type, 1); 704 Shift(operand.type, 1);
705 }
706 len = 1 + operand.length;
590 break; 707 break;
591 } 708 }
592 case kExprLoadGlobal: { 709 case kExprLoadGlobal: {
593 uint32_t index; 710 GlobalIndexOperand operand(this, pc_);
594 LocalType type = GlobalOperand(pc_, &index, &len); 711 if (Validate(pc_, operand)) {
595 Leaf(type, BUILD(LoadGlobal, index)); 712 Leaf(operand.type, BUILD(LoadGlobal, operand.index));
713 }
714 len = 1 + operand.length;
596 break; 715 break;
597 } 716 }
598 case kExprStoreGlobal: { 717 case kExprStoreGlobal: {
599 uint32_t index; 718 GlobalIndexOperand operand(this, pc_);
600 LocalType type = GlobalOperand(pc_, &index, &len); 719 if (Validate(pc_, operand)) {
601 Shift(type, 1); 720 Shift(operand.type, 1);
721 }
722 len = 1 + operand.length;
602 break; 723 break;
603 } 724 }
604 case kExprI32LoadMem8S: 725 case kExprI32LoadMem8S:
605 case kExprI32LoadMem8U: 726 case kExprI32LoadMem8U:
606 case kExprI32LoadMem16S: 727 case kExprI32LoadMem16S:
607 case kExprI32LoadMem16U: 728 case kExprI32LoadMem16U:
608 case kExprI32LoadMem: 729 case kExprI32LoadMem:
609 len = DecodeLoadMem(pc_, kAstI32); 730 len = DecodeLoadMem(pc_, kAstI32);
610 break; 731 break;
611 case kExprI64LoadMem8S: 732 case kExprI64LoadMem8S:
(...skipping 28 matching lines...)
640 case kExprF64StoreMem: 761 case kExprF64StoreMem:
641 len = DecodeStoreMem(pc_, kAstF64); 762 len = DecodeStoreMem(pc_, kAstF64);
642 break; 763 break;
643 case kExprMemorySize: 764 case kExprMemorySize:
644 Leaf(kAstI32, BUILD(MemSize, 0)); 765 Leaf(kAstI32, BUILD(MemSize, 0));
645 break; 766 break;
646 case kExprGrowMemory: 767 case kExprGrowMemory:
647 Shift(kAstI32, 1); 768 Shift(kAstI32, 1);
648 break; 769 break;
649 case kExprCallFunction: { 770 case kExprCallFunction: {
650 uint32_t unused; 771 FunctionIndexOperand operand(this, pc_);
651 FunctionSig* sig = FunctionSigOperand(pc_, &unused, &len); 772 if (Validate(pc_, operand)) {
652 if (sig) { 773 LocalType type = operand.sig->return_count() == 0
653 LocalType type = 774 ? kAstStmt
654 sig->return_count() == 0 ? kAstStmt : sig->GetReturn(); 775 : operand.sig->GetReturn();
655 Shift(type, static_cast<int>(sig->parameter_count())); 776 Shift(type, static_cast<int>(operand.sig->parameter_count()));
656 } else {
657 Leaf(kAstI32); // error
658 } 777 }
778 len = 1 + operand.length;
659 break; 779 break;
660 } 780 }
661 case kExprCallIndirect: { 781 case kExprCallIndirect: {
662 uint32_t unused; 782 SignatureIndexOperand operand(this, pc_);
663 FunctionSig* sig = SigOperand(pc_, &unused, &len); 783 if (Validate(pc_, operand)) {
664 if (sig) { 784 LocalType type = operand.sig->return_count() == 0
665 LocalType type = 785 ? kAstStmt
666 sig->return_count() == 0 ? kAstStmt : sig->GetReturn(); 786 : operand.sig->GetReturn();
667 Shift(type, static_cast<int>(1 + sig->parameter_count())); 787 Shift(type, static_cast<int>(1 + operand.sig->parameter_count()));
668 } else {
669 Leaf(kAstI32); // error
670 } 788 }
789 len = 1 + operand.length;
671 break; 790 break;
672 } 791 }
673 default: 792 default:
674 error("Invalid opcode"); 793 error("Invalid opcode");
675 return; 794 return;
676 } 795 }
677 pc_ += len; 796 pc_ += len;
678 if (pc_ >= limit_) { 797 if (pc_ >= limit_) {
679 // End of code reached or exceeded. 798 // End of code reached or exceeded.
680 if (pc_ > limit_ && ok()) { 799 if (pc_ > limit_ && ok()) {
681 error("Beyond end of code"); 800 error("Beyond end of code");
682 } 801 }
683 return; 802 return;
684 } 803 }
685 } 804 }
686 } 805 }
687 806
688 void PushBlock(SsaEnv* ssa_env) { 807 void PushBlock(SsaEnv* ssa_env) {
689 blocks_.push_back({ssa_env, static_cast<int>(stack_.size() - 1)}); 808 blocks_.push_back({ssa_env, static_cast<int>(stack_.size() - 1)});
690 } 809 }
691 810
692 int DecodeLoadMem(const byte* pc, LocalType type) { 811 int DecodeLoadMem(const byte* pc, LocalType type) {
693 int length = 2; 812 MemoryAccessOperand operand(this, pc);
694 uint32_t offset;
695 MemoryAccessOperand(pc, &length, &offset);
696 Shift(type, 1); 813 Shift(type, 1);
697 return length; 814 return 1 + operand.length;
698 } 815 }
699 816
700 int DecodeStoreMem(const byte* pc, LocalType type) { 817 int DecodeStoreMem(const byte* pc, LocalType type) {
701 int length = 2; 818 MemoryAccessOperand operand(this, pc);
702 uint32_t offset;
703 MemoryAccessOperand(pc, &length, &offset);
704 Shift(type, 2); 819 Shift(type, 2);
705 return length; 820 return 1 + operand.length;
706 } 821 }
707 822
708 void AddImplicitReturnAtEnd() { 823 void AddImplicitReturnAtEnd() {
709 int retcount = static_cast<int>(function_env_->sig->return_count()); 824 int retcount = static_cast<int>(function_env_->sig->return_count());
710 if (retcount == 0) { 825 if (retcount == 0) {
711 BUILD0(ReturnVoid); 826 BUILD0(ReturnVoid);
712 return; 827 return;
713 } 828 }
714 829
715 if (static_cast<int>(trees_.size()) < retcount) { 830 if (static_cast<int>(trees_.size()) < retcount) {
(...skipping 153 matching lines...)
869 TFNode* vals[2] = {p->tree->children[1]->node, 984 TFNode* vals[2] = {p->tree->children[1]->node,
870 p->tree->children[2]->node}; 985 p->tree->children[2]->node};
871 TFNode* phi = builder_->Phi(p->tree->type, 2, vals, merge); 986 TFNode* phi = builder_->Phi(p->tree->type, 2, vals, merge);
872 p->tree->node = phi; 987 p->tree->node = phi;
873 ssa_env_->control = merge; 988 ssa_env_->control = merge;
874 } 989 }
875 } 990 }
876 break; 991 break;
877 } 992 }
878 case kExprBr: { 993 case kExprBr: {
879 uint32_t depth = ByteOperand(p->pc()); 994 BreakDepthOperand operand(this, p->pc());
880 if (depth >= blocks_.size()) { 995 CHECK(Validate(p->pc(), operand, blocks_));
881 error("improperly nested branch"); 996 ReduceBreakToExprBlock(p, operand.target);
882 break;
883 }
884 Block* block = &blocks_[blocks_.size() - depth - 1];
885 ReduceBreakToExprBlock(p, block);
886 break; 997 break;
887 } 998 }
888 case kExprBrIf: { 999 case kExprBrIf: {
889 if (p->index == 1) { 1000 if (p->index == 1) {
890 TypeCheckLast(p, kAstI32); 1001 TypeCheckLast(p, kAstI32);
891 } else if (p->done()) { 1002 } else if (p->done()) {
892 uint32_t depth = ByteOperand(p->pc()); 1003 BreakDepthOperand operand(this, p->pc());
893 if (depth >= blocks_.size()) { 1004 CHECK(Validate(p->pc(), operand, blocks_));
894 error("improperly nested branch");
895 break;
896 }
897 Block* block = &blocks_[blocks_.size() - depth - 1];
898 SsaEnv* fenv = ssa_env_; 1005 SsaEnv* fenv = ssa_env_;
899 SsaEnv* tenv = Split(fenv); 1006 SsaEnv* tenv = Split(fenv);
900 BUILD(Branch, p->tree->children[0]->node, &tenv->control, 1007 BUILD(Branch, p->tree->children[0]->node, &tenv->control,
901 &fenv->control); 1008 &fenv->control);
902 ssa_env_ = tenv; 1009 ssa_env_ = tenv;
903 ReduceBreakToExprBlock(p, block); 1010 ReduceBreakToExprBlock(p, operand.target);
904 ssa_env_ = fenv; 1011 ssa_env_ = fenv;
905 } 1012 }
906 break; 1013 break;
907 } 1014 }
908 case kExprTableSwitch: { 1015 case kExprTableSwitch: {
909 if (p->index == 1) { 1016 if (p->index == 1) {
910 // Switch key finished. 1017 // Switch key finished.
911 TypeCheckLast(p, kAstI32); 1018 TypeCheckLast(p, kAstI32);
1019 if (failed()) break;
912 1020
913 uint16_t table_count = read_u16(p->pc() + 3); 1021 TableSwitchOperand operand(this, p->pc());
1022 DCHECK(Validate(p->pc(), operand, blocks_.size()));
914 1023
915 // Build the switch only if it has more than just a default target. 1024 // Build the switch only if it has more than just a default target.
916 bool build_switch = table_count > 1; 1025 bool build_switch = operand.table_count > 1;
917 TFNode* sw = nullptr; 1026 TFNode* sw = nullptr;
918 if (build_switch) sw = BUILD(Switch, table_count, p->last()->node); 1027 if (build_switch)
1028 sw = BUILD(Switch, operand.table_count, p->last()->node);
919 1029
920 // Allocate environments for each case. 1030 // Allocate environments for each case.
921 uint16_t case_count = read_u16(p->pc() + 1); 1031 SsaEnv** case_envs = zone_->NewArray<SsaEnv*>(operand.case_count);
922 SsaEnv** case_envs = zone_->NewArray<SsaEnv*>(case_count); 1032 for (int i = 0; i < operand.case_count; i++) {
923 for (int i = 0; i < case_count; i++) case_envs[i] = UnreachableEnv(); 1033 case_envs[i] = UnreachableEnv();
1034 }
924 1035
925 ifs_.push_back({nullptr, nullptr, case_envs}); 1036 ifs_.push_back({nullptr, nullptr, case_envs});
926 SsaEnv* break_env = ssa_env_; 1037 SsaEnv* break_env = ssa_env_;
927 PushBlock(break_env); 1038 PushBlock(break_env);
928 SsaEnv* copy = Steal(break_env); 1039 SsaEnv* copy = Steal(break_env);
929 ssa_env_ = copy; 1040 ssa_env_ = copy;
930 1041
931 // Build the environments for each case based on the table. 1042 // Build the environments for each case based on the table.
932 for (int i = 0; i < table_count; i++) { 1043 for (int i = 0; i < operand.table_count; i++) {
933 uint16_t target = read_u16(p->pc() + 5 + i * 2); 1044 uint16_t target = operand.read_entry(this, i);
934 SsaEnv* env = copy; 1045 SsaEnv* env = copy;
935 if (build_switch) { 1046 if (build_switch) {
936 env = Split(env); 1047 env = Split(env);
937 env->control = (i == table_count - 1) ? BUILD(IfDefault, sw) 1048 env->control = (i == operand.table_count - 1)
938 : BUILD(IfValue, i, sw); 1049 ? BUILD(IfDefault, sw)
1050 : BUILD(IfValue, i, sw);
939 } 1051 }
940 if (target >= 0x8000) { 1052 if (target >= 0x8000) {
941 // Targets an outer block. 1053 // Targets an outer block.
942 int depth = target - 0x8000; 1054 int depth = target - 0x8000;
943 SsaEnv* tenv = blocks_[blocks_.size() - depth - 1].ssa_env; 1055 SsaEnv* tenv = blocks_[blocks_.size() - depth - 1].ssa_env;
944 Goto(env, tenv); 1056 Goto(env, tenv);
945 } else { 1057 } else {
946 // Targets a case. 1058 // Targets a case.
947 Goto(env, case_envs[target]); 1059 Goto(env, case_envs[target]);
948 } 1060 }
(...skipping 25 matching lines...)
974 for (int i = 0; i < count; i++) { 1086 for (int i = 0; i < count; i++) {
975 buffer[i] = p->tree->children[i]->node; 1087 buffer[i] = p->tree->children[i]->node;
976 } 1088 }
977 BUILD(Return, count, buffer); 1089 BUILD(Return, count, buffer);
978 } 1090 }
979 ssa_env_->Kill(SsaEnv::kControlEnd); 1091 ssa_env_->Kill(SsaEnv::kControlEnd);
980 } 1092 }
981 break; 1093 break;
982 } 1094 }
983 case kExprSetLocal: { 1095 case kExprSetLocal: {
984 int unused = 0; 1096 LocalIndexOperand operand(this, p->pc());
985 uint32_t index; 1097 CHECK(Validate(p->pc(), operand));
986 LocalType type = LocalOperand(p->pc(), &index, &unused);
987 Tree* val = p->last(); 1098 Tree* val = p->last();
988 if (type == val->type) { 1099 if (operand.type == val->type) {
989 if (build()) ssa_env_->locals[index] = val->node; 1100 if (build()) ssa_env_->locals[operand.index] = val->node;
990 p->tree->node = val->node; 1101 p->tree->node = val->node;
991 } else { 1102 } else {
992 error(p->pc(), val->pc, "Typecheck failed in SetLocal"); 1103 error(p->pc(), val->pc, "Typecheck failed in SetLocal");
993 } 1104 }
994 break; 1105 break;
995 } 1106 }
996 case kExprStoreGlobal: { 1107 case kExprStoreGlobal: {
997 int unused = 0; 1108 GlobalIndexOperand operand(this, p->pc());
998 uint32_t index; 1109 CHECK(Validate(p->pc(), operand));
999 LocalType type = GlobalOperand(p->pc(), &index, &unused);
1000 Tree* val = p->last(); 1110 Tree* val = p->last();
1001 if (type == val->type) { 1111 if (operand.type == val->type) {
1002 BUILD(StoreGlobal, index, val->node); 1112 BUILD(StoreGlobal, operand.index, val->node);
1003 p->tree->node = val->node; 1113 p->tree->node = val->node;
1004 } else { 1114 } else {
1005 error(p->pc(), val->pc, "Typecheck failed in StoreGlobal"); 1115 error(p->pc(), val->pc, "Typecheck failed in StoreGlobal");
1006 } 1116 }
1007 break; 1117 break;
1008 } 1118 }
1009 1119
1010 case kExprI32LoadMem8S: 1120 case kExprI32LoadMem8S:
1011 return ReduceLoadMem(p, kAstI32, MachineType::Int8()); 1121 return ReduceLoadMem(p, kAstI32, MachineType::Int8());
1012 case kExprI32LoadMem8U: 1122 case kExprI32LoadMem8U:
(...skipping 48 matching lines...)
1061 case kExprF64StoreMem: 1171 case kExprF64StoreMem:
1062 return ReduceStoreMem(p, kAstF64, MachineType::Float64()); 1172 return ReduceStoreMem(p, kAstF64, MachineType::Float64());
1063 1173
1064 case kExprGrowMemory: 1174 case kExprGrowMemory:
1065 TypeCheckLast(p, kAstI32); 1175 TypeCheckLast(p, kAstI32);
1066 // TODO(titzer): build node for GrowMemory 1176 // TODO(titzer): build node for GrowMemory
1067 p->tree->node = BUILD(Int32Constant, 0); 1177 p->tree->node = BUILD(Int32Constant, 0);
1068 return; 1178 return;
1069 1179
1070 case kExprCallFunction: { 1180 case kExprCallFunction: {
1071 int len; 1181 FunctionIndexOperand operand(this, p->pc());
1072 uint32_t index; 1182 CHECK(Validate(p->pc(), operand));
1073 FunctionSig* sig = FunctionSigOperand(p->pc(), &index, &len);
1074 if (!sig) break;
1075 if (p->index > 0) { 1183 if (p->index > 0) {
1076 TypeCheckLast(p, sig->GetParam(p->index - 1)); 1184 TypeCheckLast(p, operand.sig->GetParam(p->index - 1));
1077 } 1185 }
1078 if (p->done() && build()) { 1186 if (p->done() && build()) {
1079 uint32_t count = p->tree->count + 1; 1187 uint32_t count = p->tree->count + 1;
1080 TFNode** buffer = builder_->Buffer(count); 1188 TFNode** buffer = builder_->Buffer(count);
1081 FunctionSig* sig = FunctionSigOperand(p->pc(), &index, &len);
1082 USE(sig);
1083 buffer[0] = nullptr; // reserved for code object. 1189 buffer[0] = nullptr; // reserved for code object.
1084 for (uint32_t i = 1; i < count; i++) { 1190 for (uint32_t i = 1; i < count; i++) {
1085 buffer[i] = p->tree->children[i - 1]->node; 1191 buffer[i] = p->tree->children[i - 1]->node;
1086 } 1192 }
1087 p->tree->node = builder_->CallDirect(index, buffer); 1193 p->tree->node = builder_->CallDirect(operand.index, buffer);
1088 } 1194 }
1089 break; 1195 break;
1090 } 1196 }
1091 case kExprCallIndirect: { 1197 case kExprCallIndirect: {
1092 int len; 1198 SignatureIndexOperand operand(this, p->pc());
1093 uint32_t index; 1199 CHECK(Validate(p->pc(), operand));
1094 FunctionSig* sig = SigOperand(p->pc(), &index, &len);
1095 if (p->index == 1) { 1200 if (p->index == 1) {
1096 TypeCheckLast(p, kAstI32); 1201 TypeCheckLast(p, kAstI32);
1097 } else { 1202 } else {
1098 TypeCheckLast(p, sig->GetParam(p->index - 2)); 1203 TypeCheckLast(p, operand.sig->GetParam(p->index - 2));
1099 } 1204 }
1100 if (p->done() && build()) { 1205 if (p->done() && build()) {
1101 uint32_t count = p->tree->count; 1206 uint32_t count = p->tree->count;
1102 TFNode** buffer = builder_->Buffer(count); 1207 TFNode** buffer = builder_->Buffer(count);
1103 for (uint32_t i = 0; i < count; i++) { 1208 for (uint32_t i = 0; i < count; i++) {
1104 buffer[i] = p->tree->children[i]->node; 1209 buffer[i] = p->tree->children[i]->node;
1105 } 1210 }
1106 p->tree->node = builder_->CallIndirect(index, buffer); 1211 p->tree->node = builder_->CallIndirect(operand.index, buffer);
1107 } 1212 }
1108 break; 1213 break;
1109 } 1214 }
1110 default: 1215 default:
1111 break; 1216 break;
1112 } 1217 }
1113 } 1218 }
1114 1219
1115 void ReduceBreakToExprBlock(Production* p, Block* block) { 1220 void ReduceBreakToExprBlock(Production* p, Block* block) {
1116 if (block->stack_depth < 0) { 1221 if (block->stack_depth < 0) {
(...skipping 28 matching lines...)
1145 p->tree->node = CreateOrMergeIntoPhi(type, target->control, 1250 p->tree->node = CreateOrMergeIntoPhi(type, target->control,
1146 p->tree->node, expr->node); 1251 p->tree->node, expr->node);
1147 } 1252 }
1148 } 1253 }
1149 } 1254 }
1150 1255
1151 void ReduceLoadMem(Production* p, LocalType type, MachineType mem_type) { 1256 void ReduceLoadMem(Production* p, LocalType type, MachineType mem_type) {
1152 DCHECK_EQ(1, p->index); 1257 DCHECK_EQ(1, p->index);
1153 TypeCheckLast(p, kAstI32); // index 1258 TypeCheckLast(p, kAstI32); // index
1154 if (build()) { 1259 if (build()) {
1155 int length = 0; 1260 MemoryAccessOperand operand(this, p->pc());
1156 uint32_t offset = 0;
1157 MemoryAccessOperand(p->pc(), &length, &offset);
1158 p->tree->node = 1261 p->tree->node =
1159 builder_->LoadMem(type, mem_type, p->last()->node, offset); 1262 builder_->LoadMem(type, mem_type, p->last()->node, operand.offset);
1160 } 1263 }
1161 } 1264 }
1162 1265
1163 void ReduceStoreMem(Production* p, LocalType type, MachineType mem_type) { 1266 void ReduceStoreMem(Production* p, LocalType type, MachineType mem_type) {
1164 if (p->index == 1) { 1267 if (p->index == 1) {
1165 TypeCheckLast(p, kAstI32); // index 1268 TypeCheckLast(p, kAstI32); // index
1166 } else { 1269 } else {
1167 DCHECK_EQ(2, p->index); 1270 DCHECK_EQ(2, p->index);
1168 TypeCheckLast(p, type); 1271 TypeCheckLast(p, type);
1169 if (build()) { 1272 if (build()) {
1170 int length = 0; 1273 MemoryAccessOperand operand(this, p->pc());
1171 uint32_t offset = 0;
1172 MemoryAccessOperand(p->pc(), &length, &offset);
1173 TFNode* val = p->tree->children[1]->node; 1274 TFNode* val = p->tree->children[1]->node;
1174 builder_->StoreMem(mem_type, p->tree->children[0]->node, offset, val); 1275 builder_->StoreMem(mem_type, p->tree->children[0]->node, operand.offset,
1276 val);
1175 p->tree->node = val; 1277 p->tree->node = val;
1176 } 1278 }
1177 } 1279 }
1178 } 1280 }
1179 1281
1180 void TypeCheckLast(Production* p, LocalType expected) { 1282 void TypeCheckLast(Production* p, LocalType expected) {
1181 LocalType result = p->last()->type; 1283 LocalType result = p->last()->type;
1182 if (result == expected) return; 1284 if (result == expected) return;
1183 if (result == kAstEnd) return; 1285 if (result == kAstEnd) return;
1184 if (expected != kAstStmt) { 1286 if (expected != kAstStmt) {
1185 error(p->pc(), p->last()->pc, 1287 error(p->pc(), p->last()->pc,
1186 "%s[%d] expected type %s, found %s of type %s", 1288 "%s[%d] expected type %s, found %s of type %s",
1187 WasmOpcodes::OpcodeName(p->opcode()), p->index - 1, 1289 WasmOpcodes::OpcodeName(p->opcode()), p->index - 1,
1188 WasmOpcodes::TypeName(expected), 1290 WasmOpcodes::TypeName(expected),
1189 WasmOpcodes::OpcodeName(p->last()->opcode()), 1291 WasmOpcodes::OpcodeName(p->last()->opcode()),
1190 WasmOpcodes::TypeName(p->last()->type)); 1292 WasmOpcodes::TypeName(p->last()->type));
1191 } 1293 }
1192 } 1294 }
1193 1295
1194 void SetEnv(const char* reason, SsaEnv* env) { 1296 void SetEnv(const char* reason, SsaEnv* env) {
1195 TRACE(" env = %p, block depth = %d, reason = %s", static_cast<void*>(env), 1297 TRACE(" env = %p, block depth = %d, reason = %s", static_cast<void*>(env),
1196 static_cast<int>(blocks_.size()), reason); 1298 static_cast<int>(blocks_.size()), reason);
1197 if (env->control != nullptr && FLAG_trace_wasm_decoder) { 1299 if (FLAG_trace_wasm_decoder && env && env->control) {
1198 TRACE(", control = "); 1300 TRACE(", control = ");
1199 compiler::WasmGraphBuilder::PrintDebugName(env->control); 1301 compiler::WasmGraphBuilder::PrintDebugName(env->control);
1200 } 1302 }
1201 TRACE("\n"); 1303 TRACE("\n");
1202 ssa_env_ = env; 1304 ssa_env_ = env;
1203 if (builder_) { 1305 if (builder_) {
1204 builder_->set_control_ptr(&env->control); 1306 builder_->set_control_ptr(&env->control);
1205 builder_->set_effect_ptr(&env->effect); 1307 builder_->set_effect_ptr(&env->effect);
1206 } 1308 }
1207 } 1309 }
(...skipping 232 matching lines...)
1440 } 1542 }
1441 if (tree.count > 0) os << ")"; 1543 if (tree.count > 0) os << ")";
1442 return os; 1544 return os;
1443 } 1545 }
1444 1546
1445 1547
1446 ReadUnsignedLEB128ErrorCode ReadUnsignedLEB128Operand(const byte* pc, 1548 ReadUnsignedLEB128ErrorCode ReadUnsignedLEB128Operand(const byte* pc,
1447 const byte* limit, 1549 const byte* limit,
1448 int* length, 1550 int* length,
1449 uint32_t* result) { 1551 uint32_t* result) {
1450 *result = 0; 1552 Decoder decoder(pc, limit);
1451 const byte* ptr = pc; 1553 *result = decoder.checked_read_u32v(pc, 0, length);
1452 const byte* end = pc + 5; // maximum 5 bytes. 1554 if (decoder.ok()) return kNoError;
1453 if (end > limit) end = limit; 1555 return (limit - pc) > 1 ? kInvalidLEB128 : kMissingLEB128;
1454 int shift = 0;
1455 byte b = 0;
1456 while (ptr < end) {
1457 b = *ptr++;
1458 *result = *result | ((b & 0x7F) << shift);
1459 if ((b & 0x80) == 0) break;
1460 shift += 7;
1461 }
1462 DCHECK_LE(ptr - pc, 5);
1463 *length = static_cast<int>(ptr - pc);
1464 if (ptr == end && (b & 0x80)) {
1465 return kInvalidLEB128;
1466 } else if (*length == 0) {
1467 return kMissingLEB128;
1468 } else {
1469 return kNoError;
1470 }
1471 } 1556 }
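Unsigned LEB128 packs 7 payload bits per byte, least-significant group first, with the top bit of each byte as a continuation flag, so a u32 needs at most 5 bytes. The new code simply delegates to Decoder::checked_read_u32v; the removed loop above did the equivalent of this standalone sketch (error handling for truncated input omitted):

// Minimal sketch mirroring the removed loop; not the code the CL actually uses.
uint32_t DecodeU32LEB128(const uint8_t* p, int* length) {
  uint32_t result = 0;
  int shift = 0;
  int i = 0;
  while (i < 5) {                                          // at most 5 bytes for a u32
    uint8_t b = p[i++];
    result |= static_cast<uint32_t>(b & 0x7F) << shift;
    shift += 7;
    if ((b & 0x80) == 0) break;                            // continuation bit clear: done
  }
  *length = i;
  return result;  // e.g. {0xE5, 0x8E, 0x26} decodes to 624485 with *length == 3
}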
1472 1557
1473 1558 int OpcodeLength(const byte* pc, const byte* end) {
1474 // TODO(titzer): move this into WasmDecoder and bounds check accesses. 1559 WasmDecoder decoder(nullptr, pc, end);
1475 int OpcodeLength(const byte* pc) { 1560 return decoder.OpcodeLength(pc);
1476 switch (static_cast<WasmOpcode>(*pc)) {
1477 #define DECLARE_OPCODE_CASE(name, opcode, sig) case kExpr##name:
1478 FOREACH_LOAD_MEM_OPCODE(DECLARE_OPCODE_CASE)
1479 FOREACH_STORE_MEM_OPCODE(DECLARE_OPCODE_CASE)
1480 #undef DECLARE_OPCODE_CASE
1481 {
1482 // Loads and stores have an optional offset.
1483 byte bitfield = pc[1];
1484 if (MemoryAccess::OffsetField::decode(bitfield)) {
1485 int length;
1486 uint32_t result = 0;
1487 ReadUnsignedLEB128Operand(pc + 2, pc + 7, &length, &result);
1488 return 2 + length;
1489 }
1490 return 2;
1491 }
1492 case kExprI8Const:
1493 case kExprBlock:
1494 case kExprLoop:
1495 case kExprBr:
1496 case kExprBrIf:
1497 return 2;
1498 case kExprI32Const:
1499 case kExprF32Const:
1500 return 5;
1501 case kExprI64Const:
1502 case kExprF64Const:
1503 return 9;
1504 case kExprStoreGlobal:
1505 case kExprSetLocal:
1506 case kExprLoadGlobal:
1507 case kExprCallFunction:
1508 case kExprCallIndirect:
1509 case kExprGetLocal: {
1510 int length;
1511 uint32_t result = 0;
1512 ReadUnsignedLEB128Operand(pc + 1, pc + 6, &length, &result);
1513 return 1 + length;
1514 }
1515 case kExprTableSwitch: {
1516 uint16_t table_count = *reinterpret_cast<const uint16_t*>(pc + 3);
1517 return 5 + table_count * 2;
1518 }
1519
1520 default:
1521 return 1;
1522 }
1523 } 1561 }
1524 1562
1525 1563 int OpcodeArity(FunctionEnv* env, const byte* pc, const byte* end) {
1526 // TODO(titzer): move this into WasmDecoder and bounds check accesses. 1564 WasmDecoder decoder(env, pc, end);
1527 int OpcodeArity(FunctionEnv* env, const byte* pc) { 1565 return decoder.OpcodeArity(pc);
1528 #define DECLARE_ARITY(name, ...) \
1529 static const LocalType kTypes_##name[] = {__VA_ARGS__}; \
1530 static const int kArity_##name = \
1531 static_cast<int>(arraysize(kTypes_##name) - 1);
1532
1533 FOREACH_SIGNATURE(DECLARE_ARITY);
1534 #undef DECLARE_ARITY
1535
1536 switch (static_cast<WasmOpcode>(*pc)) {
1537 case kExprI8Const:
1538 case kExprI32Const:
1539 case kExprI64Const:
1540 case kExprF64Const:
1541 case kExprF32Const:
1542 case kExprGetLocal:
1543 case kExprLoadGlobal:
1544 case kExprNop:
1545 case kExprUnreachable:
1546 return 0;
1547
1548 case kExprBr:
1549 case kExprStoreGlobal:
1550 case kExprSetLocal:
1551 return 1;
1552
1553 case kExprIf:
1554 case kExprBrIf:
1555 return 2;
1556 case kExprIfElse:
1557 case kExprSelect:
1558 return 3;
1559 case kExprBlock:
1560 case kExprLoop:
1561 return *(pc + 1);
1562
1563 case kExprCallFunction: {
1564 int index = *(pc + 1);
1565 return static_cast<int>(
1566 env->module->GetFunctionSignature(index)->parameter_count());
1567 }
1568 case kExprCallIndirect: {
1569 int index = *(pc + 1);
1570 return 1 + static_cast<int>(
1571 env->module->GetSignature(index)->parameter_count());
1572 }
1573 case kExprReturn:
1574 return static_cast<int>(env->sig->return_count());
1575 case kExprTableSwitch: {
1576 uint16_t case_count = *reinterpret_cast<const uint16_t*>(pc + 1);
1577 return 1 + case_count;
1578 }
1579
1580 #define DECLARE_OPCODE_CASE(name, opcode, sig) \
1581 case kExpr##name: \
1582 return kArity_##sig;
1583
1584 FOREACH_LOAD_MEM_OPCODE(DECLARE_OPCODE_CASE)
1585 FOREACH_STORE_MEM_OPCODE(DECLARE_OPCODE_CASE)
1586 FOREACH_MISC_MEM_OPCODE(DECLARE_OPCODE_CASE)
1587 FOREACH_SIMPLE_OPCODE(DECLARE_OPCODE_CASE)
1588 #undef DECLARE_OPCODE_CASE
1589 }
1590 UNREACHABLE();
1591 return 0;
1592 } 1566 }
1593 1567
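Both free functions are now thin wrappers that construct a throwaway WasmDecoder over the byte range and delegate, instead of duplicating the opcode switch. A hypothetical call site (names assumed):

// Hypothetical call site; env, pc and end are assumed to come from the caller.
int length = OpcodeLength(pc, end);      // bounds-checked via the temporary WasmDecoder
int arity  = OpcodeArity(env, pc, end);  // env is needed for call/return/tableswitch arity
const byte* next_pc = pc + length;       // advance to the next bytecode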
1594
1595
1596 void PrintAst(FunctionEnv* env, const byte* start, const byte* end) { 1568 void PrintAst(FunctionEnv* env, const byte* start, const byte* end) {
1569 WasmDecoder decoder(env, start, end);
1597 const byte* pc = start; 1570 const byte* pc = start;
1598 std::vector<int> arity_stack; 1571 std::vector<int> arity_stack;
1599 while (pc < end) { 1572 while (pc < end) {
1600 int arity = OpcodeArity(env, pc); 1573 int arity = decoder.OpcodeArity(pc);
1601 size_t length = OpcodeLength(pc); 1574 size_t length = decoder.OpcodeLength(pc);
1602 1575
1603 for (auto arity : arity_stack) { 1576 for (auto arity : arity_stack) {
1604 printf(" "); 1577 printf(" ");
1605 USE(arity); 1578 USE(arity);
1606 } 1579 }
1607 1580
1608 WasmOpcode opcode = static_cast<WasmOpcode>(*pc); 1581 WasmOpcode opcode = static_cast<WasmOpcode>(*pc);
1609 printf("k%s,", WasmOpcodes::OpcodeName(opcode)); 1582 printf("k%s,", WasmOpcodes::OpcodeName(opcode));
1610 1583
1611 for (size_t i = 1; i < length; i++) { 1584 for (size_t i = 1; i < length; i++) {
1612 printf(" 0x%02x,", pc[i]); 1585 printf(" 0x%02x,", pc[i]);
1613 } 1586 }
1614 pc += length; 1587 pc += length;
1615 printf("\n"); 1588 printf("\n");
1616 1589
1617 arity_stack.push_back(arity); 1590 arity_stack.push_back(arity);
1618 while (arity_stack.back() == 0) { 1591 while (arity_stack.back() == 0) {
1619 arity_stack.pop_back(); 1592 arity_stack.pop_back();
1620 if (arity_stack.empty()) break; 1593 if (arity_stack.empty()) break;
1621 arity_stack.back()--; 1594 arity_stack.back()--;
1622 } 1595 }
1623 } 1596 }
1624 } 1597 }
1625 1598
1626
1627 // Analyzes loop bodies for static assignments to locals, which helps in 1599 // Analyzes loop bodies for static assignments to locals, which helps in
1628 // reducing the number of phis introduced at loop headers. 1600 // reducing the number of phis introduced at loop headers.
1629 class LoopAssignmentAnalyzer : public WasmDecoder { 1601 class LoopAssignmentAnalyzer : public WasmDecoder {
1630 public: 1602 public:
1631 LoopAssignmentAnalyzer(Zone* zone, FunctionEnv* function_env) : zone_(zone) { 1603 LoopAssignmentAnalyzer(Zone* zone, FunctionEnv* function_env) : zone_(zone) {
1632 function_env_ = function_env; 1604 function_env_ = function_env;
1633 } 1605 }
1634 1606
1635 BitVector* Analyze(const byte* pc, const byte* limit) { 1607 BitVector* Analyze(const byte* pc, const byte* limit) {
1636 Decoder::Reset(pc, limit); 1608 Decoder::Reset(pc, limit);
1637 if (pc_ >= limit_) return nullptr; 1609 if (pc_ >= limit_) return nullptr;
1638 if (*pc_ != kExprLoop) return nullptr; 1610 if (*pc_ != kExprLoop) return nullptr;
1639 1611
1640 BitVector* assigned = 1612 BitVector* assigned =
1641 new (zone_) BitVector(function_env_->total_locals, zone_); 1613 new (zone_) BitVector(function_env_->total_locals, zone_);
1642 // Keep a stack to model the nesting of expressions. 1614 // Keep a stack to model the nesting of expressions.
1643 std::vector<int> arity_stack; 1615 std::vector<int> arity_stack;
1644 arity_stack.push_back(OpcodeArity(function_env_, pc_)); 1616 arity_stack.push_back(OpcodeArity(pc_));
1645 pc_ += OpcodeLength(pc_); 1617 pc_ += OpcodeLength(pc_);
1646 1618
1647 // Iteratively process all AST nodes nested inside the loop. 1619 // Iteratively process all AST nodes nested inside the loop.
1648 while (pc_ < limit_) { 1620 while (pc_ < limit_) {
1649 WasmOpcode opcode = static_cast<WasmOpcode>(*pc_); 1621 WasmOpcode opcode = static_cast<WasmOpcode>(*pc_);
1650 int arity = 0; 1622 int arity = 0;
1651 int length = 1; 1623 int length = 1;
1652 if (opcode == kExprSetLocal) { 1624 if (opcode == kExprSetLocal) {
1653 uint32_t index; 1625 LocalIndexOperand operand(this, pc_);
1654 LocalOperand(pc_, &index, &length);
1655 if (assigned->length() > 0 && 1626 if (assigned->length() > 0 &&
1656 static_cast<int>(index) < assigned->length()) { 1627 static_cast<int>(operand.index) < assigned->length()) {
1657 // Unverified code might have an out-of-bounds index. 1628 // Unverified code might have an out-of-bounds index.
1658 assigned->Add(index); 1629 assigned->Add(operand.index);
1659 } 1630 }
1660 arity = 1; 1631 arity = 1;
1632 length = 1 + operand.length;
1661 } else { 1633 } else {
1662 arity = OpcodeArity(function_env_, pc_); 1634 arity = OpcodeArity(pc_);
1663 length = OpcodeLength(pc_); 1635 length = OpcodeLength(pc_);
1664 } 1636 }
1665 1637
1666 pc_ += length; 1638 pc_ += length;
1667 arity_stack.push_back(arity); 1639 arity_stack.push_back(arity);
1668 while (arity_stack.back() == 0) { 1640 while (arity_stack.back() == 0) {
1669 arity_stack.pop_back(); 1641 arity_stack.pop_back();
1670 if (arity_stack.empty()) return assigned; // reached end of loop 1642 if (arity_stack.empty()) return assigned; // reached end of loop
1671 arity_stack.back()--; 1643 arity_stack.back()--;
1672 } 1644 }
1673 } 1645 }
1674 return assigned; 1646 return assigned;
1675 } 1647 }
1676 1648
1677 private: 1649 private:
1678 Zone* zone_; 1650 Zone* zone_;
1679 }; 1651 };
1680 1652
1681 1653
1682 BitVector* AnalyzeLoopAssignmentForTesting(Zone* zone, FunctionEnv* env, 1654 BitVector* AnalyzeLoopAssignmentForTesting(Zone* zone, FunctionEnv* env,
1683 const byte* start, const byte* end) { 1655 const byte* start, const byte* end) {
1684 LoopAssignmentAnalyzer analyzer(zone, env); 1656 LoopAssignmentAnalyzer analyzer(zone, env);
1685 return analyzer.Analyze(start, end); 1657 return analyzer.Analyze(start, end);
1686 } 1658 }
1687 1659
1688 } // namespace wasm 1660 } // namespace wasm
1689 } // namespace internal 1661 } // namespace internal
1690 } // namespace v8 1662 } // namespace v8