OLD | NEW |
1 // Copyright 2015 the V8 project authors. All rights reserved. | 1 // Copyright 2015 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/base/platform/elapsed-timer.h" | 5 #include "src/base/platform/elapsed-timer.h" |
6 #include "src/signature.h" | 6 #include "src/signature.h" |
7 | 7 |
8 #include "src/bit-vector.h" | 8 #include "src/bit-vector.h" |
9 #include "src/flags.h" | 9 #include "src/flags.h" |
10 #include "src/handles.h" | 10 #include "src/handles.h" |
(...skipping 23 matching lines...) |
34 struct Tree { | 34 struct Tree { |
35 LocalType type; // tree type. | 35 LocalType type; // tree type. |
36 uint32_t count; // number of children. | 36 uint32_t count; // number of children. |
37 const byte* pc; // start of the syntax tree. | 37 const byte* pc; // start of the syntax tree. |
38 TFNode* node; // node in the TurboFan graph. | 38 TFNode* node; // node in the TurboFan graph. |
39 Tree* children[1]; // pointers to children. | 39 Tree* children[1]; // pointers to children. |
40 | 40 |
41 WasmOpcode opcode() const { return static_cast<WasmOpcode>(*pc); } | 41 WasmOpcode opcode() const { return static_cast<WasmOpcode>(*pc); } |
42 }; | 42 }; |
43 | 43 |
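A note on the `Tree* children[1]` member above: the decoder over-allocates each Tree so the one-element array really holds `count` child pointers. A minimal standalone sketch of that idiom, with a hypothetical NewTree helper standing in for the decoder's Zone-based allocation:

  #include <cstdint>
  #include <cstdlib>

  struct SketchTree {
    uint32_t count;           // number of children.
    SketchTree* children[1];  // actually 'count' entries long.
  };

  // Over-allocate so the trailing array can hold 'count' pointers.
  static SketchTree* NewTree(uint32_t count) {
    size_t extra = count > 1 ? (count - 1) * sizeof(SketchTree*) : 0;
    SketchTree* t =
        static_cast<SketchTree*>(malloc(sizeof(SketchTree) + extra));
    t->count = count;
    for (uint32_t i = 0; i < count; i++) t->children[i] = nullptr;
    return t;
  }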
44 | |
45 // A production represents an incomplete decoded tree in the LR decoder. | 44 // A production represents an incomplete decoded tree in the LR decoder. |
46 struct Production { | 45 struct Production { |
47 Tree* tree; // the root of the syntax tree. | 46 Tree* tree; // the root of the syntax tree. |
48 int index; // the current index into the children of the tree. | 47 int index; // the current index into the children of the tree. |
49 | 48 |
50 WasmOpcode opcode() const { return static_cast<WasmOpcode>(*pc()); } | 49 WasmOpcode opcode() const { return static_cast<WasmOpcode>(*pc()); } |
51 const byte* pc() const { return tree->pc; } | 50 const byte* pc() const { return tree->pc; } |
52 bool done() const { return index >= static_cast<int>(tree->count); } | 51 bool done() const { return index >= static_cast<int>(tree->count); } |
53 Tree* last() const { return index > 0 ? tree->children[index - 1] : nullptr; } | 52 Tree* last() const { return index > 0 ? tree->children[index - 1] : nullptr; } |
54 }; | 53 }; |
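How a Production drives the decoder, in sketch form: the main loop shifts a new production for each operator and, once done() reports all children decoded, reduces it into its parent. A simplified, self-contained model of that reduce step (hypothetical Node/Prod stand-ins for Tree/Production):

  #include <cstdint>
  #include <vector>

  struct Node { uint32_t count; Node* children[4]; };  // stand-in for Tree.
  struct Prod {
    Node* tree;
    int index;
    bool done() const { return index >= static_cast<int>(tree->count); }
  };

  // Pop completed productions and attach each finished tree as the next
  // child of the production below it on the stack.
  static void ReduceCompleted(std::vector<Prod>* stack) {
    while (!stack->empty() && stack->back().done()) {
      Node* finished = stack->back().tree;
      stack->pop_back();
      if (stack->empty()) break;  // whole expression tree is complete.
      Prod* parent = &stack->back();
      parent->tree->children[parent->index++] = finished;
    }
  }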
(...skipping 41 matching lines...) |
96 // TF graphs when decoding inputs that have unreachable code. | 95 // TF graphs when decoding inputs that have unreachable code. |
97 #define BUILD(func, ...) (build() ? builder_->func(__VA_ARGS__) : nullptr) | 96 #define BUILD(func, ...) (build() ? builder_->func(__VA_ARGS__) : nullptr) |
98 #define BUILD0(func) (build() ? builder_->func() : nullptr) | 97 #define BUILD0(func) (build() ? builder_->func() : nullptr) |
99 | 98 |
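The guard in these macros lets the decoder keep type-checking through unreachable code without emitting TurboFan nodes; for instance, BUILD(Int32Constant, value) expands to:

  // No TF node is created unless the current SSA environment is
  // reachable and a graph builder is present.
  (build() ? builder_->Int32Constant(value) : nullptr)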
100 | 99 |
101 // Generic Wasm bytecode decoder with utilities for decoding operands, | 100 // Generic Wasm bytecode decoder with utilities for decoding operands, |
102 // lengths, etc. | 101 // lengths, etc. |
103 class WasmDecoder : public Decoder { | 102 class WasmDecoder : public Decoder { |
104 public: | 103 public: |
105 WasmDecoder() : Decoder(nullptr, nullptr), function_env_(nullptr) {} | 104 WasmDecoder() : Decoder(nullptr, nullptr), function_env_(nullptr) {} |
106 | 105 WasmDecoder(FunctionEnv* env, const byte* start, const byte* end) |
107 protected: | 106 : Decoder(start, end), function_env_(env) {} |
108 FunctionEnv* function_env_; | 107 FunctionEnv* function_env_; |
109 | 108 |
110 void Reset(FunctionEnv* function_env, const byte* start, const byte* end) { | 109 void Reset(FunctionEnv* function_env, const byte* start, const byte* end) { |
111 Decoder::Reset(start, end); | 110 Decoder::Reset(start, end); |
112 function_env_ = function_env; | 111 function_env_ = function_env; |
113 } | 112 } |
114 | 113 |
115 byte ByteOperand(const byte* pc, const char* msg = "missing 1-byte operand") { | 114 byte ByteOperand(const byte* pc, const char* msg = "missing 1-byte operand") { |
116 if ((pc + sizeof(byte)) >= limit_) { | 115 if ((pc + sizeof(byte)) >= limit_) { |
117 error(pc, msg); | 116 error(pc, msg); |
(...skipping 11 matching lines...) |
129 } | 128 } |
130 | 129 |
131 uint64_t Uint64Operand(const byte* pc) { | 130 uint64_t Uint64Operand(const byte* pc) { |
132 if ((pc + sizeof(uint64_t)) >= limit_) { | 131 if ((pc + sizeof(uint64_t)) >= limit_) { |
133 error(pc, "missing 8-byte operand"); | 132 error(pc, "missing 8-byte operand"); |
134 return 0; | 133 return 0; |
135 } | 134 } |
136 return read_u64(pc + 1); | 135 return read_u64(pc + 1); |
137 } | 136 } |
138 | 137 |
139 LocalType LocalOperand(const byte* pc, uint32_t* index, int* length) { | 138 inline bool Validate(const byte* pc, LocalIndexOperand& operand) { |
140 *index = UnsignedLEB128Operand(pc, length); | 139 if (operand.index < function_env_->total_locals) { |
141 if (function_env_->IsValidLocal(*index)) { | 140 operand.type = function_env_->GetLocalType(operand.index); |
142 return function_env_->GetLocalType(*index); | 141 return true; |
143 } | 142 } |
144 error(pc, "invalid local variable index"); | 143 error(pc, pc + 1, "invalid local index"); |
145 return kAstStmt; | 144 return false; |
146 } | 145 } |
147 | 146 |
148 LocalType GlobalOperand(const byte* pc, uint32_t* index, int* length) { | 147 inline bool Validate(const byte* pc, GlobalIndexOperand& operand) { |
149 *index = UnsignedLEB128Operand(pc, length); | 148 ModuleEnv* m = function_env_->module; |
150 if (function_env_->module->IsValidGlobal(*index)) { | 149 if (m && m->module && operand.index < m->module->globals->size()) { |
151 return WasmOpcodes::LocalTypeFor( | 150 operand.machine_type = m->module->globals->at(operand.index).type; |
152 function_env_->module->GetGlobalType(*index)); | 151 operand.type = WasmOpcodes::LocalTypeFor(operand.machine_type); |
153 } | 152 return true; |
154 error(pc, "invalid global variable index"); | 153 } |
155 return kAstStmt; | 154 error(pc, pc + 1, "invalid global index"); |
156 } | 155 return false; |
157 | 156 } |
158 FunctionSig* FunctionSigOperand(const byte* pc, uint32_t* index, | 157 |
159 int* length) { | 158 inline bool Validate(const byte* pc, FunctionIndexOperand& operand) { |
160 *index = UnsignedLEB128Operand(pc, length); | 159 ModuleEnv* m = function_env_->module; |
161 if (function_env_->module->IsValidFunction(*index)) { | 160 if (m && m->module && operand.index < m->module->functions->size()) { |
162 return function_env_->module->GetFunctionSignature(*index); | 161 operand.sig = m->module->functions->at(operand.index).sig; |
163 } | 162 return true; |
164 error(pc, "invalid function index"); | 163 } |
165 return nullptr; | 164 error(pc, pc + 1, "invalid function index"); |
166 } | 165 return false; |
167 | 166 } |
168 FunctionSig* SigOperand(const byte* pc, uint32_t* index, int* length) { | 167 |
169 *index = UnsignedLEB128Operand(pc, length); | 168 inline bool Validate(const byte* pc, SignatureIndexOperand& operand) { |
170 if (function_env_->module->IsValidSignature(*index)) { | 169 ModuleEnv* m = function_env_->module; |
171 return function_env_->module->GetSignature(*index); | 170 if (m && m->module && operand.index < m->module->signatures->size()) { |
172 } | 171 operand.sig = m->module->signatures->at(operand.index); |
173 error(pc, "invalid signature index"); | 172 return true; |
174 return nullptr; | 173 } |
175 } | 174 error(pc, pc + 1, "invalid signature index"); |
176 | 175 return false; |
177 uint32_t UnsignedLEB128Operand(const byte* pc, int* length) { | 176 } |
178 uint32_t result = 0; | 177 |
179 ReadUnsignedLEB128ErrorCode error_code = | 178 inline bool Validate(const byte* pc, BreakDepthOperand& operand, |
180 ReadUnsignedLEB128Operand(pc + 1, limit_, length, &result); | 179 ZoneVector<Block>& blocks) { |
181 if (error_code == kInvalidLEB128) error(pc, "invalid LEB128 varint"); | 180 if (operand.depth < blocks.size()) { |
182 if (error_code == kMissingLEB128) error(pc, "expected LEB128 varint"); | 181 operand.target = &blocks[blocks.size() - operand.depth - 1]; |
183 (*length)++; | 182 return true; |
184 return result; | 183 } |
185 } | 184 error(pc, pc + 1, "invalid break depth"); |
186 | 185 return false; |
187 void MemoryAccessOperand(const byte* pc, int* length, uint32_t* offset) { | 186 } |
188 byte bitfield = ByteOperand(pc, "missing memory access operand"); | 187 |
189 if (MemoryAccess::OffsetField::decode(bitfield)) { | 188 bool Validate(const byte* pc, TableSwitchOperand& operand, |
190 *offset = UnsignedLEB128Operand(pc + 1, length); | 189 size_t block_depth) { |
191 (*length)++; // to account for the memory access byte | 190 if (operand.table_count == 0) { |
192 } else { | 191 error(pc, "tableswitch with 0 entries"); |
193 *offset = 0; | 192 return false; |
194 *length = 2; | 193 } |
| 194 // Verify table. |
| 195 for (uint32_t i = 0; i < operand.table_count; i++) { |
| 196 uint16_t target = operand.read_entry(this, i); |
| 197 if (target >= 0x8000) { |
| 198 size_t depth = target - 0x8000; |
| 199 if (depth > block_depth) { |
| 200 error(operand.table + i * 2, "improper branch in tableswitch"); |
| 201 return false; |
| 202 } |
| 203 } else { |
| 204 if (target >= operand.case_count) { |
| 205 error(operand.table + i * 2, "invalid case target in tableswitch"); |
| 206 return false; |
| 207 } |
| 208 } |
| 209 } |
| 210 return true; |
| 211 } |
| 212 |
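To make the entry convention above concrete (per the CL's encoding: a 2-byte case count, a 2-byte table count, then 2-byte entries, where entries at or above 0x8000 select an enclosing block by depth and smaller entries select a case), here is a self-contained classifier mirroring the checks in Validate:

  #include <cstdint>
  #include <cstdio>

  // Classify one tableswitch table entry the way Validate does.
  static bool EntryOk(uint16_t target, uint32_t case_count,
                      size_t block_depth) {
    if (target >= 0x8000) return (target - 0x8000u) <= block_depth;
    return target < case_count;
  }

  int main() {
    printf("%d\n", EntryOk(0x8000, 2, 1));  // branch to depth 0: valid -> 1
    printf("%d\n", EntryOk(2, 2, 1));       // case 2 of 2 cases: invalid -> 0
    return 0;
  }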
| 213 int OpcodeArity(const byte* pc) { |
| 214 #define DECLARE_ARITY(name, ...) \ |
| 215 static const LocalType kTypes_##name[] = {__VA_ARGS__}; \ |
| 216 static const int kArity_##name = \ |
| 217 static_cast<int>(arraysize(kTypes_##name) - 1); |
| 218 |
| 219 FOREACH_SIGNATURE(DECLARE_ARITY); |
| 220 #undef DECLARE_ARITY |
| 221 |
| 222 switch (static_cast<WasmOpcode>(*pc)) { |
| 223 case kExprI8Const: |
| 224 case kExprI32Const: |
| 225 case kExprI64Const: |
| 226 case kExprF64Const: |
| 227 case kExprF32Const: |
| 228 case kExprGetLocal: |
| 229 case kExprLoadGlobal: |
| 230 case kExprNop: |
| 231 case kExprUnreachable: |
| 232 return 0; |
| 233 |
| 234 case kExprBr: |
| 235 case kExprStoreGlobal: |
| 236 case kExprSetLocal: |
| 237 return 1; |
| 238 |
| 239 case kExprIf: |
| 240 case kExprBrIf: |
| 241 return 2; |
| 242 case kExprIfElse: |
| 243 case kExprSelect: |
| 244 return 3; |
| 245 |
| 246 case kExprBlock: |
| 247 case kExprLoop: { |
| 248 BlockCountOperand operand(this, pc); |
| 249 return operand.count; |
| 250 } |
| 251 |
| 252 case kExprCallFunction: { |
| 253 FunctionIndexOperand operand(this, pc); |
| 254 return static_cast<int>( |
| 255 function_env_->module->GetFunctionSignature(operand.index) |
| 256 ->parameter_count()); |
| 257 } |
| 258 case kExprCallIndirect: { |
| 259 SignatureIndexOperand operand(this, pc); |
| 260 return 1 + static_cast<int>( |
| 261 function_env_->module->GetSignature(operand.index) |
| 262 ->parameter_count()); |
| 263 } |
| 264 case kExprReturn: { |
| 265 return static_cast<int>(function_env_->sig->return_count()); |
| 266 } |
| 267 case kExprTableSwitch: { |
| 268 TableSwitchOperand operand(this, pc); |
| 269 return 1 + operand.case_count; |
| 270 } |
| 271 |
| 272 #define DECLARE_OPCODE_CASE(name, opcode, sig) \ |
| 273 case kExpr##name: \ |
| 274 return kArity_##sig; |
| 275 |
| 276 FOREACH_LOAD_MEM_OPCODE(DECLARE_OPCODE_CASE) |
| 277 FOREACH_STORE_MEM_OPCODE(DECLARE_OPCODE_CASE) |
| 278 FOREACH_MISC_MEM_OPCODE(DECLARE_OPCODE_CASE) |
| 279 FOREACH_SIMPLE_OPCODE(DECLARE_OPCODE_CASE) |
| 280 #undef DECLARE_OPCODE_CASE |
| 281 } |
| 282 UNREACHABLE(); |
| 283 return 0; |
| 284 } |
| 285 |
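For reference, one expansion of DECLARE_ARITY, assuming a FOREACH_SIGNATURE entry of the (hypothetical) form (i_ii, kAstI32, kAstI32, kAstI32); the first type in the list is the return type, so the arity is the array size minus one:

  // Hypothetical expansion for signature entry i_ii:
  static const LocalType kTypes_i_ii[] = {kAstI32, kAstI32, kAstI32};
  static const int kArity_i_ii =
      static_cast<int>(arraysize(kTypes_i_ii) - 1);  // == 2 parameters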
| 286 int OpcodeLength(const byte* pc) { |
| 287 switch (static_cast<WasmOpcode>(*pc)) { |
| 288 #define DECLARE_OPCODE_CASE(name, opcode, sig) case kExpr##name: |
| 289 FOREACH_LOAD_MEM_OPCODE(DECLARE_OPCODE_CASE) |
| 290 FOREACH_STORE_MEM_OPCODE(DECLARE_OPCODE_CASE) |
| 291 #undef DECLARE_OPCODE_CASE |
| 292 { |
| 293 MemoryAccessOperand operand(this, pc); |
| 294 return 1 + operand.length; |
| 295 } |
| 296 case kExprBlock: |
| 297 case kExprLoop: { |
| 298 BlockCountOperand operand(this, pc); |
| 299 return 1 + operand.length; |
| 300 } |
| 301 case kExprBr: |
| 302 case kExprBrIf: { |
| 303 BreakDepthOperand operand(this, pc); |
| 304 return 1 + operand.length; |
| 305 } |
| 306 case kExprStoreGlobal: |
| 307 case kExprLoadGlobal: { |
| 308 GlobalIndexOperand operand(this, pc); |
| 309 return 1 + operand.length; |
| 310 } |
| 311 |
| 312 case kExprCallFunction: { |
| 313 FunctionIndexOperand operand(this, pc); |
| 314 return 1 + operand.length; |
| 315 } |
| 316 case kExprCallIndirect: { |
| 317 SignatureIndexOperand operand(this, pc); |
| 318 return 1 + operand.length; |
| 319 } |
| 320 |
| 321 case kExprSetLocal: |
| 322 case kExprGetLocal: { |
| 323 LocalIndexOperand operand(this, pc); |
| 324 return 1 + operand.length; |
| 325 } |
| 326 case kExprTableSwitch: { |
| 327 TableSwitchOperand operand(this, pc); |
| 328 return 1 + operand.length; |
| 329 } |
| 330 case kExprI8Const: |
| 331 return 2; |
| 332 case kExprI32Const: |
| 333 case kExprF32Const: |
| 334 return 5; |
| 335 case kExprI64Const: |
| 336 case kExprF64Const: |
| 337 return 9; |
| 338 |
| 339 default: |
| 340 return 1; |
195 } | 341 } |
196 } | 342 } |
197 }; | 343 }; |
198 | 344 |
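The recurring pattern on the NEW side above: every immediate gets a small operand struct whose constructor decodes it from pc and records its byte length, so each opcode handler can uniformly compute len = 1 + operand.length. A standalone sketch of that shape (hypothetical MiniDecoder/MiniLocalIndexOperand; the real operand structs are defined elsewhere in this CL and use checked LEB128 reads):

  #include <cstdint>

  struct MiniDecoder { const uint8_t* limit; };  // stand-in for Decoder.

  struct MiniLocalIndexOperand {
    uint32_t index;
    int length;  // bytes consumed after the opcode byte.
    MiniLocalIndexOperand(const MiniDecoder* d, const uint8_t* pc) {
      // The real operand performs a checked LEB128 read; this sketch
      // assumes a single-byte index purely for brevity.
      index = (pc + 1 < d->limit) ? pc[1] : 0;
      length = 1;
    }
  };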
199 | 345 |
200 // A shift-reduce parser for decoding Wasm code, using an explicit | 346 // A shift-reduce parser for decoding Wasm code, using an explicit |
201 // strategy with multiple internal stacks. | 347 // strategy with multiple internal stacks. |
202 class LR_WasmDecoder : public WasmDecoder { | 348 class LR_WasmDecoder : public WasmDecoder { |
203 public: | 349 public: |
204 LR_WasmDecoder(Zone* zone, TFBuilder* builder) | 350 LR_WasmDecoder(Zone* zone, TFBuilder* builder) |
(...skipping 219 matching lines...) |
424 return; | 570 return; |
425 } | 571 } |
426 continue; // back to decoding loop. | 572 continue; // back to decoding loop. |
427 } | 573 } |
428 | 574 |
429 switch (opcode) { | 575 switch (opcode) { |
430 case kExprNop: | 576 case kExprNop: |
431 Leaf(kAstStmt); | 577 Leaf(kAstStmt); |
432 break; | 578 break; |
433 case kExprBlock: { | 579 case kExprBlock: { |
434 int length = ByteOperand(pc_); | 580 BlockCountOperand operand(this, pc_); |
435 if (length < 1) { | 581 if (operand.count < 1) { |
436 Leaf(kAstStmt); | 582 Leaf(kAstStmt); |
437 } else { | 583 } else { |
438 Shift(kAstEnd, length); | 584 Shift(kAstEnd, operand.count); |
439 // The break environment is the outer environment. | 585 // The break environment is the outer environment. |
440 SsaEnv* break_env = ssa_env_; | 586 SsaEnv* break_env = ssa_env_; |
441 PushBlock(break_env); | 587 PushBlock(break_env); |
442 SetEnv("block:start", Steal(break_env)); | 588 SetEnv("block:start", Steal(break_env)); |
443 } | 589 } |
444 len = 2; | 590 len = 1 + operand.length; |
445 break; | 591 break; |
446 } | 592 } |
447 case kExprLoop: { | 593 case kExprLoop: { |
448 int length = ByteOperand(pc_); | 594 BlockCountOperand operand(this, pc_); |
449 if (length < 1) { | 595 if (operand.count < 1) { |
450 Leaf(kAstStmt); | 596 Leaf(kAstStmt); |
451 } else { | 597 } else { |
452 Shift(kAstEnd, length); | 598 Shift(kAstEnd, operand.count); |
453 // The break environment is the outer environment. | 599 // The break environment is the outer environment. |
454 SsaEnv* break_env = ssa_env_; | 600 SsaEnv* break_env = ssa_env_; |
455 PushBlock(break_env); | 601 PushBlock(break_env); |
456 SsaEnv* cont_env = Steal(break_env); | 602 SsaEnv* cont_env = Steal(break_env); |
457 // The continue environment is the inner environment. | 603 // The continue environment is the inner environment. |
458 PrepareForLoop(cont_env); | 604 PrepareForLoop(cont_env); |
459 SetEnv("loop:start", Split(cont_env)); | 605 SetEnv("loop:start", Split(cont_env)); |
460 if (ssa_env_->go()) ssa_env_->state = SsaEnv::kReached; | 606 if (ssa_env_->go()) ssa_env_->state = SsaEnv::kReached; |
461 PushBlock(cont_env); | 607 PushBlock(cont_env); |
462 blocks_.back().stack_depth = -1; // no production for inner block. | 608 blocks_.back().stack_depth = -1; // no production for inner block. |
463 } | 609 } |
464 len = 2; | 610 len = 1 + operand.length; |
465 break; | 611 break; |
466 } | 612 } |
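Note the depth convention this sets up: the loop pushes the break environment first and the continue environment second, so a br of depth 0 inside the body is a back edge (continue) while depth 1 exits the loop (break). A hypothetical byte sketch:

  // kExprLoop, 1,   // loop with a one-expression body
  //   kExprBr, 0,   // depth 0 = inner block -> continue (back edge)
  //     kExprNop,   //   value carried by the branch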
467 case kExprIf: | 613 case kExprIf: |
468 Shift(kAstStmt, 2); | 614 Shift(kAstStmt, 2); |
469 break; | 615 break; |
470 case kExprIfElse: | 616 case kExprIfElse: |
471 Shift(kAstEnd, 3); // Result type is typeof(x) in {c ? x : y}. | 617 Shift(kAstEnd, 3); // Result type is typeof(x) in {c ? x : y}. |
472 break; | 618 break; |
473 case kExprSelect: | 619 case kExprSelect: |
474 Shift(kAstStmt, 3); // Result type is typeof(x) in {c ? x : y}. | 620 Shift(kAstStmt, 3); // Result type is typeof(x) in {c ? x : y}. |
475 break; | 621 break; |
476 case kExprBr: { | 622 case kExprBr: { |
477 uint32_t depth = ByteOperand(pc_); | 623 BreakDepthOperand operand(this, pc_); |
478 Shift(kAstEnd, 1); | 624 if (Validate(pc_, operand, blocks_)) { |
479 if (depth >= blocks_.size()) { | 625 Shift(kAstEnd, 1); |
480 error("improperly nested branch"); | |
481 } | 626 } |
482 len = 2; | 627 len = 1 + operand.length; |
483 break; | 628 break; |
484 } | 629 } |
485 case kExprBrIf: { | 630 case kExprBrIf: { |
486 uint32_t depth = ByteOperand(pc_); | 631 BreakDepthOperand operand(this, pc_); |
487 Shift(kAstStmt, 2); | 632 if (Validate(pc_, operand, blocks_)) { |
488 if (depth >= blocks_.size()) { | 633 Shift(kAstStmt, 2); |
489 error("improperly nested conditional branch"); | |
490 } | 634 } |
491 len = 2; | 635 len = 1 + operand.length; |
492 break; | 636 break; |
493 } | 637 } |
494 case kExprTableSwitch: { | 638 case kExprTableSwitch: { |
495 if (!checkAvailable(5)) { | 639 TableSwitchOperand operand(this, pc_); |
496 error("expected #tableswitch <cases> <table>, fell off end"); | 640 if (Validate(pc_, operand, blocks_.size())) { |
497 break; | 641 Shift(kAstEnd, 1 + operand.case_count); |
498 } | 642 } |
499 uint16_t case_count = read_u16(pc_ + 1); | 643 len = 1 + operand.length; |
500 uint16_t table_count = read_u16(pc_ + 3); | |
501 len = 5 + table_count * 2; | |
502 | |
503 if (table_count == 0) { | |
504 error("tableswitch with 0 entries"); | |
505 break; | |
506 } | |
507 | |
508 if (!checkAvailable(len)) { | |
509 error("expected #tableswitch <cases> <table>, fell off end"); | |
510 break; | |
511 } | |
512 | |
513 Shift(kAstEnd, 1 + case_count); | |
514 | |
515 // Verify table. | |
516 for (int i = 0; i < table_count; i++) { | |
517 uint16_t target = read_u16(pc_ + 5 + i * 2); | |
518 if (target >= 0x8000) { | |
519 size_t depth = target - 0x8000; | |
520 if (depth > blocks_.size()) { | |
521 error(pc_ + 5 + i * 2, "improper branch in tableswitch"); | |
522 } | |
523 } else { | |
524 if (target >= case_count) { | |
525 error(pc_ + 5 + i * 2, "invalid case target in tableswitch"); | |
526 } | |
527 } | |
528 } | |
529 break; | 644 break; |
530 } | 645 } |
531 case kExprReturn: { | 646 case kExprReturn: { |
532 int count = static_cast<int>(function_env_->sig->return_count()); | 647 int count = static_cast<int>(function_env_->sig->return_count()); |
533 if (count == 0) { | 648 if (count == 0) { |
534 BUILD(Return, 0, builder_->Buffer(0)); | 649 BUILD(Return, 0, builder_->Buffer(0)); |
535 ssa_env_->Kill(); | 650 ssa_env_->Kill(); |
536 Leaf(kAstEnd); | 651 Leaf(kAstEnd); |
537 } else { | 652 } else { |
538 Shift(kAstEnd, count); | 653 Shift(kAstEnd, count); |
539 } | 654 } |
540 break; | 655 break; |
541 } | 656 } |
542 case kExprUnreachable: { | 657 case kExprUnreachable: { |
543 BUILD0(Unreachable); | 658 BUILD0(Unreachable); |
544 ssa_env_->Kill(SsaEnv::kControlEnd); | 659 ssa_env_->Kill(SsaEnv::kControlEnd); |
545 Leaf(kAstEnd, nullptr); | 660 Leaf(kAstEnd, nullptr); |
546 break; | 661 break; |
547 } | 662 } |
548 case kExprI8Const: { | 663 case kExprI8Const: { |
549 int32_t value = bit_cast<int8_t>(ByteOperand(pc_)); | 664 ImmI8Operand operand(this, pc_); |
550 Leaf(kAstI32, BUILD(Int32Constant, value)); | 665 Leaf(kAstI32, BUILD(Int32Constant, operand.value)); |
551 len = 2; | 666 len = 1 + operand.length; |
552 break; | 667 break; |
553 } | 668 } |
554 case kExprI32Const: { | 669 case kExprI32Const: { |
555 uint32_t value = Uint32Operand(pc_); | 670 ImmI32Operand operand(this, pc_); |
556 Leaf(kAstI32, BUILD(Int32Constant, value)); | 671 Leaf(kAstI32, BUILD(Int32Constant, operand.value)); |
557 len = 5; | 672 len = 1 + operand.length; |
558 break; | 673 break; |
559 } | 674 } |
560 case kExprI64Const: { | 675 case kExprI64Const: { |
561 uint64_t value = Uint64Operand(pc_); | 676 ImmI64Operand operand(this, pc_); |
562 Leaf(kAstI64, BUILD(Int64Constant, value)); | 677 Leaf(kAstI64, BUILD(Int64Constant, operand.value)); |
563 len = 9; | 678 len = 1 + operand.length; |
564 break; | 679 break; |
565 } | 680 } |
566 case kExprF32Const: { | 681 case kExprF32Const: { |
567 float value = bit_cast<float>(Uint32Operand(pc_)); | 682 ImmF32Operand operand(this, pc_); |
568 Leaf(kAstF32, BUILD(Float32Constant, value)); | 683 Leaf(kAstF32, BUILD(Float32Constant, operand.value)); |
569 len = 5; | 684 len = 1 + operand.length; |
570 break; | 685 break; |
571 } | 686 } |
572 case kExprF64Const: { | 687 case kExprF64Const: { |
573 double value = bit_cast<double>(Uint64Operand(pc_)); | 688 ImmF64Operand operand(this, pc_); |
574 Leaf(kAstF64, BUILD(Float64Constant, value)); | 689 Leaf(kAstF64, BUILD(Float64Constant, operand.value)); |
575 len = 9; | 690 len = 1 + operand.length; |
576 break; | 691 break; |
577 } | 692 } |
578 case kExprGetLocal: { | 693 case kExprGetLocal: { |
579 uint32_t index; | 694 LocalIndexOperand operand(this, pc_); |
580 LocalType type = LocalOperand(pc_, &index, &len); | 695 if (Validate(pc_, operand)) { |
581 TFNode* val = | 696 TFNode* val = build() ? ssa_env_->locals[operand.index] : nullptr; |
582 build() && type != kAstStmt ? ssa_env_->locals[index] : nullptr; | 697 Leaf(operand.type, val); |
583 Leaf(type, val); | 698 } |
| 699 len = 1 + operand.length; |
584 break; | 700 break; |
585 } | 701 } |
586 case kExprSetLocal: { | 702 case kExprSetLocal: { |
587 uint32_t index; | 703 LocalIndexOperand operand(this, pc_); |
588 LocalType type = LocalOperand(pc_, &index, &len); | 704 if (Validate(pc_, operand)) { |
589 Shift(type, 1); | 705 Shift(operand.type, 1); |
| 706 } |
| 707 len = 1 + operand.length; |
590 break; | 708 break; |
591 } | 709 } |
592 case kExprLoadGlobal: { | 710 case kExprLoadGlobal: { |
593 uint32_t index; | 711 GlobalIndexOperand operand(this, pc_); |
594 LocalType type = GlobalOperand(pc_, &index, &len); | 712 if (Validate(pc_, operand)) { |
595 Leaf(type, BUILD(LoadGlobal, index)); | 713 Leaf(operand.type, BUILD(LoadGlobal, operand.index)); |
| 714 } |
| 715 len = 1 + operand.length; |
596 break; | 716 break; |
597 } | 717 } |
598 case kExprStoreGlobal: { | 718 case kExprStoreGlobal: { |
599 uint32_t index; | 719 GlobalIndexOperand operand(this, pc_); |
600 LocalType type = GlobalOperand(pc_, &index, &len); | 720 if (Validate(pc_, operand)) { |
601 Shift(type, 1); | 721 Shift(operand.type, 1); |
| 722 } |
| 723 len = 1 + operand.length; |
602 break; | 724 break; |
603 } | 725 } |
604 case kExprI32LoadMem8S: | 726 case kExprI32LoadMem8S: |
605 case kExprI32LoadMem8U: | 727 case kExprI32LoadMem8U: |
606 case kExprI32LoadMem16S: | 728 case kExprI32LoadMem16S: |
607 case kExprI32LoadMem16U: | 729 case kExprI32LoadMem16U: |
608 case kExprI32LoadMem: | 730 case kExprI32LoadMem: |
609 len = DecodeLoadMem(pc_, kAstI32); | 731 len = DecodeLoadMem(pc_, kAstI32); |
610 break; | 732 break; |
611 case kExprI64LoadMem8S: | 733 case kExprI64LoadMem8S: |
(...skipping 28 matching lines...) |
640 case kExprF64StoreMem: | 762 case kExprF64StoreMem: |
641 len = DecodeStoreMem(pc_, kAstF64); | 763 len = DecodeStoreMem(pc_, kAstF64); |
642 break; | 764 break; |
643 case kExprMemorySize: | 765 case kExprMemorySize: |
644 Leaf(kAstI32, BUILD(MemSize, 0)); | 766 Leaf(kAstI32, BUILD(MemSize, 0)); |
645 break; | 767 break; |
646 case kExprGrowMemory: | 768 case kExprGrowMemory: |
647 Shift(kAstI32, 1); | 769 Shift(kAstI32, 1); |
648 break; | 770 break; |
649 case kExprCallFunction: { | 771 case kExprCallFunction: { |
650 uint32_t unused; | 772 FunctionIndexOperand operand(this, pc_); |
651 FunctionSig* sig = FunctionSigOperand(pc_, &unused, &len); | 773 if (Validate(pc_, operand)) { |
652 if (sig) { | 774 LocalType type = operand.sig->return_count() == 0 |
653 LocalType type = | 775 ? kAstStmt |
654 sig->return_count() == 0 ? kAstStmt : sig->GetReturn(); | 776 : operand.sig->GetReturn(); |
655 Shift(type, static_cast<int>(sig->parameter_count())); | 777 Shift(type, static_cast<int>(operand.sig->parameter_count())); |
656 } else { | |
657 Leaf(kAstI32); // error | |
658 } | 778 } |
| 779 len = 1 + operand.length; |
659 break; | 780 break; |
660 } | 781 } |
661 case kExprCallIndirect: { | 782 case kExprCallIndirect: { |
662 uint32_t unused; | 783 SignatureIndexOperand operand(this, pc_); |
663 FunctionSig* sig = SigOperand(pc_, &unused, &len); | 784 if (Validate(pc_, operand)) { |
664 if (sig) { | 785 LocalType type = operand.sig->return_count() == 0 |
665 LocalType type = | 786 ? kAstStmt |
666 sig->return_count() == 0 ? kAstStmt : sig->GetReturn(); | 787 : operand.sig->GetReturn(); |
667 Shift(type, static_cast<int>(1 + sig->parameter_count())); | 788 Shift(type, static_cast<int>(1 + operand.sig->parameter_count())); |
668 } else { | |
669 Leaf(kAstI32); // error | |
670 } | 789 } |
| 790 len = 1 + operand.length; |
671 break; | 791 break; |
672 } | 792 } |
673 default: | 793 default: |
674 error("Invalid opcode"); | 794 error("Invalid opcode"); |
675 return; | 795 return; |
676 } | 796 } |
677 pc_ += len; | 797 pc_ += len; |
678 if (pc_ >= limit_) { | 798 if (pc_ >= limit_) { |
679 // End of code reached or exceeded. | 799 // End of code reached or exceeded. |
680 if (pc_ > limit_ && ok()) { | 800 if (pc_ > limit_ && ok()) { |
681 error("Beyond end of code"); | 801 error("Beyond end of code"); |
682 } | 802 } |
683 return; | 803 return; |
684 } | 804 } |
685 } | 805 } |
686 } | 806 } |
687 | 807 |
688 void PushBlock(SsaEnv* ssa_env) { | 808 void PushBlock(SsaEnv* ssa_env) { |
689 blocks_.push_back({ssa_env, static_cast<int>(stack_.size() - 1)}); | 809 blocks_.push_back({ssa_env, static_cast<int>(stack_.size() - 1)}); |
690 } | 810 } |
691 | 811 |
692 int DecodeLoadMem(const byte* pc, LocalType type) { | 812 int DecodeLoadMem(const byte* pc, LocalType type) { |
693 int length = 2; | 813 MemoryAccessOperand operand(this, pc); |
694 uint32_t offset; | |
695 MemoryAccessOperand(pc, &length, &offset); | |
696 Shift(type, 1); | 814 Shift(type, 1); |
697 return length; | 815 return 1 + operand.length; |
698 } | 816 } |
699 | 817 |
700 int DecodeStoreMem(const byte* pc, LocalType type) { | 818 int DecodeStoreMem(const byte* pc, LocalType type) { |
701 int length = 2; | 819 MemoryAccessOperand operand(this, pc); |
702 uint32_t offset; | |
703 MemoryAccessOperand(pc, &length, &offset); | |
704 Shift(type, 2); | 820 Shift(type, 2); |
705 return length; | 821 return 1 + operand.length; |
706 } | 822 } |
707 | 823 |
708 void AddImplicitReturnAtEnd() { | 824 void AddImplicitReturnAtEnd() { |
709 int retcount = static_cast<int>(function_env_->sig->return_count()); | 825 int retcount = static_cast<int>(function_env_->sig->return_count()); |
710 if (retcount == 0) { | 826 if (retcount == 0) { |
711 BUILD0(ReturnVoid); | 827 BUILD0(ReturnVoid); |
712 return; | 828 return; |
713 } | 829 } |
714 | 830 |
715 if (static_cast<int>(trees_.size()) < retcount) { | 831 if (static_cast<int>(trees_.size()) < retcount) { |
(...skipping 153 matching lines...) |
869 TFNode* vals[2] = {p->tree->children[1]->node, | 985 TFNode* vals[2] = {p->tree->children[1]->node, |
870 p->tree->children[2]->node}; | 986 p->tree->children[2]->node}; |
871 TFNode* phi = builder_->Phi(p->tree->type, 2, vals, merge); | 987 TFNode* phi = builder_->Phi(p->tree->type, 2, vals, merge); |
872 p->tree->node = phi; | 988 p->tree->node = phi; |
873 ssa_env_->control = merge; | 989 ssa_env_->control = merge; |
874 } | 990 } |
875 } | 991 } |
876 break; | 992 break; |
877 } | 993 } |
878 case kExprBr: { | 994 case kExprBr: { |
879 uint32_t depth = ByteOperand(p->pc()); | 995 BreakDepthOperand operand(this, p->pc()); |
880 if (depth >= blocks_.size()) { | 996 CHECK(Validate(p->pc(), operand, blocks_)); |
881 error("improperly nested branch"); | 997 ReduceBreakToExprBlock(p, operand.target); |
882 break; | |
883 } | |
884 Block* block = &blocks_[blocks_.size() - depth - 1]; | |
885 ReduceBreakToExprBlock(p, block); | |
886 break; | 998 break; |
887 } | 999 } |
888 case kExprBrIf: { | 1000 case kExprBrIf: { |
889 if (p->index == 1) { | 1001 if (p->index == 1) { |
890 TypeCheckLast(p, kAstI32); | 1002 TypeCheckLast(p, kAstI32); |
891 } else if (p->done()) { | 1003 } else if (p->done()) { |
892 uint32_t depth = ByteOperand(p->pc()); | 1004 BreakDepthOperand operand(this, p->pc()); |
893 if (depth >= blocks_.size()) { | 1005 CHECK(Validate(p->pc(), operand, blocks_)); |
894 error("improperly nested branch"); | |
895 break; | |
896 } | |
897 Block* block = &blocks_[blocks_.size() - depth - 1]; | |
898 SsaEnv* fenv = ssa_env_; | 1006 SsaEnv* fenv = ssa_env_; |
899 SsaEnv* tenv = Split(fenv); | 1007 SsaEnv* tenv = Split(fenv); |
900 BUILD(Branch, p->tree->children[0]->node, &tenv->control, | 1008 BUILD(Branch, p->tree->children[0]->node, &tenv->control, |
901 &fenv->control); | 1009 &fenv->control); |
902 ssa_env_ = tenv; | 1010 ssa_env_ = tenv; |
903 ReduceBreakToExprBlock(p, block); | 1011 ReduceBreakToExprBlock(p, operand.target); |
904 ssa_env_ = fenv; | 1012 ssa_env_ = fenv; |
905 } | 1013 } |
906 break; | 1014 break; |
907 } | 1015 } |
908 case kExprTableSwitch: { | 1016 case kExprTableSwitch: { |
909 if (p->index == 1) { | 1017 if (p->index == 1) { |
910 // Switch key finished. | 1018 // Switch key finished. |
911 TypeCheckLast(p, kAstI32); | 1019 TypeCheckLast(p, kAstI32); |
| 1020 if (failed()) break; |
912 | 1021 |
913 uint16_t table_count = read_u16(p->pc() + 3); | 1022 TableSwitchOperand operand(this, p->pc()); |
| 1023 DCHECK(Validate(p->pc(), operand, blocks_.size())); |
914 | 1024 |
915 // Build the switch only if it has more than just a default target. | 1025 // Build the switch only if it has more than just a default target. |
916 bool build_switch = table_count > 1; | 1026 bool build_switch = operand.table_count > 1; |
917 TFNode* sw = nullptr; | 1027 TFNode* sw = nullptr; |
918 if (build_switch) sw = BUILD(Switch, table_count, p->last()->node); | 1028 if (build_switch) |
| 1029 sw = BUILD(Switch, operand.table_count, p->last()->node); |
919 | 1030 |
920 // Allocate environments for each case. | 1031 // Allocate environments for each case. |
921 uint16_t case_count = read_u16(p->pc() + 1); | 1032 SsaEnv** case_envs = zone_->NewArray<SsaEnv*>(operand.case_count); |
922 SsaEnv** case_envs = zone_->NewArray<SsaEnv*>(case_count); | 1033 for (uint32_t i = 0; i < operand.case_count; i++) { |
923 for (int i = 0; i < case_count; i++) case_envs[i] = UnreachableEnv(); | 1034 case_envs[i] = UnreachableEnv(); |
| 1035 } |
924 | 1036 |
925 ifs_.push_back({nullptr, nullptr, case_envs}); | 1037 ifs_.push_back({nullptr, nullptr, case_envs}); |
926 SsaEnv* break_env = ssa_env_; | 1038 SsaEnv* break_env = ssa_env_; |
927 PushBlock(break_env); | 1039 PushBlock(break_env); |
928 SsaEnv* copy = Steal(break_env); | 1040 SsaEnv* copy = Steal(break_env); |
929 ssa_env_ = copy; | 1041 ssa_env_ = copy; |
930 | 1042 |
931 // Build the environments for each case based on the table. | 1043 // Build the environments for each case based on the table. |
932 for (int i = 0; i < table_count; i++) { | 1044 for (uint32_t i = 0; i < operand.table_count; i++) { |
933 uint16_t target = read_u16(p->pc() + 5 + i * 2); | 1045 uint16_t target = operand.read_entry(this, i); |
934 SsaEnv* env = copy; | 1046 SsaEnv* env = copy; |
935 if (build_switch) { | 1047 if (build_switch) { |
936 env = Split(env); | 1048 env = Split(env); |
937 env->control = (i == table_count - 1) ? BUILD(IfDefault, sw) | 1049 env->control = (i == operand.table_count - 1) |
938 : BUILD(IfValue, i, sw); | 1050 ? BUILD(IfDefault, sw) |
| 1051 : BUILD(IfValue, i, sw); |
939 } | 1052 } |
940 if (target >= 0x8000) { | 1053 if (target >= 0x8000) { |
941 // Targets an outer block. | 1054 // Targets an outer block. |
942 int depth = target - 0x8000; | 1055 int depth = target - 0x8000; |
943 SsaEnv* tenv = blocks_[blocks_.size() - depth - 1].ssa_env; | 1056 SsaEnv* tenv = blocks_[blocks_.size() - depth - 1].ssa_env; |
944 Goto(env, tenv); | 1057 Goto(env, tenv); |
945 } else { | 1058 } else { |
946 // Targets a case. | 1059 // Targets a case. |
947 Goto(env, case_envs[target]); | 1060 Goto(env, case_envs[target]); |
948 } | 1061 } |
(...skipping 25 matching lines...) |
974 for (int i = 0; i < count; i++) { | 1087 for (int i = 0; i < count; i++) { |
975 buffer[i] = p->tree->children[i]->node; | 1088 buffer[i] = p->tree->children[i]->node; |
976 } | 1089 } |
977 BUILD(Return, count, buffer); | 1090 BUILD(Return, count, buffer); |
978 } | 1091 } |
979 ssa_env_->Kill(SsaEnv::kControlEnd); | 1092 ssa_env_->Kill(SsaEnv::kControlEnd); |
980 } | 1093 } |
981 break; | 1094 break; |
982 } | 1095 } |
983 case kExprSetLocal: { | 1096 case kExprSetLocal: { |
984 int unused = 0; | 1097 LocalIndexOperand operand(this, p->pc()); |
985 uint32_t index; | 1098 CHECK(Validate(p->pc(), operand)); |
986 LocalType type = LocalOperand(p->pc(), &index, &unused); | |
987 Tree* val = p->last(); | 1099 Tree* val = p->last(); |
988 if (type == val->type) { | 1100 if (operand.type == val->type) { |
989 if (build()) ssa_env_->locals[index] = val->node; | 1101 if (build()) ssa_env_->locals[operand.index] = val->node; |
990 p->tree->node = val->node; | 1102 p->tree->node = val->node; |
991 } else { | 1103 } else { |
992 error(p->pc(), val->pc, "Typecheck failed in SetLocal"); | 1104 error(p->pc(), val->pc, "Typecheck failed in SetLocal"); |
993 } | 1105 } |
994 break; | 1106 break; |
995 } | 1107 } |
996 case kExprStoreGlobal: { | 1108 case kExprStoreGlobal: { |
997 int unused = 0; | 1109 GlobalIndexOperand operand(this, p->pc()); |
998 uint32_t index; | 1110 CHECK(Validate(p->pc(), operand)); |
999 LocalType type = GlobalOperand(p->pc(), &index, &unused); | |
1000 Tree* val = p->last(); | 1111 Tree* val = p->last(); |
1001 if (type == val->type) { | 1112 if (operand.type == val->type) { |
1002 BUILD(StoreGlobal, index, val->node); | 1113 BUILD(StoreGlobal, operand.index, val->node); |
1003 p->tree->node = val->node; | 1114 p->tree->node = val->node; |
1004 } else { | 1115 } else { |
1005 error(p->pc(), val->pc, "Typecheck failed in StoreGlobal"); | 1116 error(p->pc(), val->pc, "Typecheck failed in StoreGlobal"); |
1006 } | 1117 } |
1007 break; | 1118 break; |
1008 } | 1119 } |
1009 | 1120 |
1010 case kExprI32LoadMem8S: | 1121 case kExprI32LoadMem8S: |
1011 return ReduceLoadMem(p, kAstI32, MachineType::Int8()); | 1122 return ReduceLoadMem(p, kAstI32, MachineType::Int8()); |
1012 case kExprI32LoadMem8U: | 1123 case kExprI32LoadMem8U: |
(...skipping 48 matching lines...) |
1061 case kExprF64StoreMem: | 1172 case kExprF64StoreMem: |
1062 return ReduceStoreMem(p, kAstF64, MachineType::Float64()); | 1173 return ReduceStoreMem(p, kAstF64, MachineType::Float64()); |
1063 | 1174 |
1064 case kExprGrowMemory: | 1175 case kExprGrowMemory: |
1065 TypeCheckLast(p, kAstI32); | 1176 TypeCheckLast(p, kAstI32); |
1066 // TODO(titzer): build node for GrowMemory | 1177 // TODO(titzer): build node for GrowMemory |
1067 p->tree->node = BUILD(Int32Constant, 0); | 1178 p->tree->node = BUILD(Int32Constant, 0); |
1068 return; | 1179 return; |
1069 | 1180 |
1070 case kExprCallFunction: { | 1181 case kExprCallFunction: { |
1071 int len; | 1182 FunctionIndexOperand operand(this, p->pc()); |
1072 uint32_t index; | 1183 CHECK(Validate(p->pc(), operand)); |
1073 FunctionSig* sig = FunctionSigOperand(p->pc(), &index, &len); | |
1074 if (!sig) break; | |
1075 if (p->index > 0) { | 1184 if (p->index > 0) { |
1076 TypeCheckLast(p, sig->GetParam(p->index - 1)); | 1185 TypeCheckLast(p, operand.sig->GetParam(p->index - 1)); |
1077 } | 1186 } |
1078 if (p->done() && build()) { | 1187 if (p->done() && build()) { |
1079 uint32_t count = p->tree->count + 1; | 1188 uint32_t count = p->tree->count + 1; |
1080 TFNode** buffer = builder_->Buffer(count); | 1189 TFNode** buffer = builder_->Buffer(count); |
1081 FunctionSig* sig = FunctionSigOperand(p->pc(), &index, &len); | |
1082 USE(sig); | |
1083 buffer[0] = nullptr; // reserved for code object. | 1190 buffer[0] = nullptr; // reserved for code object. |
1084 for (uint32_t i = 1; i < count; i++) { | 1191 for (uint32_t i = 1; i < count; i++) { |
1085 buffer[i] = p->tree->children[i - 1]->node; | 1192 buffer[i] = p->tree->children[i - 1]->node; |
1086 } | 1193 } |
1087 p->tree->node = builder_->CallDirect(index, buffer); | 1194 p->tree->node = builder_->CallDirect(operand.index, buffer); |
1088 } | 1195 } |
1089 break; | 1196 break; |
1090 } | 1197 } |
1091 case kExprCallIndirect: { | 1198 case kExprCallIndirect: { |
1092 int len; | 1199 SignatureIndexOperand operand(this, p->pc()); |
1093 uint32_t index; | 1200 CHECK(Validate(p->pc(), operand)); |
1094 FunctionSig* sig = SigOperand(p->pc(), &index, &len); | |
1095 if (p->index == 1) { | 1201 if (p->index == 1) { |
1096 TypeCheckLast(p, kAstI32); | 1202 TypeCheckLast(p, kAstI32); |
1097 } else { | 1203 } else { |
1098 TypeCheckLast(p, sig->GetParam(p->index - 2)); | 1204 TypeCheckLast(p, operand.sig->GetParam(p->index - 2)); |
1099 } | 1205 } |
1100 if (p->done() && build()) { | 1206 if (p->done() && build()) { |
1101 uint32_t count = p->tree->count; | 1207 uint32_t count = p->tree->count; |
1102 TFNode** buffer = builder_->Buffer(count); | 1208 TFNode** buffer = builder_->Buffer(count); |
1103 for (uint32_t i = 0; i < count; i++) { | 1209 for (uint32_t i = 0; i < count; i++) { |
1104 buffer[i] = p->tree->children[i]->node; | 1210 buffer[i] = p->tree->children[i]->node; |
1105 } | 1211 } |
1106 p->tree->node = builder_->CallIndirect(index, buffer); | 1212 p->tree->node = builder_->CallIndirect(operand.index, buffer); |
1107 } | 1213 } |
1108 break; | 1214 break; |
1109 } | 1215 } |
1110 default: | 1216 default: |
1111 break; | 1217 break; |
1112 } | 1218 } |
1113 } | 1219 } |
1114 | 1220 |
1115 void ReduceBreakToExprBlock(Production* p, Block* block) { | 1221 void ReduceBreakToExprBlock(Production* p, Block* block) { |
1116 if (block->stack_depth < 0) { | 1222 if (block->stack_depth < 0) { |
(...skipping 28 matching lines...) |
1145 p->tree->node = CreateOrMergeIntoPhi(type, target->control, | 1251 p->tree->node = CreateOrMergeIntoPhi(type, target->control, |
1146 p->tree->node, expr->node); | 1252 p->tree->node, expr->node); |
1147 } | 1253 } |
1148 } | 1254 } |
1149 } | 1255 } |
1150 | 1256 |
1151 void ReduceLoadMem(Production* p, LocalType type, MachineType mem_type) { | 1257 void ReduceLoadMem(Production* p, LocalType type, MachineType mem_type) { |
1152 DCHECK_EQ(1, p->index); | 1258 DCHECK_EQ(1, p->index); |
1153 TypeCheckLast(p, kAstI32); // index | 1259 TypeCheckLast(p, kAstI32); // index |
1154 if (build()) { | 1260 if (build()) { |
1155 int length = 0; | 1261 MemoryAccessOperand operand(this, p->pc()); |
1156 uint32_t offset = 0; | |
1157 MemoryAccessOperand(p->pc(), &length, &offset); | |
1158 p->tree->node = | 1262 p->tree->node = |
1159 builder_->LoadMem(type, mem_type, p->last()->node, offset); | 1263 builder_->LoadMem(type, mem_type, p->last()->node, operand.offset); |
1160 } | 1264 } |
1161 } | 1265 } |
1162 | 1266 |
1163 void ReduceStoreMem(Production* p, LocalType type, MachineType mem_type) { | 1267 void ReduceStoreMem(Production* p, LocalType type, MachineType mem_type) { |
1164 if (p->index == 1) { | 1268 if (p->index == 1) { |
1165 TypeCheckLast(p, kAstI32); // index | 1269 TypeCheckLast(p, kAstI32); // index |
1166 } else { | 1270 } else { |
1167 DCHECK_EQ(2, p->index); | 1271 DCHECK_EQ(2, p->index); |
1168 TypeCheckLast(p, type); | 1272 TypeCheckLast(p, type); |
1169 if (build()) { | 1273 if (build()) { |
1170 int length = 0; | 1274 MemoryAccessOperand operand(this, p->pc()); |
1171 uint32_t offset = 0; | |
1172 MemoryAccessOperand(p->pc(), &length, &offset); | |
1173 TFNode* val = p->tree->children[1]->node; | 1275 TFNode* val = p->tree->children[1]->node; |
1174 builder_->StoreMem(mem_type, p->tree->children[0]->node, offset, val); | 1276 builder_->StoreMem(mem_type, p->tree->children[0]->node, operand.offset, |
| 1277 val); |
1175 p->tree->node = val; | 1278 p->tree->node = val; |
1176 } | 1279 } |
1177 } | 1280 } |
1178 } | 1281 } |
1179 | 1282 |
1180 void TypeCheckLast(Production* p, LocalType expected) { | 1283 void TypeCheckLast(Production* p, LocalType expected) { |
1181 LocalType result = p->last()->type; | 1284 LocalType result = p->last()->type; |
1182 if (result == expected) return; | 1285 if (result == expected) return; |
1183 if (result == kAstEnd) return; | 1286 if (result == kAstEnd) return; |
1184 if (expected != kAstStmt) { | 1287 if (expected != kAstStmt) { |
1185 error(p->pc(), p->last()->pc, | 1288 error(p->pc(), p->last()->pc, |
1186 "%s[%d] expected type %s, found %s of type %s", | 1289 "%s[%d] expected type %s, found %s of type %s", |
1187 WasmOpcodes::OpcodeName(p->opcode()), p->index - 1, | 1290 WasmOpcodes::OpcodeName(p->opcode()), p->index - 1, |
1188 WasmOpcodes::TypeName(expected), | 1291 WasmOpcodes::TypeName(expected), |
1189 WasmOpcodes::OpcodeName(p->last()->opcode()), | 1292 WasmOpcodes::OpcodeName(p->last()->opcode()), |
1190 WasmOpcodes::TypeName(p->last()->type)); | 1293 WasmOpcodes::TypeName(p->last()->type)); |
1191 } | 1294 } |
1192 } | 1295 } |
1193 | 1296 |
1194 void SetEnv(const char* reason, SsaEnv* env) { | 1297 void SetEnv(const char* reason, SsaEnv* env) { |
1195 TRACE(" env = %p, block depth = %d, reason = %s", static_cast<void*>(env), | 1298 TRACE(" env = %p, block depth = %d, reason = %s", static_cast<void*>(env), |
1196 static_cast<int>(blocks_.size()), reason); | 1299 static_cast<int>(blocks_.size()), reason); |
1197 if (env->control != nullptr && FLAG_trace_wasm_decoder) { | 1300 if (FLAG_trace_wasm_decoder && env && env->control) { |
1198 TRACE(", control = "); | 1301 TRACE(", control = "); |
1199 compiler::WasmGraphBuilder::PrintDebugName(env->control); | 1302 compiler::WasmGraphBuilder::PrintDebugName(env->control); |
1200 } | 1303 } |
1201 TRACE("\n"); | 1304 TRACE("\n"); |
1202 ssa_env_ = env; | 1305 ssa_env_ = env; |
1203 if (builder_) { | 1306 if (builder_) { |
1204 builder_->set_control_ptr(&env->control); | 1307 builder_->set_control_ptr(&env->control); |
1205 builder_->set_effect_ptr(&env->effect); | 1308 builder_->set_effect_ptr(&env->effect); |
1206 } | 1309 } |
1207 } | 1310 } |
(...skipping 232 matching lines...) |
1440 } | 1543 } |
1441 if (tree.count > 0) os << ")"; | 1544 if (tree.count > 0) os << ")"; |
1442 return os; | 1545 return os; |
1443 } | 1546 } |
1444 | 1547 |
1445 | 1548 |
1446 ReadUnsignedLEB128ErrorCode ReadUnsignedLEB128Operand(const byte* pc, | 1549 ReadUnsignedLEB128ErrorCode ReadUnsignedLEB128Operand(const byte* pc, |
1447 const byte* limit, | 1550 const byte* limit, |
1448 int* length, | 1551 int* length, |
1449 uint32_t* result) { | 1552 uint32_t* result) { |
1450 *result = 0; | 1553 Decoder decoder(pc, limit); |
1451 const byte* ptr = pc; | 1554 *result = decoder.checked_read_u32v(pc, 0, length); |
1452 const byte* end = pc + 5; // maximum 5 bytes. | 1555 if (decoder.ok()) return kNoError; |
1453 if (end > limit) end = limit; | 1556 return (limit - pc) > 1 ? kInvalidLEB128 : kMissingLEB128; |
1454 int shift = 0; | |
1455 byte b = 0; | |
1456 while (ptr < end) { | |
1457 b = *ptr++; | |
1458 *result = *result | ((b & 0x7F) << shift); | |
1459 if ((b & 0x80) == 0) break; | |
1460 shift += 7; | |
1461 } | |
1462 DCHECK_LE(ptr - pc, 5); | |
1463 *length = static_cast<int>(ptr - pc); | |
1464 if (ptr == end && (b & 0x80)) { | |
1465 return kInvalidLEB128; | |
1466 } else if (*length == 0) { | |
1467 return kMissingLEB128; | |
1468 } else { | |
1469 return kNoError; | |
1470 } | |
1471 } | 1557 } |
1472 | 1558 |
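A quick worked example of the varint format this wrapper now delegates to checked_read_u32v: each byte contributes seven payload bits, least-significant group first, and the high bit flags continuation. A minimal reader for illustration only (none of the bounds or length checks the real Decoder performs):

  #include <cstdint>
  #include <cstdio>

  static uint32_t ReadLEB(const uint8_t* p, int* length) {
    uint32_t result = 0;
    int shift = 0, n = 0;
    uint8_t b;
    do {
      b = p[n++];
      result |= static_cast<uint32_t>(b & 0x7F) << shift;
      shift += 7;
    } while (b & 0x80);
    *length = n;
    return result;
  }

  int main() {
    const uint8_t bytes[] = {0xE5, 0x8E, 0x26};  // encodes 624485.
    int len;
    printf("%u (%d bytes)\n", ReadLEB(bytes, &len), len);  // 624485 (3 bytes)
    return 0;
  }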
1473 | 1559 int OpcodeLength(const byte* pc, const byte* end) { |
1474 // TODO(titzer): move this into WasmDecoder and bounds check accesses. | 1560 WasmDecoder decoder(nullptr, pc, end); |
1475 int OpcodeLength(const byte* pc) { | 1561 return decoder.OpcodeLength(pc); |
1476 switch (static_cast<WasmOpcode>(*pc)) { | |
1477 #define DECLARE_OPCODE_CASE(name, opcode, sig) case kExpr##name: | |
1478 FOREACH_LOAD_MEM_OPCODE(DECLARE_OPCODE_CASE) | |
1479 FOREACH_STORE_MEM_OPCODE(DECLARE_OPCODE_CASE) | |
1480 #undef DECLARE_OPCODE_CASE | |
1481 { | |
1482 // Loads and stores have an optional offset. | |
1483 byte bitfield = pc[1]; | |
1484 if (MemoryAccess::OffsetField::decode(bitfield)) { | |
1485 int length; | |
1486 uint32_t result = 0; | |
1487 ReadUnsignedLEB128Operand(pc + 2, pc + 7, &length, &result); | |
1488 return 2 + length; | |
1489 } | |
1490 return 2; | |
1491 } | |
1492 case kExprI8Const: | |
1493 case kExprBlock: | |
1494 case kExprLoop: | |
1495 case kExprBr: | |
1496 case kExprBrIf: | |
1497 return 2; | |
1498 case kExprI32Const: | |
1499 case kExprF32Const: | |
1500 return 5; | |
1501 case kExprI64Const: | |
1502 case kExprF64Const: | |
1503 return 9; | |
1504 case kExprStoreGlobal: | |
1505 case kExprSetLocal: | |
1506 case kExprLoadGlobal: | |
1507 case kExprCallFunction: | |
1508 case kExprCallIndirect: | |
1509 case kExprGetLocal: { | |
1510 int length; | |
1511 uint32_t result = 0; | |
1512 ReadUnsignedLEB128Operand(pc + 1, pc + 6, &length, &result); | |
1513 return 1 + length; | |
1514 } | |
1515 case kExprTableSwitch: { | |
1516 uint16_t table_count = *reinterpret_cast<const uint16_t*>(pc + 3); | |
1517 return 5 + table_count * 2; | |
1518 } | |
1519 | |
1520 default: | |
1521 return 1; | |
1522 } | |
1523 } | 1562 } |
1524 | 1563 |
1525 | 1564 int OpcodeArity(FunctionEnv* env, const byte* pc, const byte* end) { |
1526 // TODO(titzer): move this into WasmDecoder and bounds check accesses. | 1565 WasmDecoder decoder(env, pc, end); |
1527 int OpcodeArity(FunctionEnv* env, const byte* pc) { | 1566 return decoder.OpcodeArity(pc); |
1528 #define DECLARE_ARITY(name, ...) \ | |
1529 static const LocalType kTypes_##name[] = {__VA_ARGS__}; \ | |
1530 static const int kArity_##name = \ | |
1531 static_cast<int>(arraysize(kTypes_##name) - 1); | |
1532 | |
1533 FOREACH_SIGNATURE(DECLARE_ARITY); | |
1534 #undef DECLARE_ARITY | |
1535 | |
1536 switch (static_cast<WasmOpcode>(*pc)) { | |
1537 case kExprI8Const: | |
1538 case kExprI32Const: | |
1539 case kExprI64Const: | |
1540 case kExprF64Const: | |
1541 case kExprF32Const: | |
1542 case kExprGetLocal: | |
1543 case kExprLoadGlobal: | |
1544 case kExprNop: | |
1545 case kExprUnreachable: | |
1546 return 0; | |
1547 | |
1548 case kExprBr: | |
1549 case kExprStoreGlobal: | |
1550 case kExprSetLocal: | |
1551 return 1; | |
1552 | |
1553 case kExprIf: | |
1554 case kExprBrIf: | |
1555 return 2; | |
1556 case kExprIfElse: | |
1557 case kExprSelect: | |
1558 return 3; | |
1559 case kExprBlock: | |
1560 case kExprLoop: | |
1561 return *(pc + 1); | |
1562 | |
1563 case kExprCallFunction: { | |
1564 int index = *(pc + 1); | |
1565 return static_cast<int>( | |
1566 env->module->GetFunctionSignature(index)->parameter_count()); | |
1567 } | |
1568 case kExprCallIndirect: { | |
1569 int index = *(pc + 1); | |
1570 return 1 + static_cast<int>( | |
1571 env->module->GetSignature(index)->parameter_count()); | |
1572 } | |
1573 case kExprReturn: | |
1574 return static_cast<int>(env->sig->return_count()); | |
1575 case kExprTableSwitch: { | |
1576 uint16_t case_count = *reinterpret_cast<const uint16_t*>(pc + 1); | |
1577 return 1 + case_count; | |
1578 } | |
1579 | |
1580 #define DECLARE_OPCODE_CASE(name, opcode, sig) \ | |
1581 case kExpr##name: \ | |
1582 return kArity_##sig; | |
1583 | |
1584 FOREACH_LOAD_MEM_OPCODE(DECLARE_OPCODE_CASE) | |
1585 FOREACH_STORE_MEM_OPCODE(DECLARE_OPCODE_CASE) | |
1586 FOREACH_MISC_MEM_OPCODE(DECLARE_OPCODE_CASE) | |
1587 FOREACH_SIMPLE_OPCODE(DECLARE_OPCODE_CASE) | |
1588 #undef DECLARE_OPCODE_CASE | |
1589 } | |
1590 UNREACHABLE(); | |
1591 return 0; | |
1592 } | 1567 } |
1593 | 1568 |
1594 | |
1595 | |
1596 void PrintAst(FunctionEnv* env, const byte* start, const byte* end) { | 1569 void PrintAst(FunctionEnv* env, const byte* start, const byte* end) { |
| 1570 WasmDecoder decoder(env, start, end); |
1597 const byte* pc = start; | 1571 const byte* pc = start; |
1598 std::vector<int> arity_stack; | 1572 std::vector<int> arity_stack; |
1599 while (pc < end) { | 1573 while (pc < end) { |
1600 int arity = OpcodeArity(env, pc); | 1574 int arity = decoder.OpcodeArity(pc); |
1601 size_t length = OpcodeLength(pc); | 1575 size_t length = decoder.OpcodeLength(pc); |
1602 | 1576 |
1603 for (auto arity : arity_stack) { | 1577 for (auto arity : arity_stack) { |
1604 printf(" "); | 1578 printf(" "); |
1605 USE(arity); | 1579 USE(arity); |
1606 } | 1580 } |
1607 | 1581 |
1608 WasmOpcode opcode = static_cast<WasmOpcode>(*pc); | 1582 WasmOpcode opcode = static_cast<WasmOpcode>(*pc); |
1609 printf("k%s,", WasmOpcodes::OpcodeName(opcode)); | 1583 printf("k%s,", WasmOpcodes::OpcodeName(opcode)); |
1610 | 1584 |
1611 for (size_t i = 1; i < length; i++) { | 1585 for (size_t i = 1; i < length; i++) { |
1612 printf(" 0x%02x,", pc[i]); | 1586 printf(" 0x%02x,", pc[i]); |
1613 } | 1587 } |
1614 pc += length; | 1588 pc += length; |
1615 printf("\n"); | 1589 printf("\n"); |
1616 | 1590 |
1617 arity_stack.push_back(arity); | 1591 arity_stack.push_back(arity); |
1618 while (arity_stack.back() == 0) { | 1592 while (arity_stack.back() == 0) { |
1619 arity_stack.pop_back(); | 1593 arity_stack.pop_back(); |
1620 if (arity_stack.empty()) break; | 1594 if (arity_stack.empty()) break; |
1621 arity_stack.back()--; | 1595 arity_stack.back()--; |
1622 } | 1596 } |
1623 } | 1597 } |
1624 } | 1598 } |
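For orientation, the arity stack above is what produces the indentation: each printed opcode is indented one level per unfinished parent. For the bytes kExprI32Add, kExprI8Const 1, kExprI8Const 2, the output would look roughly like this (opcode names approximate):

  kI32Add,
    kI8Const, 0x01,
    kI8Const, 0x02,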
1625 | 1599 |
1626 | |
1627 // Analyzes loop bodies for static assignments to locals, which helps in | 1600 // Analyzes loop bodies for static assignments to locals, which helps in |
1628 // reducing the number of phis introduced at loop headers. | 1601 // reducing the number of phis introduced at loop headers. |
1629 class LoopAssignmentAnalyzer : public WasmDecoder { | 1602 class LoopAssignmentAnalyzer : public WasmDecoder { |
1630 public: | 1603 public: |
1631 LoopAssignmentAnalyzer(Zone* zone, FunctionEnv* function_env) : zone_(zone) { | 1604 LoopAssignmentAnalyzer(Zone* zone, FunctionEnv* function_env) : zone_(zone) { |
1632 function_env_ = function_env; | 1605 function_env_ = function_env; |
1633 } | 1606 } |
1634 | 1607 |
1635 BitVector* Analyze(const byte* pc, const byte* limit) { | 1608 BitVector* Analyze(const byte* pc, const byte* limit) { |
1636 Decoder::Reset(pc, limit); | 1609 Decoder::Reset(pc, limit); |
1637 if (pc_ >= limit_) return nullptr; | 1610 if (pc_ >= limit_) return nullptr; |
1638 if (*pc_ != kExprLoop) return nullptr; | 1611 if (*pc_ != kExprLoop) return nullptr; |
1639 | 1612 |
1640 BitVector* assigned = | 1613 BitVector* assigned = |
1641 new (zone_) BitVector(function_env_->total_locals, zone_); | 1614 new (zone_) BitVector(function_env_->total_locals, zone_); |
1642 // Keep a stack to model the nesting of expressions. | 1615 // Keep a stack to model the nesting of expressions. |
1643 std::vector<int> arity_stack; | 1616 std::vector<int> arity_stack; |
1644 arity_stack.push_back(OpcodeArity(function_env_, pc_)); | 1617 arity_stack.push_back(OpcodeArity(pc_)); |
1645 pc_ += OpcodeLength(pc_); | 1618 pc_ += OpcodeLength(pc_); |
1646 | 1619 |
1647 // Iteratively process all AST nodes nested inside the loop. | 1620 // Iteratively process all AST nodes nested inside the loop. |
1648 while (pc_ < limit_) { | 1621 while (pc_ < limit_) { |
1649 WasmOpcode opcode = static_cast<WasmOpcode>(*pc_); | 1622 WasmOpcode opcode = static_cast<WasmOpcode>(*pc_); |
1650 int arity = 0; | 1623 int arity = 0; |
1651 int length = 1; | 1624 int length = 1; |
1652 if (opcode == kExprSetLocal) { | 1625 if (opcode == kExprSetLocal) { |
1653 uint32_t index; | 1626 LocalIndexOperand operand(this, pc_); |
1654 LocalOperand(pc_, &index, &length); | |
1655 if (assigned->length() > 0 && | 1627 if (assigned->length() > 0 && |
1656 static_cast<int>(index) < assigned->length()) { | 1628 static_cast<int>(operand.index) < assigned->length()) { |
1657 // Unverified code might have an out-of-bounds index. | 1629 // Unverified code might have an out-of-bounds index. |
1658 assigned->Add(index); | 1630 assigned->Add(operand.index); |
1659 } | 1631 } |
1660 arity = 1; | 1632 arity = 1; |
| 1633 length = 1 + operand.length; |
1661 } else { | 1634 } else { |
1662 arity = OpcodeArity(function_env_, pc_); | 1635 arity = OpcodeArity(pc_); |
1663 length = OpcodeLength(pc_); | 1636 length = OpcodeLength(pc_); |
1664 } | 1637 } |
1665 | 1638 |
1666 pc_ += length; | 1639 pc_ += length; |
1667 arity_stack.push_back(arity); | 1640 arity_stack.push_back(arity); |
1668 while (arity_stack.back() == 0) { | 1641 while (arity_stack.back() == 0) { |
1669 arity_stack.pop_back(); | 1642 arity_stack.pop_back(); |
1670 if (arity_stack.empty()) return assigned; // reached end of loop | 1643 if (arity_stack.empty()) return assigned; // reached end of loop |
1671 arity_stack.back()--; | 1644 arity_stack.back()--; |
1672 } | 1645 } |
1673 } | 1646 } |
1674 return assigned; | 1647 return assigned; |
1675 } | 1648 } |
1676 | 1649 |
1677 private: | 1650 private: |
1678 Zone* zone_; | 1651 Zone* zone_; |
1679 }; | 1652 }; |
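Why this analysis pays off, in sketch form: only locals the loop body may assign need a phi at the loop header; untouched locals flow through unchanged. A toy model with a hypothetical four-local function whose loop body writes only local #2 (std::vector<bool> standing in for the returned BitVector):

  #include <cstdio>
  #include <vector>

  int main() {
    // Stand-in for the BitVector returned by Analyze().
    std::vector<bool> assigned = {false, false, true, false};
    for (size_t i = 0; i < assigned.size(); i++) {
      printf("local %zu: %s\n", i,
             assigned[i] ? "phi at loop header" : "no phi needed");
    }
    return 0;
  }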
1680 | 1653 |
1681 | 1654 |
1682 BitVector* AnalyzeLoopAssignmentForTesting(Zone* zone, FunctionEnv* env, | 1655 BitVector* AnalyzeLoopAssignmentForTesting(Zone* zone, FunctionEnv* env, |
1683 const byte* start, const byte* end) { | 1656 const byte* start, const byte* end) { |
1684 LoopAssignmentAnalyzer analyzer(zone, env); | 1657 LoopAssignmentAnalyzer analyzer(zone, env); |
1685 return analyzer.Analyze(start, end); | 1658 return analyzer.Analyze(start, end); |
1686 } | 1659 } |
1687 | 1660 |
1688 } // namespace wasm | 1661 } // namespace wasm |
1689 } // namespace internal | 1662 } // namespace internal |
1690 } // namespace v8 | 1663 } // namespace v8 |