| OLD | NEW |
| 1 // Copyright 2015 the V8 project authors. All rights reserved. | 1 // Copyright 2015 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/signature.h" | 5 #include "src/signature.h" |
| 6 | 6 |
| 7 #include "src/bit-vector.h" | 7 #include "src/bit-vector.h" |
| 8 #include "src/flags.h" | 8 #include "src/flags.h" |
| 9 #include "src/handles.h" | 9 #include "src/handles.h" |
| 10 #include "src/zone-containers.h" | 10 #include "src/zone-containers.h" |
| (...skipping 24 matching lines...) |
| 35 struct Tree { | 35 struct Tree { |
| 36 LocalType type; // tree type. | 36 LocalType type; // tree type. |
| 37 uint32_t count; // number of children. | 37 uint32_t count; // number of children. |
| 38 const byte* pc; // start of the syntax tree. | 38 const byte* pc; // start of the syntax tree. |
| 39 TFNode* node; // node in the TurboFan graph. | 39 TFNode* node; // node in the TurboFan graph. |
| 40 Tree* children[1]; // pointers to children. | 40 Tree* children[1]; // pointers to children. |
| 41 | 41 |
| 42 WasmOpcode opcode() const { return static_cast<WasmOpcode>(*pc); } | 42 WasmOpcode opcode() const { return static_cast<WasmOpcode>(*pc); } |
| 43 }; | 43 }; |
| 44 | 44 |
| 45 // A production represents an incomplete decoded tree in the LR decoder. | |
| 46 struct Production { | |
| 47 Tree* tree; // the root of the syntax tree. | |
| 48 int index; // the current index into the children of the tree. | |
| 49 | |
| 50 WasmOpcode opcode() const { return static_cast<WasmOpcode>(*pc()); } | |
| 51 const byte* pc() const { return tree->pc; } | |
| 52 bool done() const { return index >= static_cast<int>(tree->count); } | |
| 53 Tree* last() const { return index > 0 ? tree->children[index - 1] : nullptr; } | |
| 54 }; | |
| 55 | |
| 56 // An SsaEnv environment carries the current local variable renaming | 45 // An SsaEnv environment carries the current local variable renaming |
| 57 // as well as the current effect and control dependency in the TF graph. | 46 // as well as the current effect and control dependency in the TF graph. |
| 58 // It maintains a control state that tracks whether the environment | 47 // It maintains a control state that tracks whether the environment |
| 59 // is reachable, has reached a control end, or has been merged. | 48 // is reachable, has reached a control end, or has been merged. |
| 60 struct SsaEnv { | 49 struct SsaEnv { |
| 61 enum State { kControlEnd, kUnreachable, kReached, kMerged }; | 50 enum State { kControlEnd, kUnreachable, kReached, kMerged }; |
| 62 | 51 |
| 63 State state; | 52 State state; |
| 64 TFNode* control; | 53 TFNode* control; |
| 65 TFNode* effect; | 54 TFNode* effect; |
| 66 TFNode** locals; | 55 TFNode** locals; |
| 67 | 56 |
| 68 bool go() { return state >= kReached; } | 57 bool go() { return state >= kReached; } |
| 69 void Kill(State new_state = kControlEnd) { | 58 void Kill(State new_state = kControlEnd) { |
| 70 state = new_state; | 59 state = new_state; |
| 71 locals = nullptr; | 60 locals = nullptr; |
| 72 control = nullptr; | 61 control = nullptr; |
| 73 effect = nullptr; | 62 effect = nullptr; |
| 74 } | 63 } |
| 64 void SetNotMerged() { |
| 65 if (state == kMerged) state = kReached; |
| 66 } |
| 75 }; | 67 }; |
| 76 | 68 |
| 77 // An entry in the stack of blocks during decoding. | 69 // An entry on the value stack. |
| 78 struct Block { | 70 struct Value { |
| 79 SsaEnv* ssa_env; // SSA renaming environment. | 71 const byte* pc; |
| 80 int stack_depth; // production stack depth. | 72 TFNode* node; |
| 73 LocalType type; |
| 81 }; | 74 }; |
| 82 | 75 |
| 83 // An entry in the stack of ifs during decoding. | 76 // An entry on the control stack (i.e. if, block, loop). |
| 84 struct IfEnv { | 77 struct Control { |
| 85 SsaEnv* false_env; | 78 const byte* pc; |
| 86 SsaEnv* merge_env; | 79 int stack_depth; // stack height at the beginning of the construct. |
| 87 SsaEnv** case_envs; | 80 int block_depth; // block height at the beginning of the construct. |
| 81 SsaEnv* ssa_env; // end environment for the construct. |
| 82 TFNode* node; // result node for the construct. |
| 83 LocalType type; // result type for the construct. |
| 84 |
| 85 int counter; // used by if. |
| 86 int max; // used by fi |
| 87 |
| 88 bool is_if() { return *pc == kExprIf; } |
| 89 bool is_block() { return *pc == kExprBlock; } |
| 90 bool is_loop() { return *pc == kExprLoop && stack_depth < 0; } |
| 88 }; | 91 }; |
| 89 | 92 |
| 90 // Macros that build nodes only if there is a graph and the current SSA | 93 // Macros that build nodes only if there is a graph and the current SSA |
| 91 // environment is reachable from start. This avoids problems with malformed | 94 // environment is reachable from start. This avoids problems with malformed |
| 92 // TF graphs when decoding inputs that have unreachable code. | 95 // TF graphs when decoding inputs that have unreachable code. |
| 93 #define BUILD(func, ...) (build() ? builder_->func(__VA_ARGS__) : nullptr) | 96 #define BUILD(func, ...) (build() ? builder_->func(__VA_ARGS__) : nullptr) |
| 94 #define BUILD0(func) (build() ? builder_->func() : nullptr) | 97 #define BUILD0(func) (build() ? builder_->func() : nullptr) |
| 95 | 98 |
| 96 // Generic Wasm bytecode decoder with utilities for decoding operands, | 99 // Generic Wasm bytecode decoder with utilities for decoding operands, |
| 97 // lengths, etc. | 100 // lengths, etc. |
| (...skipping 83 matching lines...) |
| 181 ModuleEnv* m = module_; | 184 ModuleEnv* m = module_; |
| 182 if (m && m->module && operand.index < m->module->import_table.size()) { | 185 if (m && m->module && operand.index < m->module->import_table.size()) { |
| 183 operand.sig = m->module->import_table[operand.index].sig; | 186 operand.sig = m->module->import_table[operand.index].sig; |
| 184 return true; | 187 return true; |
| 185 } | 188 } |
| 186 error(pc, pc + 1, "invalid signature index"); | 189 error(pc, pc + 1, "invalid signature index"); |
| 187 return false; | 190 return false; |
| 188 } | 191 } |
| 189 | 192 |
| 190 inline bool Validate(const byte* pc, BreakDepthOperand& operand, | 193 inline bool Validate(const byte* pc, BreakDepthOperand& operand, |
| 191 ZoneVector<Block>& blocks) { | 194 ZoneVector<size_t>& blocks, |
| 195 ZoneVector<Control>& control) { |
| 192 if (operand.depth < blocks.size()) { | 196 if (operand.depth < blocks.size()) { |
| 193 operand.target = &blocks[blocks.size() - operand.depth - 1]; | 197 operand.target = &control[blocks[blocks.size() - operand.depth - 1]]; |
| 198 DCHECK(!operand.target->is_if()); |
| 194 return true; | 199 return true; |
| 195 } | 200 } |
| 196 error(pc, pc + 1, "invalid break depth"); | 201 error(pc, pc + 1, "invalid break depth"); |
| 197 return false; | 202 return false; |
| 198 } | 203 } |
| 199 | 204 |
| 200 bool Validate(const byte* pc, BranchTableOperand& operand, | 205 bool Validate(const byte* pc, BranchTableOperand& operand, |
| 201 size_t block_depth) { | 206 size_t block_depth) { |
| 202 // Verify table. | 207 // Verify table. |
| 203 for (uint32_t i = 0; i < operand.table_count + 1; i++) { | 208 for (uint32_t i = 0; i < operand.table_count + 1; i++) { |
| (...skipping 18 matching lines...) |
| 222 switch (static_cast<WasmOpcode>(*pc)) { | 227 switch (static_cast<WasmOpcode>(*pc)) { |
| 223 case kExprI8Const: | 228 case kExprI8Const: |
| 224 case kExprI32Const: | 229 case kExprI32Const: |
| 225 case kExprI64Const: | 230 case kExprI64Const: |
| 226 case kExprF64Const: | 231 case kExprF64Const: |
| 227 case kExprF32Const: | 232 case kExprF32Const: |
| 228 case kExprGetLocal: | 233 case kExprGetLocal: |
| 229 case kExprLoadGlobal: | 234 case kExprLoadGlobal: |
| 230 case kExprNop: | 235 case kExprNop: |
| 231 case kExprUnreachable: | 236 case kExprUnreachable: |
| 237 case kExprEnd: |
| 232 return 0; | 238 return 0; |
| 233 | 239 |
| 234 case kExprBr: | 240 case kExprBr: |
| 235 case kExprStoreGlobal: | 241 case kExprStoreGlobal: |
| 236 case kExprSetLocal: | 242 case kExprSetLocal: |
| 243 case kExprElse: |
| 237 return 1; | 244 return 1; |
| 238 | 245 |
| 239 case kExprIf: | 246 case kExprIf: |
| 240 case kExprBrIf: | 247 case kExprBrIf: |
| 241 return 2; | 248 return 2; |
| 242 case kExprIfElse: | |
| 243 case kExprSelect: | 249 case kExprSelect: |
| 244 return 3; | 250 return 3; |
| 245 | 251 |
| 246 case kExprBlock: | 252 case kExprBlock: |
| 247 case kExprLoop: { | 253 case kExprLoop: { |
| 248 BlockCountOperand operand(this, pc); | 254 BlockCountOperand operand(this, pc); |
| 249 return operand.count; | 255 return operand.count; |
| 250 } | 256 } |
| 251 | 257 |
| 252 case kExprCallFunction: { | 258 case kExprCallFunction: { |
| (...skipping 21 matching lines...) |
| 274 #define DECLARE_OPCODE_CASE(name, opcode, sig) \ | 280 #define DECLARE_OPCODE_CASE(name, opcode, sig) \ |
| 275 case kExpr##name: \ | 281 case kExpr##name: \ |
| 276 return kArity_##sig; | 282 return kArity_##sig; |
| 277 | 283 |
| 278 FOREACH_LOAD_MEM_OPCODE(DECLARE_OPCODE_CASE) | 284 FOREACH_LOAD_MEM_OPCODE(DECLARE_OPCODE_CASE) |
| 279 FOREACH_STORE_MEM_OPCODE(DECLARE_OPCODE_CASE) | 285 FOREACH_STORE_MEM_OPCODE(DECLARE_OPCODE_CASE) |
| 280 FOREACH_MISC_MEM_OPCODE(DECLARE_OPCODE_CASE) | 286 FOREACH_MISC_MEM_OPCODE(DECLARE_OPCODE_CASE) |
| 281 FOREACH_SIMPLE_OPCODE(DECLARE_OPCODE_CASE) | 287 FOREACH_SIMPLE_OPCODE(DECLARE_OPCODE_CASE) |
| 282 FOREACH_ASMJS_COMPAT_OPCODE(DECLARE_OPCODE_CASE) | 288 FOREACH_ASMJS_COMPAT_OPCODE(DECLARE_OPCODE_CASE) |
| 283 #undef DECLARE_OPCODE_CASE | 289 #undef DECLARE_OPCODE_CASE |
| 284 case kExprDeclLocals: | |
| 285 default: | 290 default: |
| 286 UNREACHABLE(); | 291 UNREACHABLE(); |
| 287 return 0; | 292 return 0; |
| 288 } | 293 } |
| 289 } | 294 } |
| 290 | 295 |
| 291 int OpcodeLength(const byte* pc) { | 296 int OpcodeLength(const byte* pc) { |
| 292 switch (static_cast<WasmOpcode>(*pc)) { | 297 switch (static_cast<WasmOpcode>(*pc)) { |
| 293 #define DECLARE_OPCODE_CASE(name, opcode, sig) case kExpr##name: | 298 #define DECLARE_OPCODE_CASE(name, opcode, sig) case kExpr##name: |
| 294 FOREACH_LOAD_MEM_OPCODE(DECLARE_OPCODE_CASE) | 299 FOREACH_LOAD_MEM_OPCODE(DECLARE_OPCODE_CASE) |
| 295 FOREACH_STORE_MEM_OPCODE(DECLARE_OPCODE_CASE) | 300 FOREACH_STORE_MEM_OPCODE(DECLARE_OPCODE_CASE) |
| 296 #undef DECLARE_OPCODE_CASE | 301 #undef DECLARE_OPCODE_CASE |
| 297 { | 302 { |
| 298 MemoryAccessOperand operand(this, pc); | 303 MemoryAccessOperand operand(this, pc); |
| 299 return 1 + operand.length; | 304 return 1 + operand.length; |
| 300 } | 305 } |
| 301 case kExprBlock: | |
| 302 case kExprLoop: { | |
| 303 BlockCountOperand operand(this, pc); | |
| 304 return 1 + operand.length; | |
| 305 } | |
| 306 case kExprBr: | 306 case kExprBr: |
| 307 case kExprBrIf: { | 307 case kExprBrIf: { |
| 308 BreakDepthOperand operand(this, pc); | 308 BreakDepthOperand operand(this, pc); |
| 309 return 1 + operand.length; | 309 return 1 + operand.length; |
| 310 } | 310 } |
| 311 case kExprStoreGlobal: | 311 case kExprStoreGlobal: |
| 312 case kExprLoadGlobal: { | 312 case kExprLoadGlobal: { |
| 313 GlobalIndexOperand operand(this, pc); | 313 GlobalIndexOperand operand(this, pc); |
| 314 return 1 + operand.length; | 314 return 1 + operand.length; |
| 315 } | 315 } |
| (...skipping 45 matching lines...) |
| 361 // A shift-reduce-parser strategy for decoding Wasm code that uses an explicit | 361 // A shift-reduce-parser strategy for decoding Wasm code that uses an explicit |
| 362 // shift-reduce strategy with multiple internal stacks. | 362 // shift-reduce strategy with multiple internal stacks. |
| 363 class SR_WasmDecoder : public WasmDecoder { | 363 class SR_WasmDecoder : public WasmDecoder { |
| 364 public: | 364 public: |
| 365 SR_WasmDecoder(Zone* zone, TFBuilder* builder, FunctionBody& body) | 365 SR_WasmDecoder(Zone* zone, TFBuilder* builder, FunctionBody& body) |
| 366 : WasmDecoder(body.module, body.sig, body.start, body.end), | 366 : WasmDecoder(body.module, body.sig, body.start, body.end), |
| 367 zone_(zone), | 367 zone_(zone), |
| 368 builder_(builder), | 368 builder_(builder), |
| 369 base_(body.base), | 369 base_(body.base), |
| 370 local_type_vec_(zone), | 370 local_type_vec_(zone), |
| 371 trees_(zone), | |
| 372 stack_(zone), | 371 stack_(zone), |
| 373 blocks_(zone), | 372 control_(zone), |
| 374 ifs_(zone) { | 373 blocks_(zone) { |
| 375 local_types_ = &local_type_vec_; | 374 local_types_ = &local_type_vec_; |
| 376 } | 375 } |
| 377 | 376 |
| 378 TreeResult Decode() { | 377 bool Decode() { |
| 378 base::ElapsedTimer decode_timer; |
| 379 if (FLAG_trace_wasm_decode_time) { |
| 380 decode_timer.Start(); |
| 381 } |
| 382 stack_.clear(); |
| 383 control_.clear(); |
| 384 |
| 379 if (end_ < pc_) { | 385 if (end_ < pc_) { |
| 380 error(pc_, "function body end < start"); | 386 error(pc_, "function body end < start"); |
| 381 return result_; | 387 return false; |
| 382 } | 388 } |
| 383 | 389 |
| 384 DecodeLocalDecls(); | 390 DecodeLocalDecls(); |
| 385 InitSsaEnv(); | 391 InitSsaEnv(); |
| 386 DecodeFunctionBody(); | 392 DecodeFunctionBody(); |
| 387 | 393 |
| 388 Tree* tree = nullptr; | 394 if (failed()) return TraceFailed(); |
| 389 if (ok()) { | 395 |
| 390 if (ssa_env_->go()) { | 396 if (!control_.empty()) { |
| 391 if (stack_.size() > 0) { | 397 error(pc_, control_.back().pc, "unterminated control structure"); |
| 392 error(stack_.back().pc(), end_, "fell off end of code"); | 398 return TraceFailed(); |
| 393 } | |
| 394 AddImplicitReturnAtEnd(); | |
| 395 } | |
| 396 if (trees_.size() == 0) { | |
| 397 if (sig_->return_count() > 0) { | |
| 398 error(start_, "no trees created"); | |
| 399 } | |
| 400 } else { | |
| 401 tree = trees_[0]; | |
| 402 } | |
| 403 } | 399 } |
| 404 | 400 |
| 405 if (ok()) { | 401 if (ssa_env_->go()) { |
| 406 TRACE("wasm-decode ok\n"); | 402 TRACE(" @%-6d #xx:%-20s|", startrel(pc_), "ImplicitReturn"); |
| 407 } else { | 403 DoReturn(); |
| 408 TRACE("wasm-error module+%-6d func+%d: %s\n\n", baserel(error_pc_), | 404 if (failed()) return TraceFailed(); |
| 409 startrel(error_pc_), error_msg_.get()); | 405 TRACE("\n"); |
| 410 } | 406 } |
| 411 | 407 |
| 412 return toResult(tree); | 408 if (FLAG_trace_wasm_decode_time) { |
| 409 double ms = decode_timer.Elapsed().InMillisecondsF(); |
| 410 PrintF("wasm-decode ok (%0.3f ms)\n\n", ms); |
| 411 } else { |
| 412 TRACE("wasm-decode ok\n\n"); |
| 413 } |
| 414 |
| 415 return true; |
| 416 } |
| 417 |
| 418 bool TraceFailed() { |
| 419 TRACE("wasm-error module+%-6d func+%d: %s\n\n", baserel(error_pc_), |
| 420 startrel(error_pc_), error_msg_.get()); |
| 421 return false; |
| 413 } | 422 } |
| 414 | 423 |
| 415 std::vector<LocalType>* DecodeLocalDeclsForTesting() { | 424 std::vector<LocalType>* DecodeLocalDeclsForTesting() { |
| 416 DecodeLocalDecls(); | 425 DecodeLocalDecls(); |
| 417 if (failed()) return nullptr; | 426 if (failed()) return nullptr; |
| 418 auto result = new std::vector<LocalType>(); | 427 auto result = new std::vector<LocalType>(); |
| 419 result->reserve(local_type_vec_.size()); | 428 result->reserve(local_type_vec_.size()); |
| 420 for (size_t i = 0; i < local_type_vec_.size(); i++) { | 429 for (size_t i = 0; i < local_type_vec_.size(); i++) { |
| 421 result->push_back(local_type_vec_[i]); | 430 result->push_back(local_type_vec_[i]); |
| 422 } | 431 } |
| (...skipping 10 matching lines...) |
| 433 } | 442 } |
| 434 return AnalyzeLoopAssignment(pc); | 443 return AnalyzeLoopAssignment(pc); |
| 435 } | 444 } |
| 436 | 445 |
| 437 private: | 446 private: |
| 438 static const size_t kErrorMsgSize = 128; | 447 static const size_t kErrorMsgSize = 128; |
| 439 | 448 |
| 440 Zone* zone_; | 449 Zone* zone_; |
| 441 TFBuilder* builder_; | 450 TFBuilder* builder_; |
| 442 const byte* base_; | 451 const byte* base_; |
| 443 TreeResult result_; | |
| 444 | 452 |
| 445 SsaEnv* ssa_env_; | 453 SsaEnv* ssa_env_; |
| 446 | 454 |
| 447 ZoneVector<LocalType> local_type_vec_; | 455 ZoneVector<LocalType> local_type_vec_; |
| 448 ZoneVector<Tree*> trees_; | 456 ZoneVector<Value> stack_; |
| 449 ZoneVector<Production> stack_; | 457 ZoneVector<Control> control_; |
| 450 ZoneVector<Block> blocks_; | 458 ZoneVector<size_t> blocks_; |
| 451 ZoneVector<IfEnv> ifs_; | |
| 452 | 459 |
| 453 inline bool build() { return builder_ && ssa_env_->go(); } | 460 inline bool build() { return builder_ && ssa_env_->go(); } |
| 454 | 461 |
| 455 void InitSsaEnv() { | 462 void InitSsaEnv() { |
| 456 TFNode* start = nullptr; | 463 TFNode* start = nullptr; |
| 457 SsaEnv* ssa_env = reinterpret_cast<SsaEnv*>(zone_->New(sizeof(SsaEnv))); | 464 SsaEnv* ssa_env = reinterpret_cast<SsaEnv*>(zone_->New(sizeof(SsaEnv))); |
| 458 size_t size = sizeof(TFNode*) * EnvironmentCount(); | 465 size_t size = sizeof(TFNode*) * EnvironmentCount(); |
| 459 ssa_env->state = SsaEnv::kReached; | 466 ssa_env->state = SsaEnv::kReached; |
| 460 ssa_env->locals = | 467 ssa_env->locals = |
| 461 size > 0 ? reinterpret_cast<TFNode**>(zone_->New(size)) : nullptr; | 468 size > 0 ? reinterpret_cast<TFNode**>(zone_->New(size)) : nullptr; |
| (...skipping 31 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 493 case kAstF32: | 500 case kAstF32: |
| 494 return builder_->Float32Constant(0); | 501 return builder_->Float32Constant(0); |
| 495 case kAstF64: | 502 case kAstF64: |
| 496 return builder_->Float64Constant(0); | 503 return builder_->Float64Constant(0); |
| 497 default: | 504 default: |
| 498 UNREACHABLE(); | 505 UNREACHABLE(); |
| 499 return nullptr; | 506 return nullptr; |
| 500 } | 507 } |
| 501 } | 508 } |
| 502 | 509 |
| 503 void Leaf(LocalType type, TFNode* node = nullptr) { | |
| 504 size_t size = sizeof(Tree); | |
| 505 Tree* tree = reinterpret_cast<Tree*>(zone_->New(size)); | |
| 506 tree->type = type; | |
| 507 tree->count = 0; | |
| 508 tree->pc = pc_; | |
| 509 tree->node = node; | |
| 510 tree->children[0] = nullptr; | |
| 511 Reduce(tree); | |
| 512 } | |
| 513 | |
| 514 void Shift(LocalType type, uint32_t count) { | |
| 515 size_t size = | |
| 516 sizeof(Tree) + (count == 0 ? 0 : ((count - 1) * sizeof(Tree*))); | |
| 517 Tree* tree = reinterpret_cast<Tree*>(zone_->New(size)); | |
| 518 tree->type = type; | |
| 519 tree->count = count; | |
| 520 tree->pc = pc_; | |
| 521 tree->node = nullptr; | |
| 522 for (uint32_t i = 0; i < count; i++) tree->children[i] = nullptr; | |
| 523 if (count == 0) { | |
| 524 Production p = {tree, 0}; | |
| 525 Reduce(&p); | |
| 526 Reduce(tree); | |
| 527 } else { | |
| 528 stack_.push_back({tree, 0}); | |
| 529 } | |
| 530 } | |
| 531 | |
| 532 void Reduce(Tree* tree) { | |
| 533 while (true) { | |
| 534 if (stack_.size() == 0) { | |
| 535 trees_.push_back(tree); | |
| 536 break; | |
| 537 } | |
| 538 Production* p = &stack_.back(); | |
| 539 p->tree->children[p->index++] = tree; | |
| 540 Reduce(p); | |
| 541 if (p->done()) { | |
| 542 tree = p->tree; | |
| 543 stack_.pop_back(); | |
| 544 } else { | |
| 545 break; | |
| 546 } | |
| 547 } | |
| 548 } | |
| 549 | |
| 550 char* indentation() { | 510 char* indentation() { |
| 551 static const int kMaxIndent = 64; | 511 static const int kMaxIndent = 64; |
| 552 static char bytes[kMaxIndent + 1]; | 512 static char bytes[kMaxIndent + 1]; |
| 553 for (int i = 0; i < kMaxIndent; i++) bytes[i] = ' '; | 513 for (int i = 0; i < kMaxIndent; i++) bytes[i] = ' '; |
| 554 bytes[kMaxIndent] = 0; | 514 bytes[kMaxIndent] = 0; |
| 555 if (stack_.size() < kMaxIndent / 2) { | 515 if (stack_.size() < kMaxIndent / 2) { |
| 556 bytes[stack_.size() * 2] = 0; | 516 bytes[stack_.size() * 2] = 0; |
| 557 } | 517 } |
| 558 return bytes; | 518 return bytes; |
| 559 } | 519 } |
| (...skipping 30 matching lines...) |
| 590 break; | 550 break; |
| 591 default: | 551 default: |
| 592 error(pc_ - 1, "invalid local type"); | 552 error(pc_ - 1, "invalid local type"); |
| 593 return; | 553 return; |
| 594 } | 554 } |
| 595 local_type_vec_.insert(local_type_vec_.end(), count, type); | 555 local_type_vec_.insert(local_type_vec_.end(), count, type); |
| 596 } | 556 } |
| 597 total_locals_ = local_type_vec_.size(); | 557 total_locals_ = local_type_vec_.size(); |
| 598 } | 558 } |
| 599 | 559 |
| 600 // Decodes the body of a function, producing reduced trees into {result}. | 560 // Decodes the body of a function. |
| 601 void DecodeFunctionBody() { | 561 void DecodeFunctionBody() { |
| 602 TRACE("wasm-decode %p...%p (%d bytes) %s\n", | 562 TRACE("wasm-decode %p...%p (module+%d, %d bytes) %s\n", |
| 603 reinterpret_cast<const void*>(start_), | 563 reinterpret_cast<const void*>(start_), |
| 604 reinterpret_cast<const void*>(limit_), | 564 reinterpret_cast<const void*>(limit_), baserel(pc_), |
| 605 static_cast<int>(limit_ - start_), builder_ ? "graph building" : ""); | 565 static_cast<int>(limit_ - start_), builder_ ? "graph building" : ""); |
| 606 | 566 |
| 607 if (pc_ >= limit_) return; // Nothing to do. | 567 if (pc_ >= limit_) return; // Nothing to do. |
| 608 | 568 |
| 609 while (true) { // decoding loop. | 569 while (true) { // decoding loop. |
| 610 int len = 1; | 570 int len = 1; |
| 611 WasmOpcode opcode = static_cast<WasmOpcode>(*pc_); | 571 WasmOpcode opcode = static_cast<WasmOpcode>(*pc_); |
| 612 TRACE("wasm-decode module+%-6d %s func+%d: 0x%02x %s\n", baserel(pc_), | 572 TRACE(" @%-6d #%02x:%-20s|", startrel(pc_), opcode, |
| 613 indentation(), startrel(pc_), opcode, | 573 WasmOpcodes::ShortOpcodeName(opcode)); |
| 614 WasmOpcodes::OpcodeName(opcode)); | |
| 615 | 574 |
| 616 FunctionSig* sig = WasmOpcodes::Signature(opcode); | 575 FunctionSig* sig = WasmOpcodes::Signature(opcode); |
| 617 if (sig) { | 576 if (sig) { |
| 618 // A simple expression with a fixed signature. | 577 // Fast case of a simple operator. |
| 619 Shift(sig->GetReturn(), static_cast<uint32_t>(sig->parameter_count())); | 578 TFNode* node; |
| 620 pc_ += len; | 579 switch (sig->parameter_count()) { |
| 621 if (pc_ >= limit_) { | 580 case 1: { |
| 622 // End of code reached or exceeded. | 581 Value val = Pop(0, sig->GetParam(0)); |
| 623 if (pc_ > limit_ && ok()) { | 582 node = BUILD(Unop, opcode, val.node); |
| 624 error("Beyond end of code"); | 583 break; |
| 625 } | 584 } |
| 626 return; | 585 case 2: { |
| 586 Value rval = Pop(1, sig->GetParam(1)); |
| 587 Value lval = Pop(0, sig->GetParam(0)); |
| 588 node = BUILD(Binop, opcode, lval.node, rval.node); |
| 589 break; |
| 590 } |
| 591 default: |
| 592 UNREACHABLE(); |
| 593 node = nullptr; |
| 594 break; |
| 627 } | 595 } |
| 628 continue; // back to decoding loop. | 596 Push(GetReturnType(sig), node); |
| 629 } | 597 } else { |
| 630 | 598 // Complex bytecode. |
| 631 switch (opcode) { | 599 switch (opcode) { |
| 632 case kExprNop: | 600 case kExprNop: |
| 633 Leaf(kAstStmt); | 601 Push(kAstStmt, nullptr); |
| 634 break; | 602 break; |
| 635 case kExprBlock: { | 603 case kExprBlock: { |
| 636 BlockCountOperand operand(this, pc_); | |
| 637 if (operand.count < 1) { | |
| 638 Leaf(kAstStmt); | |
| 639 } else { | |
| 640 Shift(kAstEnd, operand.count); | |
| 641 // The break environment is the outer environment. | 604 // The break environment is the outer environment. |
| 642 SsaEnv* break_env = ssa_env_; | 605 SsaEnv* break_env = ssa_env_; |
| 643 PushBlock(break_env); | 606 PushBlock(break_env); |
| 644 SetEnv("block:start", Steal(break_env)); | 607 SetEnv("block:start", Steal(break_env)); |
| 608 break; |
| 645 } | 609 } |
| 646 len = 1 + operand.length; | 610 case kExprLoop: { |
| 647 break; | |
| 648 } | |
| 649 case kExprLoop: { | |
| 650 BlockCountOperand operand(this, pc_); | |
| 651 if (operand.count < 1) { | |
| 652 Leaf(kAstStmt); | |
| 653 } else { | |
| 654 Shift(kAstEnd, operand.count); | |
| 655 // The break environment is the outer environment. | 611 // The break environment is the outer environment. |
| 656 SsaEnv* break_env = ssa_env_; | 612 SsaEnv* break_env = ssa_env_; |
| 657 PushBlock(break_env); | 613 PushBlock(break_env); |
| 658 SsaEnv* cont_env = Steal(break_env); | 614 SsaEnv* cont_env = Steal(break_env); |
| 659 // The continue environment is the inner environment. | 615 // The continue environment is the inner environment. |
| 660 PrepareForLoop(pc_, cont_env); | 616 PrepareForLoop(pc_, cont_env); |
| 661 SetEnv("loop:start", Split(cont_env)); | 617 SetEnv("loop:start", Split(cont_env)); |
| 662 if (ssa_env_->go()) ssa_env_->state = SsaEnv::kReached; | 618 ssa_env_->SetNotMerged(); |
| 663 PushBlock(cont_env); | 619 PushBlock(cont_env, true); |
| 664 blocks_.back().stack_depth = -1; // no production for inner block. | 620 break; |
| 665 } | 621 } |
| 666 len = 1 + operand.length; | 622 case kExprIf: { |
| 667 break; | 623 // Condition on top of stack. Split environment for true branch. |
| 624 Value cond = Pop(0, kAstI32); |
| 625 SsaEnv* false_env = ssa_env_; |
| 626 SsaEnv* true_env = Split(ssa_env_); |
| 627 false_env->SetNotMerged(); |
| 628 PushIf(false_env); |
| 629 BUILD(Branch, cond.node, &true_env->control, &false_env->control); |
| 630 SetEnv("if:true", true_env); |
| 631 break; |
| 632 } |
| 633 case kExprElse: { |
| 634 if (control_.empty()) { |
| 635 error(pc_, "else does not match any if"); |
| 636 break; |
| 637 } |
| 638 Control* c = &control_.back(); |
| 639 if (!c->is_if()) { |
| 640 error(pc_, c->pc, "else does not match an if"); |
| 641 break; |
| 642 } |
| 643 if (++c->counter > 1) { |
| 644 error(pc_, c->pc, "else already present for if"); |
| 645 break; |
| 646 } |
| 647 Value val = PopUpTo(c->stack_depth); |
| 648 // The {ssa_env} stores the false env for an if. |
| 649 SsaEnv* false_env = Steal(c->ssa_env); |
| 650 MergeInto(c->ssa_env, &c->node, &c->type, val); |
| 651 // Switch to environment for false branch. |
| 652 SetEnv("if_else:false", false_env); |
| 653 break; |
| 654 } |
| 655 case kExprEnd: { |
| 656 if (control_.empty()) { |
| 657 error(pc_, "end does not match any if or block"); |
| 658 break; |
| 659 } |
| 660 const char* name = "block:end"; |
| 661 Control* c = &control_.back(); |
| 662 if (c->is_loop()) { |
| 663 // Loops always push control in pairs. |
| 664 control_.pop_back(); |
| 665 c = &control_.back(); |
| 666 name = "loop:end"; |
| 667 } |
| 668 Value val = PopUpTo(c->stack_depth); |
| 669 if (c->is_if()) { |
| 670 if (c->counter == 0) { |
| 671 // End of a one-armed if. |
| 672 val = {val.pc, nullptr, kAstStmt}; |
| 673 name = "if:merge"; |
| 674 } else { |
| 675 // End of a two-armed if. |
| 676 name = "if_else:merge"; |
| 677 } |
| 678 } |
| 679 if (ssa_env_->go()) { |
| 680 MergeInto(c->ssa_env, &c->node, &c->type, val); |
| 681 } |
| 682 SetEnv(name, c->ssa_env); |
| 683 stack_.resize(c->stack_depth); |
| 684 blocks_.resize(c->block_depth); |
| 685 Push(c->type, c->node); |
| 686 control_.pop_back(); |
| 687 break; |
| 688 } |
| 689 case kExprSelect: { |
| 690 Value cond = Pop(2, kAstI32); |
| 691 Value fval = Pop(); |
| 692 Value tval = Pop(); |
| 693 if (tval.type == kAstStmt || tval.type != fval.type) { |
| 694 if (tval.type != kAstEnd && fval.type != kAstEnd) { |
| 695 error(pc_, "type mismatch in select"); |
| 696 break; |
| 697 } |
| 698 } |
| 699 if (build()) { |
| 700 DCHECK(tval.type != kAstEnd); |
| 701 DCHECK(fval.type != kAstEnd); |
| 702 DCHECK(cond.type != kAstEnd); |
| 703 TFNode* controls[2]; |
| 704 builder_->Branch(cond.node, &controls[0], &controls[1]); |
| 705 TFNode* merge = builder_->Merge(2, controls); |
| 706 TFNode* vals[2] = {tval.node, fval.node}; |
| 707 TFNode* phi = builder_->Phi(tval.type, 2, vals, merge); |
| 708 Push(tval.type, phi); |
| 709 ssa_env_->control = merge; |
| 710 } else { |
| 711 Push(tval.type, nullptr); |
| 712 } |
| 713 break; |
| 714 } |
| 715 case kExprBr: { |
| 716 Value val = Pop(); |
| 717 BreakDepthOperand operand(this, pc_); |
| 718 if (Validate(pc_, operand, blocks_, control_)) { |
| 719 BreakTo(operand.target, val); |
| 720 } |
| 721 len = 1 + operand.length; |
| 722 Push(kAstEnd, nullptr); |
| 723 break; |
| 724 } |
| 725 case kExprBrIf: { |
| 726 Value cond = Pop(1, kAstI32); |
| 727 Value val = Pop(); |
| 728 BreakDepthOperand operand(this, pc_); |
| 729 if (Validate(pc_, operand, blocks_, control_)) { |
| 730 SsaEnv* fenv = ssa_env_; |
| 731 SsaEnv* tenv = Split(fenv); |
| 732 fenv->SetNotMerged(); |
| 733 BUILD(Branch, cond.node, &tenv->control, &fenv->control); |
| 734 ssa_env_ = tenv; |
| 735 BreakTo(operand.target, val); |
| 736 ssa_env_ = fenv; |
| 737 } |
| 738 len = 1 + operand.length; |
| 739 Push(kAstStmt, nullptr); |
| 740 break; |
| 741 } |
| 742 case kExprBrTable: { |
| 743 BranchTableOperand operand(this, pc_); |
| 744 if (Validate(pc_, operand, blocks_.size())) { |
| 745 Value key = Pop(0, kAstI32); |
| 746 if (failed()) break; |
| 747 |
| 748 Value voidval = {pc_, nullptr, kAstStmt}; |
| 749 SsaEnv* break_env = ssa_env_; |
| 750 if (operand.table_count > 0) { |
| 751 // Build branches to the various blocks based on the table. |
| 752 TFNode* sw = BUILD(Switch, operand.table_count + 1, key.node); |
| 753 |
| 754 SsaEnv* copy = Steal(break_env); |
| 755 ssa_env_ = copy; |
| 756 for (uint32_t i = 0; i < operand.table_count + 1; i++) { |
| 757 uint16_t target = operand.read_entry(this, i); |
| 758 ssa_env_ = Split(copy); |
| 759 ssa_env_->control = (i == operand.table_count) |
| 760 ? BUILD(IfDefault, sw) |
| 761 : BUILD(IfValue, i, sw); |
| 762 int depth = target; |
| 763 size_t block_index = blocks_[blocks_.size() - depth - 1]; |
| 764 Control* c = &control_[block_index]; |
| 765 MergeInto(c->ssa_env, &c->node, &c->type, voidval); |
| 766 } |
| 767 } else { |
| 768 // Only a default target. Do the equivalent of br. |
| 769 uint16_t target = operand.read_entry(this, 0); |
| 770 int depth = target; |
| 771 size_t block_index = blocks_[blocks_.size() - depth - 1]; |
| 772 Control* c = &control_[block_index]; |
| 773 MergeInto(c->ssa_env, &c->node, &c->type, voidval); |
| 774 } |
| 775 // br_table ends the control flow like br. |
| 776 ssa_env_ = break_env; |
| 777 Push(kAstStmt, nullptr); |
| 778 } |
| 779 len = 1 + operand.length; |
| 780 break; |
| 781 } |
| 782 case kExprReturn: { |
| 783 DoReturn(); |
| 784 break; |
| 785 } |
| 786 case kExprUnreachable: { |
| 787 Push(kAstEnd, BUILD0(Unreachable)); |
| 788 ssa_env_->Kill(SsaEnv::kControlEnd); |
| 789 break; |
| 790 } |
| 791 case kExprI8Const: { |
| 792 ImmI8Operand operand(this, pc_); |
| 793 Push(kAstI32, BUILD(Int32Constant, operand.value)); |
| 794 len = 1 + operand.length; |
| 795 break; |
| 796 } |
| 797 case kExprI32Const: { |
| 798 ImmI32Operand operand(this, pc_); |
| 799 Push(kAstI32, BUILD(Int32Constant, operand.value)); |
| 800 len = 1 + operand.length; |
| 801 break; |
| 802 } |
| 803 case kExprI64Const: { |
| 804 ImmI64Operand operand(this, pc_); |
| 805 Push(kAstI64, BUILD(Int64Constant, operand.value)); |
| 806 len = 1 + operand.length; |
| 807 break; |
| 808 } |
| 809 case kExprF32Const: { |
| 810 ImmF32Operand operand(this, pc_); |
| 811 Push(kAstF32, BUILD(Float32Constant, operand.value)); |
| 812 len = 1 + operand.length; |
| 813 break; |
| 814 } |
| 815 case kExprF64Const: { |
| 816 ImmF64Operand operand(this, pc_); |
| 817 Push(kAstF64, BUILD(Float64Constant, operand.value)); |
| 818 len = 1 + operand.length; |
| 819 break; |
| 820 } |
| 821 case kExprGetLocal: { |
| 822 LocalIndexOperand operand(this, pc_); |
| 823 if (Validate(pc_, operand)) { |
| 824 if (build()) { |
| 825 Push(operand.type, ssa_env_->locals[operand.index]); |
| 826 } else { |
| 827 Push(operand.type, nullptr); |
| 828 } |
| 829 } |
| 830 len = 1 + operand.length; |
| 831 break; |
| 832 } |
| 833 case kExprSetLocal: { |
| 834 LocalIndexOperand operand(this, pc_); |
| 835 if (Validate(pc_, operand)) { |
| 836 Value val = Pop(0, local_type_vec_[operand.index]); |
| 837 if (ssa_env_->locals) ssa_env_->locals[operand.index] = val.node; |
| 838 Push(val.type, val.node); |
| 839 } |
| 840 len = 1 + operand.length; |
| 841 break; |
| 842 } |
| 843 case kExprLoadGlobal: { |
| 844 GlobalIndexOperand operand(this, pc_); |
| 845 if (Validate(pc_, operand)) { |
| 846 Push(operand.type, BUILD(LoadGlobal, operand.index)); |
| 847 } |
| 848 len = 1 + operand.length; |
| 849 break; |
| 850 } |
| 851 case kExprStoreGlobal: { |
| 852 GlobalIndexOperand operand(this, pc_); |
| 853 if (Validate(pc_, operand)) { |
| 854 Value val = Pop(0, operand.type); |
| 855 BUILD(StoreGlobal, operand.index, val.node); |
| 856 Push(val.type, val.node); |
| 857 } |
| 858 len = 1 + operand.length; |
| 859 break; |
| 860 } |
| 861 case kExprI32LoadMem8S: |
| 862 len = DecodeLoadMem(kAstI32, MachineType::Int8()); |
| 863 break; |
| 864 case kExprI32LoadMem8U: |
| 865 len = DecodeLoadMem(kAstI32, MachineType::Uint8()); |
| 866 break; |
| 867 case kExprI32LoadMem16S: |
| 868 len = DecodeLoadMem(kAstI32, MachineType::Int16()); |
| 869 break; |
| 870 case kExprI32LoadMem16U: |
| 871 len = DecodeLoadMem(kAstI32, MachineType::Uint16()); |
| 872 break; |
| 873 case kExprI32LoadMem: |
| 874 len = DecodeLoadMem(kAstI32, MachineType::Int32()); |
| 875 break; |
| 876 |
| 877 case kExprI64LoadMem8S: |
| 878 len = DecodeLoadMem(kAstI64, MachineType::Int8()); |
| 879 break; |
| 880 case kExprI64LoadMem8U: |
| 881 len = DecodeLoadMem(kAstI64, MachineType::Uint8()); |
| 882 break; |
| 883 case kExprI64LoadMem16S: |
| 884 len = DecodeLoadMem(kAstI64, MachineType::Int16()); |
| 885 break; |
| 886 case kExprI64LoadMem16U: |
| 887 len = DecodeLoadMem(kAstI64, MachineType::Uint16()); |
| 888 break; |
| 889 case kExprI64LoadMem32S: |
| 890 len = DecodeLoadMem(kAstI64, MachineType::Int32()); |
| 891 break; |
| 892 case kExprI64LoadMem32U: |
| 893 len = DecodeLoadMem(kAstI64, MachineType::Uint32()); |
| 894 break; |
| 895 case kExprI64LoadMem: |
| 896 len = DecodeLoadMem(kAstI64, MachineType::Int64()); |
| 897 break; |
| 898 case kExprF32LoadMem: |
| 899 len = DecodeLoadMem(kAstF32, MachineType::Float32()); |
| 900 break; |
| 901 case kExprF64LoadMem: |
| 902 len = DecodeLoadMem(kAstF64, MachineType::Float64()); |
| 903 break; |
| 904 case kExprI32StoreMem8: |
| 905 len = DecodeStoreMem(kAstI32, MachineType::Int8()); |
| 906 break; |
| 907 case kExprI32StoreMem16: |
| 908 len = DecodeStoreMem(kAstI32, MachineType::Int16()); |
| 909 break; |
| 910 case kExprI32StoreMem: |
| 911 len = DecodeStoreMem(kAstI32, MachineType::Int32()); |
| 912 break; |
| 913 case kExprI64StoreMem8: |
| 914 len = DecodeStoreMem(kAstI64, MachineType::Int8()); |
| 915 break; |
| 916 case kExprI64StoreMem16: |
| 917 len = DecodeStoreMem(kAstI64, MachineType::Int16()); |
| 918 break; |
| 919 case kExprI64StoreMem32: |
| 920 len = DecodeStoreMem(kAstI64, MachineType::Int32()); |
| 921 break; |
| 922 case kExprI64StoreMem: |
| 923 len = DecodeStoreMem(kAstI64, MachineType::Int64()); |
| 924 break; |
| 925 case kExprF32StoreMem: |
| 926 len = DecodeStoreMem(kAstF32, MachineType::Float32()); |
| 927 break; |
| 928 case kExprF64StoreMem: |
| 929 len = DecodeStoreMem(kAstF64, MachineType::Float64()); |
| 930 break; |
| 931 |
| 932 case kExprMemorySize: |
| 933 Push(kAstI32, BUILD(MemSize, 0)); |
| 934 break; |
| 935 case kExprGrowMemory: { |
| 936 Value val = Pop(0, kAstI32); |
| 937 USE(val); // TODO(titzer): build node for grow memory |
| 938 Push(kAstI32, BUILD(Int32Constant, 0)); |
| 939 break; |
| 940 } |
| 941 case kExprCallFunction: { |
| 942 FunctionIndexOperand operand(this, pc_); |
| 943 if (Validate(pc_, operand)) { |
| 944 TFNode** buffer = PopArgs(operand.sig); |
| 945 Push(GetReturnType(operand.sig), |
| 946 BUILD(CallDirect, operand.index, buffer)); |
| 947 } |
| 948 len = 1 + operand.length; |
| 949 break; |
| 950 } |
| 951 case kExprCallIndirect: { |
| 952 SignatureIndexOperand operand(this, pc_); |
| 953 if (Validate(pc_, operand)) { |
| 954 TFNode** buffer = PopArgs(operand.sig); |
| 955 Value index = Pop(0, kAstI32); |
| 956 if (buffer) buffer[0] = index.node; |
| 957 Push(GetReturnType(operand.sig), |
| 958 BUILD(CallIndirect, operand.index, buffer)); |
| 959 } |
| 960 len = 1 + operand.length; |
| 961 break; |
| 962 } |
| 963 case kExprCallImport: { |
| 964 ImportIndexOperand operand(this, pc_); |
| 965 if (Validate(pc_, operand)) { |
| 966 TFNode** buffer = PopArgs(operand.sig); |
| 967 Push(GetReturnType(operand.sig), |
| 968 BUILD(CallImport, operand.index, buffer)); |
| 969 } |
| 970 len = 1 + operand.length; |
| 971 break; |
| 972 } |
| 973 default: |
| 974 error("Invalid opcode"); |
| 975 return; |
| 668 } | 976 } |
| 669 case kExprIf: | 977 } // end complex bytecode |
| 670 Shift(kAstStmt, 2); | 978 |
| 671 break; | 979 #if DEBUG |
| 672 case kExprIfElse: | 980 if (FLAG_trace_wasm_decoder) { |
| 673 Shift(kAstEnd, 3); // Result type is typeof(x) in {c ? x : y}. | 981 for (size_t i = 0; i < stack_.size(); i++) { |
| 674 break; | 982 Value& val = stack_[i]; |
| 675 case kExprSelect: | 983 PrintF( |
| 676 Shift(kAstStmt, 3); // Result type is typeof(x) in {c ? x : y}. | 984 " %c@%d:%s", WasmOpcodes::ShortNameOf(val.type), |
| 677 break; | 985 static_cast<int>(val.pc - start_), |
| 678 case kExprBr: { | 986 WasmOpcodes::ShortOpcodeName(static_cast<WasmOpcode>(*val.pc))); |
| 679 BreakDepthOperand operand(this, pc_); | |
| 680 if (Validate(pc_, operand, blocks_)) { | |
| 681 Shift(kAstEnd, 1); | |
| 682 } | |
| 683 len = 1 + operand.length; | |
| 684 break; | |
| 685 } | 987 } |
| 686 case kExprBrIf: { | 988 PrintF("\n"); |
| 687 BreakDepthOperand operand(this, pc_); | 989 } |
| 688 if (Validate(pc_, operand, blocks_)) { | 990 #endif |
| 689 Shift(kAstStmt, 2); | |
| 690 } | |
| 691 len = 1 + operand.length; | |
| 692 break; | |
| 693 } | |
| 694 case kExprBrTable: { | |
| 695 BranchTableOperand operand(this, pc_); | |
| 696 if (Validate(pc_, operand, blocks_.size())) { | |
| 697 Shift(kAstEnd, 1); | |
| 698 } | |
| 699 len = 1 + operand.length; | |
| 700 break; | |
| 701 } | |
| 702 case kExprReturn: { | |
| 703 int count = static_cast<int>(sig_->return_count()); | |
| 704 if (count == 0) { | |
| 705 BUILD(Return, 0, builder_->Buffer(0)); | |
| 706 ssa_env_->Kill(); | |
| 707 Leaf(kAstEnd); | |
| 708 } else { | |
| 709 Shift(kAstEnd, count); | |
| 710 } | |
| 711 break; | |
| 712 } | |
| 713 case kExprUnreachable: { | |
| 714 BUILD0(Unreachable); | |
| 715 ssa_env_->Kill(SsaEnv::kControlEnd); | |
| 716 Leaf(kAstEnd, nullptr); | |
| 717 break; | |
| 718 } | |
| 719 case kExprI8Const: { | |
| 720 ImmI8Operand operand(this, pc_); | |
| 721 Leaf(kAstI32, BUILD(Int32Constant, operand.value)); | |
| 722 len = 1 + operand.length; | |
| 723 break; | |
| 724 } | |
| 725 case kExprI32Const: { | |
| 726 ImmI32Operand operand(this, pc_); | |
| 727 Leaf(kAstI32, BUILD(Int32Constant, operand.value)); | |
| 728 len = 1 + operand.length; | |
| 729 break; | |
| 730 } | |
| 731 case kExprI64Const: { | |
| 732 ImmI64Operand operand(this, pc_); | |
| 733 Leaf(kAstI64, BUILD(Int64Constant, operand.value)); | |
| 734 len = 1 + operand.length; | |
| 735 break; | |
| 736 } | |
| 737 case kExprF32Const: { | |
| 738 ImmF32Operand operand(this, pc_); | |
| 739 Leaf(kAstF32, BUILD(Float32Constant, operand.value)); | |
| 740 len = 1 + operand.length; | |
| 741 break; | |
| 742 } | |
| 743 case kExprF64Const: { | |
| 744 ImmF64Operand operand(this, pc_); | |
| 745 Leaf(kAstF64, BUILD(Float64Constant, operand.value)); | |
| 746 len = 1 + operand.length; | |
| 747 break; | |
| 748 } | |
| 749 case kExprGetLocal: { | |
| 750 LocalIndexOperand operand(this, pc_); | |
| 751 if (Validate(pc_, operand)) { | |
| 752 TFNode* val = build() ? ssa_env_->locals[operand.index] : nullptr; | |
| 753 Leaf(operand.type, val); | |
| 754 } | |
| 755 len = 1 + operand.length; | |
| 756 break; | |
| 757 } | |
| 758 case kExprSetLocal: { | |
| 759 LocalIndexOperand operand(this, pc_); | |
| 760 if (Validate(pc_, operand)) { | |
| 761 Shift(operand.type, 1); | |
| 762 } | |
| 763 len = 1 + operand.length; | |
| 764 break; | |
| 765 } | |
| 766 case kExprLoadGlobal: { | |
| 767 GlobalIndexOperand operand(this, pc_); | |
| 768 if (Validate(pc_, operand)) { | |
| 769 Leaf(operand.type, BUILD(LoadGlobal, operand.index)); | |
| 770 } | |
| 771 len = 1 + operand.length; | |
| 772 break; | |
| 773 } | |
| 774 case kExprStoreGlobal: { | |
| 775 GlobalIndexOperand operand(this, pc_); | |
| 776 if (Validate(pc_, operand)) { | |
| 777 Shift(operand.type, 1); | |
| 778 } | |
| 779 len = 1 + operand.length; | |
| 780 break; | |
| 781 } | |
| 782 case kExprI32LoadMem8S: | |
| 783 case kExprI32LoadMem8U: | |
| 784 case kExprI32LoadMem16S: | |
| 785 case kExprI32LoadMem16U: | |
| 786 case kExprI32LoadMem: | |
| 787 len = DecodeLoadMem(pc_, kAstI32); | |
| 788 break; | |
| 789 case kExprI64LoadMem8S: | |
| 790 case kExprI64LoadMem8U: | |
| 791 case kExprI64LoadMem16S: | |
| 792 case kExprI64LoadMem16U: | |
| 793 case kExprI64LoadMem32S: | |
| 794 case kExprI64LoadMem32U: | |
| 795 case kExprI64LoadMem: | |
| 796 len = DecodeLoadMem(pc_, kAstI64); | |
| 797 break; | |
| 798 case kExprF32LoadMem: | |
| 799 len = DecodeLoadMem(pc_, kAstF32); | |
| 800 break; | |
| 801 case kExprF64LoadMem: | |
| 802 len = DecodeLoadMem(pc_, kAstF64); | |
| 803 break; | |
| 804 case kExprI32StoreMem8: | |
| 805 case kExprI32StoreMem16: | |
| 806 case kExprI32StoreMem: | |
| 807 len = DecodeStoreMem(pc_, kAstI32); | |
| 808 break; | |
| 809 case kExprI64StoreMem8: | |
| 810 case kExprI64StoreMem16: | |
| 811 case kExprI64StoreMem32: | |
| 812 case kExprI64StoreMem: | |
| 813 len = DecodeStoreMem(pc_, kAstI64); | |
| 814 break; | |
| 815 case kExprF32StoreMem: | |
| 816 len = DecodeStoreMem(pc_, kAstF32); | |
| 817 break; | |
| 818 case kExprF64StoreMem: | |
| 819 len = DecodeStoreMem(pc_, kAstF64); | |
| 820 break; | |
| 821 case kExprMemorySize: | |
| 822 Leaf(kAstI32, BUILD(MemSize, 0)); | |
| 823 break; | |
| 824 case kExprGrowMemory: | |
| 825 Shift(kAstI32, 1); | |
| 826 break; | |
| 827 case kExprCallFunction: { | |
| 828 FunctionIndexOperand operand(this, pc_); | |
| 829 if (Validate(pc_, operand)) { | |
| 830 LocalType type = operand.sig->return_count() == 0 | |
| 831 ? kAstStmt | |
| 832 : operand.sig->GetReturn(); | |
| 833 Shift(type, static_cast<int>(operand.sig->parameter_count())); | |
| 834 } | |
| 835 len = 1 + operand.length; | |
| 836 break; | |
| 837 } | |
| 838 case kExprCallIndirect: { | |
| 839 SignatureIndexOperand operand(this, pc_); | |
| 840 if (Validate(pc_, operand)) { | |
| 841 LocalType type = operand.sig->return_count() == 0 | |
| 842 ? kAstStmt | |
| 843 : operand.sig->GetReturn(); | |
| 844 Shift(type, static_cast<int>(1 + operand.sig->parameter_count())); | |
| 845 } | |
| 846 len = 1 + operand.length; | |
| 847 break; | |
| 848 } | |
| 849 case kExprCallImport: { | |
| 850 ImportIndexOperand operand(this, pc_); | |
| 851 if (Validate(pc_, operand)) { | |
| 852 LocalType type = operand.sig->return_count() == 0 | |
| 853 ? kAstStmt | |
| 854 : operand.sig->GetReturn(); | |
| 855 Shift(type, static_cast<int>(operand.sig->parameter_count())); | |
| 856 } | |
| 857 len = 1 + operand.length; | |
| 858 break; | |
| 859 } | |
| 860 case kExprDeclLocals: | |
| 861 default: | |
| 862 error("Invalid opcode"); | |
| 863 return; | |
| 864 } | |
| 865 pc_ += len; | 991 pc_ += len; |
| 866 if (pc_ >= limit_) { | 992 if (pc_ >= limit_) { |
| 867 // End of code reached or exceeded. | 993 // End of code reached or exceeded. |
| 868 if (pc_ > limit_ && ok()) { | 994 if (pc_ > limit_ && ok()) error("Beyond end of code"); |
| 869 error("Beyond end of code"); | |
| 870 } | |
| 871 return; | 995 return; |
| 872 } | 996 } |
| 873 } | 997 } // end decode loop |
| 874 } | 998 } // end DecodeFunctionBody() |
| 875 | 999 |
| 876 void PushBlock(SsaEnv* ssa_env) { | 1000 TFNode** PopArgs(FunctionSig* sig) { |
| 877 blocks_.push_back({ssa_env, static_cast<int>(stack_.size() - 1)}); | 1001 if (build()) { |
| 878 } | 1002 int count = static_cast<int>(sig->parameter_count()); |
| 879 | 1003 TFNode** buffer = builder_->Buffer(count + 1); |
| 880 int DecodeLoadMem(const byte* pc, LocalType type) { | 1004 buffer[0] = nullptr; // reserved for code object or function index. |
| 881 MemoryAccessOperand operand(this, pc); | 1005 for (int i = count - 1; i >= 0; i--) { |
| 882 Shift(type, 1); | 1006 buffer[i + 1] = Pop(i, sig->GetParam(i)).node; |
| 1007 } |
| 1008 return buffer; |
| 1009 } else { |
| 1010 int count = static_cast<int>(sig->parameter_count()); |
| 1011 for (int i = count - 1; i >= 0; i--) { |
| 1012 Pop(i, sig->GetParam(i)); |
| 1013 } |
| 1014 return nullptr; |
| 1015 } |
| 1016 } |
| 1017 |
| 1018 LocalType GetReturnType(FunctionSig* sig) { |
| 1019 return sig->return_count() == 0 ? kAstStmt : sig->GetReturn(); |
| 1020 } |
| 1021 |
| 1022 void PushBlock(SsaEnv* merge_env, bool loop = false) { |
| 1023 int stack_depth = loop ? -1 : static_cast<int>(stack_.size()); |
| 1024 int block_depth = static_cast<int>(blocks_.size()); |
| 1025 control_.push_back( |
| 1026 {pc_, stack_depth, block_depth, merge_env, nullptr, kAstEnd, 0, 0}); |
| 1027 blocks_.push_back(control_.size() - 1); |
| 1028 } |
| 1029 |
| 1030 void PushIf(SsaEnv* merge_env) { |
| 1031 int stack_depth = static_cast<int>(stack_.size()); |
| 1032 int block_depth = static_cast<int>(blocks_.size()); |
| 1033 control_.push_back( |
| 1034 {pc_, stack_depth, block_depth, merge_env, nullptr, kAstStmt, 0, 0}); |
| 1035 } |
| 1036 |
| 1037 int DecodeLoadMem(LocalType type, MachineType mem_type) { |
| 1038 MemoryAccessOperand operand(this, pc_); |
| 1039 Value index = Pop(0, kAstI32); |
| 1040 TFNode* node = BUILD(LoadMem, type, mem_type, index.node, operand.offset); |
| 1041 Push(type, node); |
| 883 return 1 + operand.length; | 1042 return 1 + operand.length; |
| 884 } | 1043 } |
| 885 | 1044 |
| 886 int DecodeStoreMem(const byte* pc, LocalType type) { | 1045 int DecodeStoreMem(LocalType type, MachineType mem_type) { |
| 887 MemoryAccessOperand operand(this, pc); | 1046 MemoryAccessOperand operand(this, pc_); |
| 888 Shift(type, 2); | 1047 Value val = Pop(1, type); |
| 1048 Value index = Pop(0, kAstI32); |
| 1049 BUILD(StoreMem, mem_type, index.node, operand.offset, val.node); |
| 1050 Push(type, val.node); |
| 889 return 1 + operand.length; | 1051 return 1 + operand.length; |
| 890 } | 1052 } |
| 891 | 1053 |
| 892 void AddImplicitReturnAtEnd() { | 1054 void DoReturn() { |
| 893 int retcount = static_cast<int>(sig_->return_count()); | 1055 int count = static_cast<int>(sig_->return_count()); |
| 894 if (retcount == 0) { | 1056 TFNode** buffer = nullptr; |
| 895 BUILD0(ReturnVoid); | 1057 if (build()) buffer = builder_->Buffer(count); |
| 896 return; | 1058 |
| 897 } | 1059 // Pop return values off the stack in reverse order. |
| 898 | 1060 for (int i = count - 1; i >= 0; i--) { |
| 899 if (static_cast<int>(trees_.size()) < retcount) { | 1061 Value val = Pop(i, sig_->GetReturn(i)); |
| 900 error(limit_, nullptr, | 1062 if (buffer) buffer[i] = val.node; |
| 901 "ImplicitReturn expects %d arguments, only %d remain", retcount, | 1063 } |
| 902 static_cast<int>(trees_.size())); | 1064 |
| 903 return; | 1065 Push(kAstEnd, BUILD(Return, count, buffer)); |
| 904 } | 1066 ssa_env_->Kill(SsaEnv::kControlEnd); |
| 905 | 1067 } |
| 906 TRACE("wasm-decode implicit return of %d args\n", retcount); | 1068 |
| 907 | 1069 void Push(LocalType type, TFNode* node) { |
| 908 TFNode** buffer = BUILD(Buffer, retcount); | 1070 stack_.push_back({pc_, node, type}); |
| 909 for (int index = 0; index < retcount; index++) { | 1071 } |
| 910 Tree* tree = trees_[trees_.size() - 1 - index]; | 1072 |
| 911 if (buffer) buffer[index] = tree->node; | 1073 Value Pop(int index, LocalType expected) { |
| 912 LocalType expected = sig_->GetReturn(index); | 1074 Value val = Pop(); |
| 913 if (tree->type != expected) { | 1075 if (val.type != expected) { |
| 914 error(limit_, tree->pc, | 1076 if (val.type != kAstEnd) { |
| 915 "ImplicitReturn[%d] expected type %s, found %s of type %s", index, | 1077 error(pc_, val.pc, "%s[%d] expected type %s, found %s of type %s", |
| 916 WasmOpcodes::TypeName(expected), | 1078 WasmOpcodes::ShortOpcodeName(static_cast<WasmOpcode>(*pc_)), |
| 917 WasmOpcodes::OpcodeName(tree->opcode()), | 1079 index, WasmOpcodes::TypeName(expected), |
| 918 WasmOpcodes::TypeName(tree->type)); | 1080 WasmOpcodes::ShortOpcodeName(static_cast<WasmOpcode>(*val.pc)), |
| 919 return; | 1081 WasmOpcodes::TypeName(val.type)); |
| 920 } | 1082 } |
| 921 } | 1083 } |
| 922 | 1084 return val; |
| 923 BUILD(Return, retcount, buffer); | 1085 } |
| 1086 |
| 1087 Value Pop() { |
| 1088 if (stack_.empty()) { |
| 1089 Value val = {pc_, nullptr, kAstStmt}; |
| 1090 error(pc_, pc_, "%s found empty stack", |
| 1091 WasmOpcodes::ShortOpcodeName(static_cast<WasmOpcode>(*pc_))); |
| 1092 return val; |
| 1093 } |
| 1094 Value val = stack_.back(); |
| 1095 stack_.pop_back(); |
| 1096 return val; |
| 1097 } |
| 1098 |
| 1099 Value PopUpTo(int stack_depth) { |
| 1100 if (stack_depth == stack_.size()) { |
| 1101 Value val = {pc_, nullptr, kAstStmt}; |
| 1102 return val; |
| 1103 } else { |
| 1104 DCHECK_LE(stack_depth, static_cast<int>(stack_.size())); |
| 1105 Value val = Pop(); |
| 1106 stack_.resize(stack_depth); |
| 1107 return val; |
| 1108 } |
| 924 } | 1109 } |
| 925 | 1110 |
| 926 int baserel(const byte* ptr) { | 1111 int baserel(const byte* ptr) { |
| 927 return base_ ? static_cast<int>(ptr - base_) : 0; | 1112 return base_ ? static_cast<int>(ptr - base_) : 0; |
| 928 } | 1113 } |
| 929 | 1114 |
| 930 int startrel(const byte* ptr) { return static_cast<int>(ptr - start_); } | 1115 int startrel(const byte* ptr) { return static_cast<int>(ptr - start_); } |
| 931 | 1116 |
| 932 void Reduce(Production* p) { | 1117 void BreakTo(Control* block, Value& val) { |
| 933 WasmOpcode opcode = p->opcode(); | 1118 DCHECK(!block->is_if()); |
| 934 TRACE("-----reduce module+%-6d %s func+%d: 0x%02x %s\n", baserel(p->pc()), | 1119 if (block->is_loop()) { |
| 935 indentation(), startrel(p->pc()), opcode, | |
| 936 WasmOpcodes::OpcodeName(opcode)); | |
| 937 FunctionSig* sig = WasmOpcodes::Signature(opcode); | |
| 938 if (sig) { | |
| 939 // A simple expression with a fixed signature. | |
| 940 TypeCheckLast(p, sig->GetParam(p->index - 1)); | |
| 941 if (p->done() && build()) { | |
| 942 if (sig->parameter_count() == 2) { | |
| 943 p->tree->node = builder_->Binop(opcode, p->tree->children[0]->node, | |
| 944 p->tree->children[1]->node); | |
| 945 } else if (sig->parameter_count() == 1) { | |
| 946 p->tree->node = builder_->Unop(opcode, p->tree->children[0]->node); | |
| 947 } else { | |
| 948 UNREACHABLE(); | |
| 949 } | |
| 950 } | |
| 951 return; | |
| 952 } | |
| 953 | |
| 954 switch (opcode) { | |
| 955 case kExprBlock: { | |
| 956 if (p->done()) { | |
| 957 Block* last = &blocks_.back(); | |
| 958 DCHECK_EQ(stack_.size() - 1, last->stack_depth); | |
| 959 // fallthrough with the last expression. | |
| 960 ReduceBreakToExprBlock(p, last); | |
| 961 SetEnv("block:end", last->ssa_env); | |
| 962 blocks_.pop_back(); | |
| 963 } | |
| 964 break; | |
| 965 } | |
| 966 case kExprLoop: { | |
| 967 if (p->done()) { | |
| 968 // Pop the continue environment. | |
| 969 blocks_.pop_back(); | |
| 970 // Get the break environment. | |
| 971 Block* last = &blocks_.back(); | |
| 972 DCHECK_EQ(stack_.size() - 1, last->stack_depth); | |
| 973 // fallthrough with the last expression. | |
| 974 ReduceBreakToExprBlock(p, last); | |
| 975 SetEnv("loop:end", last->ssa_env); | |
| 976 blocks_.pop_back(); | |
| 977 } | |
| 978 break; | |
| 979 } | |
| 980 case kExprIf: { | |
| 981 if (p->index == 1) { | |
| 982 // Condition done. Split environment for true branch. | |
| 983 TypeCheckLast(p, kAstI32); | |
| 984 SsaEnv* false_env = ssa_env_; | |
| 985 SsaEnv* true_env = Split(ssa_env_); | |
| 986 ifs_.push_back({nullptr, false_env, nullptr}); | |
| 987 BUILD(Branch, p->last()->node, &true_env->control, | |
| 988 &false_env->control); | |
| 989 SetEnv("if:true", true_env); | |
| 990 } else if (p->index == 2) { | |
| 991 // True block done. Merge true and false environments. | |
| 992 IfEnv* env = &ifs_.back(); | |
| 993 SsaEnv* merge = env->merge_env; | |
| 994 if (merge->go()) { | |
| 995 merge->state = SsaEnv::kReached; | |
| 996 Goto(ssa_env_, merge); | |
| 997 } | |
| 998 SetEnv("if:merge", merge); | |
| 999 ifs_.pop_back(); | |
| 1000 } | |
| 1001 break; | |
| 1002 } | |
| 1003 case kExprIfElse: { | |
| 1004 if (p->index == 1) { | |
| 1005 // Condition done. Split environment for true and false branches. | |
| 1006 TypeCheckLast(p, kAstI32); | |
| 1007 SsaEnv* merge_env = ssa_env_; | |
| 1008 TFNode* if_true = nullptr; | |
| 1009 TFNode* if_false = nullptr; | |
| 1010 BUILD(Branch, p->last()->node, &if_true, &if_false); | |
| 1011 SsaEnv* false_env = Split(ssa_env_); | |
| 1012 SsaEnv* true_env = Steal(ssa_env_); | |
| 1013 false_env->control = if_false; | |
| 1014 true_env->control = if_true; | |
| 1015 ifs_.push_back({false_env, merge_env, nullptr}); | |
| 1016 SetEnv("if_else:true", true_env); | |
| 1017 } else if (p->index == 2) { | |
| 1018 // True expr done. | |
| 1019 IfEnv* env = &ifs_.back(); | |
| 1020 MergeIntoProduction(p, env->merge_env, p->last()); | |
| 1021 // Switch to environment for false branch. | |
| 1022 SsaEnv* false_env = ifs_.back().false_env; | |
| 1023 SetEnv("if_else:false", false_env); | |
| 1024 } else if (p->index == 3) { | |
| 1025 // False expr done. | |
| 1026 IfEnv* env = &ifs_.back(); | |
| 1027 MergeIntoProduction(p, env->merge_env, p->last()); | |
| 1028 SetEnv("if_else:merge", env->merge_env); | |
| 1029 ifs_.pop_back(); | |
| 1030 } | |
| 1031 break; | |
| 1032 } | |
| 1033 case kExprSelect: { | |
| 1034 if (p->index == 1) { | |
| 1035 // True expression done. | |
| 1036 p->tree->type = p->last()->type; | |
| 1037 if (p->tree->type == kAstStmt) { | |
| 1038 error(p->pc(), p->tree->children[1]->pc, | |
| 1039 "select operand should be expression"); | |
| 1040 } | |
| 1041 } else if (p->index == 2) { | |
| 1042 // False expression done. | |
| 1043 TypeCheckLast(p, p->tree->type); | |
| 1044 } else { | |
| 1045 // Condition done. | |
| 1046 DCHECK(p->done()); | |
| 1047 TypeCheckLast(p, kAstI32); | |
| 1048 if (build()) { | |
| 1049 TFNode* controls[2]; | |
| 1050 builder_->Branch(p->tree->children[2]->node, &controls[0], | |
| 1051 &controls[1]); | |
| 1052 TFNode* merge = builder_->Merge(2, controls); | |
| 1053 TFNode* vals[2] = {p->tree->children[0]->node, | |
| 1054 p->tree->children[1]->node}; | |
| 1055 TFNode* phi = builder_->Phi(p->tree->type, 2, vals, merge); | |
| 1056 p->tree->node = phi; | |
| 1057 ssa_env_->control = merge; | |
| 1058 } | |
| 1059 } | |
| 1060 break; | |
| 1061 } | |
| 1062 case kExprBr: { | |
| 1063 BreakDepthOperand operand(this, p->pc()); | |
| 1064 CHECK(Validate(p->pc(), operand, blocks_)); | |
| 1065 ReduceBreakToExprBlock(p, operand.target); | |
| 1066 break; | |
| 1067 } | |
| 1068 case kExprBrIf: { | |
| 1069 if (p->done()) { | |
| 1070 TypeCheckLast(p, kAstI32); | |
| 1071 BreakDepthOperand operand(this, p->pc()); | |
| 1072 CHECK(Validate(p->pc(), operand, blocks_)); | |
| 1073 SsaEnv* fenv = ssa_env_; | |
| 1074 SsaEnv* tenv = Split(fenv); | |
| 1075 BUILD(Branch, p->tree->children[1]->node, &tenv->control, | |
| 1076 &fenv->control); | |
| 1077 ssa_env_ = tenv; | |
| 1078 ReduceBreakToExprBlock(p, operand.target, p->tree->children[0]); | |
| 1079 ssa_env_ = fenv; | |
| 1080 } | |
| 1081 break; | |
| 1082 } | |
| 1083 case kExprBrTable: { | |
| 1084 if (p->index == 1) { | |
| 1085 // Switch key finished. | |
| 1086 TypeCheckLast(p, kAstI32); | |
| 1087 if (failed()) break; | |
| 1088 | |
| 1089 BranchTableOperand operand(this, p->pc()); | |
| 1090 DCHECK(Validate(p->pc(), operand, blocks_.size())); | |
| 1091 | |
| 1092 // Build a switch only if it has more than just a default target. | |
| 1093 bool build_switch = operand.table_count > 0; | |
| 1094 TFNode* sw = nullptr; | |
| 1095 if (build_switch) { | |
| 1096 sw = BUILD(Switch, operand.table_count + 1, p->last()->node); | |
| 1097 } | |
| 1098 | |
| 1099 // Process the targets of the break table. | |
| 1100 SsaEnv* prev = ssa_env_; | |
| 1101 SsaEnv* copy = Steal(prev); | |
| 1102 for (uint32_t i = 0; i < operand.table_count + 1; i++) { | |
| 1103 uint32_t target = operand.read_entry(this, i); | |
| 1104 SsaEnv* env = copy; | |
| 1105 if (build_switch) { | |
| 1106 ssa_env_ = env = Split(env); | |
| 1107 env->control = i == operand.table_count ? BUILD(IfDefault, sw) | |
| 1108 : BUILD(IfValue, i, sw); | |
| 1109 } | |
| 1110 SsaEnv* tenv = blocks_[blocks_.size() - target - 1].ssa_env; | |
| 1111 Goto(env, tenv); | |
| 1112 } | |
| 1113 ssa_env_ = prev; | |
| 1114 } | |
| 1115 break; | |
| 1116 } | |
| 1117 case kExprReturn: { | |
| 1118 TypeCheckLast(p, sig_->GetReturn(p->index - 1)); | |
| 1119 if (p->done()) { | |
| 1120 if (build()) { | |
| 1121 int count = p->tree->count; | |
| 1122 TFNode** buffer = builder_->Buffer(count); | |
| 1123 for (int i = 0; i < count; i++) { | |
| 1124 buffer[i] = p->tree->children[i]->node; | |
| 1125 } | |
| 1126 BUILD(Return, count, buffer); | |
| 1127 } | |
| 1128 ssa_env_->Kill(SsaEnv::kControlEnd); | |
| 1129 } | |
| 1130 break; | |
| 1131 } | |
| 1132 case kExprSetLocal: { | |
| 1133 LocalIndexOperand operand(this, p->pc()); | |
| 1134 CHECK(Validate(p->pc(), operand)); | |
| 1135 Tree* val = p->last(); | |
| 1136 if (operand.type == val->type) { | |
| 1137 if (build()) ssa_env_->locals[operand.index] = val->node; | |
| 1138 p->tree->node = val->node; | |
| 1139 } else { | |
| 1140 error(p->pc(), val->pc, "Typecheck failed in SetLocal"); | |
| 1141 } | |
| 1142 break; | |
| 1143 } | |
| 1144 case kExprStoreGlobal: { | |
| 1145 GlobalIndexOperand operand(this, p->pc()); | |
| 1146 CHECK(Validate(p->pc(), operand)); | |
| 1147 Tree* val = p->last(); | |
| 1148 if (operand.type == val->type) { | |
| 1149 BUILD(StoreGlobal, operand.index, val->node); | |
| 1150 p->tree->node = val->node; | |
| 1151 } else { | |
| 1152 error(p->pc(), val->pc, "Typecheck failed in StoreGlobal"); | |
| 1153 } | |
| 1154 break; | |
| 1155 } | |
| 1156 | |
| 1157 case kExprI32LoadMem8S: | |
| 1158 return ReduceLoadMem(p, kAstI32, MachineType::Int8()); | |
| 1159 case kExprI32LoadMem8U: | |
| 1160 return ReduceLoadMem(p, kAstI32, MachineType::Uint8()); | |
| 1161 case kExprI32LoadMem16S: | |
| 1162 return ReduceLoadMem(p, kAstI32, MachineType::Int16()); | |
| 1163 case kExprI32LoadMem16U: | |
| 1164 return ReduceLoadMem(p, kAstI32, MachineType::Uint16()); | |
| 1165 case kExprI32LoadMem: | |
| 1166 return ReduceLoadMem(p, kAstI32, MachineType::Int32()); | |
| 1167 | |
| 1168 case kExprI64LoadMem8S: | |
| 1169 return ReduceLoadMem(p, kAstI64, MachineType::Int8()); | |
| 1170 case kExprI64LoadMem8U: | |
| 1171 return ReduceLoadMem(p, kAstI64, MachineType::Uint8()); | |
| 1172 case kExprI64LoadMem16S: | |
| 1173 return ReduceLoadMem(p, kAstI64, MachineType::Int16()); | |
| 1174 case kExprI64LoadMem16U: | |
| 1175 return ReduceLoadMem(p, kAstI64, MachineType::Uint16()); | |
| 1176 case kExprI64LoadMem32S: | |
| 1177 return ReduceLoadMem(p, kAstI64, MachineType::Int32()); | |
| 1178 case kExprI64LoadMem32U: | |
| 1179 return ReduceLoadMem(p, kAstI64, MachineType::Uint32()); | |
| 1180 case kExprI64LoadMem: | |
| 1181 return ReduceLoadMem(p, kAstI64, MachineType::Int64()); | |
| 1182 | |
| 1183 case kExprF32LoadMem: | |
| 1184 return ReduceLoadMem(p, kAstF32, MachineType::Float32()); | |
| 1185 | |
| 1186 case kExprF64LoadMem: | |
| 1187 return ReduceLoadMem(p, kAstF64, MachineType::Float64()); | |
| 1188 | |
| 1189 case kExprI32StoreMem8: | |
| 1190 return ReduceStoreMem(p, kAstI32, MachineType::Int8()); | |
| 1191 case kExprI32StoreMem16: | |
| 1192 return ReduceStoreMem(p, kAstI32, MachineType::Int16()); | |
| 1193 case kExprI32StoreMem: | |
| 1194 return ReduceStoreMem(p, kAstI32, MachineType::Int32()); | |
| 1195 | |
| 1196 case kExprI64StoreMem8: | |
| 1197 return ReduceStoreMem(p, kAstI64, MachineType::Int8()); | |
| 1198 case kExprI64StoreMem16: | |
| 1199 return ReduceStoreMem(p, kAstI64, MachineType::Int16()); | |
| 1200 case kExprI64StoreMem32: | |
| 1201 return ReduceStoreMem(p, kAstI64, MachineType::Int32()); | |
| 1202 case kExprI64StoreMem: | |
| 1203 return ReduceStoreMem(p, kAstI64, MachineType::Int64()); | |
| 1204 | |
| 1205 case kExprF32StoreMem: | |
| 1206 return ReduceStoreMem(p, kAstF32, MachineType::Float32()); | |
| 1207 | |
| 1208 case kExprF64StoreMem: | |
| 1209 return ReduceStoreMem(p, kAstF64, MachineType::Float64()); | |
| 1210 | |
| 1211 case kExprGrowMemory: | |
| 1212 TypeCheckLast(p, kAstI32); | |
| 1213 // TODO(titzer): build node for GrowMemory | |
| 1214 p->tree->node = BUILD(Int32Constant, 0); | |
| 1215 return; | |
| 1216 | |
| 1217 case kExprCallFunction: { | |
| 1218 FunctionIndexOperand operand(this, p->pc()); | |
| 1219 CHECK(Validate(p->pc(), operand)); | |
| 1220 if (p->index > 0) { | |
| 1221 TypeCheckLast(p, operand.sig->GetParam(p->index - 1)); | |
| 1222 } | |
| 1223 if (p->done() && build()) { | |
| 1224 uint32_t count = p->tree->count + 1; | |
| 1225 TFNode** buffer = builder_->Buffer(count); | |
| 1226 buffer[0] = nullptr; // reserved for code object. | |
| 1227 for (uint32_t i = 1; i < count; i++) { | |
| 1228 buffer[i] = p->tree->children[i - 1]->node; | |
| 1229 } | |
| 1230 p->tree->node = builder_->CallDirect(operand.index, buffer); | |
| 1231 } | |
| 1232 break; | |
| 1233 } | |
| 1234 case kExprCallIndirect: { | |
| 1235 SignatureIndexOperand operand(this, p->pc()); | |
| 1236 CHECK(Validate(p->pc(), operand)); | |
| 1237 if (p->index == 1) { | |
| 1238 TypeCheckLast(p, kAstI32); | |
| 1239 } else { | |
| 1240 TypeCheckLast(p, operand.sig->GetParam(p->index - 2)); | |
| 1241 } | |
| 1242 if (p->done() && build()) { | |
| 1243 uint32_t count = p->tree->count; | |
| 1244 TFNode** buffer = builder_->Buffer(count); | |
| 1245 for (uint32_t i = 0; i < count; i++) { | |
| 1246 buffer[i] = p->tree->children[i]->node; | |
| 1247 } | |
| 1248 p->tree->node = builder_->CallIndirect(operand.index, buffer); | |
| 1249 } | |
| 1250 break; | |
| 1251 } | |
| 1252 case kExprCallImport: { | |
| 1253 ImportIndexOperand operand(this, p->pc()); | |
| 1254 CHECK(Validate(p->pc(), operand)); | |
| 1255 if (p->index > 0) { | |
| 1256 TypeCheckLast(p, operand.sig->GetParam(p->index - 1)); | |
| 1257 } | |
| 1258 if (p->done() && build()) { | |
| 1259 uint32_t count = p->tree->count + 1; | |
| 1260 TFNode** buffer = builder_->Buffer(count); | |
| 1261 buffer[0] = nullptr; // reserved for code object. | |
| 1262 for (uint32_t i = 1; i < count; i++) { | |
| 1263 buffer[i] = p->tree->children[i - 1]->node; | |
| 1264 } | |
| 1265 p->tree->node = builder_->CallImport(operand.index, buffer); | |
| 1266 } | |
| 1267 break; | |
| 1268 } | |
| 1269 default: | |
| 1270 break; | |
| 1271 } | |
| 1272 } | |
| 1273 | |
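The removed kExprBrTable case builds one Switch over the key with table_count + 1 successors: an IfValue projection per table entry and a trailing IfDefault, each routed to its target block via Goto (and the Switch is skipped entirely when only the default target exists). The selection those projections implement, as a small standalone sketch (the function name and std::vector usage are illustrative, not decoder code):

#include <cstdint>
#include <vector>

// Sketch of the choice a br_table expresses: a key inside the table picks
// that entry's break target, any other key falls through to the default.
uint32_t SelectBrTableTarget(const std::vector<uint32_t>& table,
                             uint32_t default_target, uint32_t key) {
  return key < table.size() ? table[key] : default_target;
}
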
| 1274 void ReduceBreakToExprBlock(Production* p, Block* block) { | |
| 1275 ReduceBreakToExprBlock(p, block, p->tree->count > 0 ? p->last() : nullptr); | |
| 1276 } | |
| 1277 | |
| 1278 void ReduceBreakToExprBlock(Production* p, Block* block, Tree* val) { | |
| 1279 if (block->stack_depth < 0) { | |
| 1280 // This is the inner loop block, which does not have a value. | 1120 // This is the inner loop block, which does not have a value. |
| 1281 Goto(ssa_env_, block->ssa_env); | 1121 Goto(ssa_env_, block->ssa_env); |
| 1282 } else { | 1122 } else { |
| 1283 // Merge the value into the production for the block. | 1123 // Merge the value into the production for the block. |
| 1284 Production* bp = &stack_[block->stack_depth]; | 1124 MergeInto(block->ssa_env, &block->node, &block->type, val); |
| 1285 MergeIntoProduction(bp, block->ssa_env, val); | 1125 } |
| 1286 } | 1126 } |
| 1287 } | 1127 |
| 1288 | 1128 void MergeInto(SsaEnv* target, TFNode** node, LocalType* type, Value& val) { |
| 1289 void MergeIntoProduction(Production* p, SsaEnv* target, Tree* expr) { | |
| 1290 if (!ssa_env_->go()) return; | 1129 if (!ssa_env_->go()) return; |
| 1130 DCHECK_NE(kAstEnd, val.type); |
| 1291 | 1131 |
| 1292 bool first = target->state == SsaEnv::kUnreachable; | 1132 bool first = target->state == SsaEnv::kUnreachable; |
| 1293 Goto(ssa_env_, target); | 1133 Goto(ssa_env_, target); |
| 1294 if (expr == nullptr || expr->type == kAstEnd) return; | |
| 1295 | 1134 |
| 1296 if (first) { | 1135 if (first) { |
| 1297 // first merge to this environment; set the type and the node. | 1136 // first merge to this environment; set the type and the node. |
| 1298 p->tree->type = expr->type; | 1137 *type = val.type; |
| 1299 p->tree->node = expr->node; | 1138 *node = val.node; |
| 1139 } else if (val.type == *type && val.type != kAstStmt) { |
| 1140 // merge with the existing value for this block. |
| 1141 *node = CreateOrMergeIntoPhi(*type, target->control, *node, val.node); |
| 1300 } else { | 1142 } else { |
| 1301 // merge with the existing value for this block. | 1143 // types don't match, or block is already a stmt. |
| 1302 LocalType type = p->tree->type; | 1144 *type = kAstStmt; |
| 1303 if (expr->type != type) { | 1145 *node = nullptr; |
| 1304 type = kAstStmt; | |
| 1305 p->tree->type = kAstStmt; | |
| 1306 p->tree->node = nullptr; | |
| 1307 } else if (type != kAstStmt) { | |
| 1308 p->tree->node = CreateOrMergeIntoPhi(type, target->control, | |
| 1309 p->tree->node, expr->node); | |
| 1310 } | |
| 1311 } | |
| 1312 } | |
| 1313 | |
| 1314 void ReduceLoadMem(Production* p, LocalType type, MachineType mem_type) { | |
| 1315 DCHECK_EQ(1, p->index); | |
| 1316 TypeCheckLast(p, kAstI32); // index | |
| 1317 if (build()) { | |
| 1318 MemoryAccessOperand operand(this, p->pc()); | |
| 1319 p->tree->node = | |
| 1320 builder_->LoadMem(type, mem_type, p->last()->node, operand.offset); | |
| 1321 } | |
| 1322 } | |
| 1323 | |
| 1324 void ReduceStoreMem(Production* p, LocalType type, MachineType mem_type) { | |
| 1325 if (p->index == 1) { | |
| 1326 TypeCheckLast(p, kAstI32); // index | |
| 1327 } else { | |
| 1328 DCHECK_EQ(2, p->index); | |
| 1329 TypeCheckLast(p, type); | |
| 1330 if (build()) { | |
| 1331 MemoryAccessOperand operand(this, p->pc()); | |
| 1332 TFNode* val = p->tree->children[1]->node; | |
| 1333 builder_->StoreMem(mem_type, p->tree->children[0]->node, operand.offset, | |
| 1334 val); | |
| 1335 p->tree->node = val; | |
| 1336 } | |
| 1337 } | |
| 1338 } | |
| 1339 | |
| 1340 void TypeCheckLast(Production* p, LocalType expected) { | |
| 1341 LocalType result = p->last()->type; | |
| 1342 if (result == expected) return; | |
| 1343 if (result == kAstEnd) return; | |
| 1344 if (expected != kAstStmt) { | |
| 1345 error(p->pc(), p->last()->pc, | |
| 1346 "%s[%d] expected type %s, found %s of type %s", | |
| 1347 WasmOpcodes::OpcodeName(p->opcode()), p->index - 1, | |
| 1348 WasmOpcodes::TypeName(expected), | |
| 1349 WasmOpcodes::OpcodeName(p->last()->opcode()), | |
| 1350 WasmOpcodes::TypeName(p->last()->type)); | |
| 1351 } | 1146 } |
| 1352 } | 1147 } |
| 1353 | 1148 |
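The new MergeInto folds a value into a block's result: the first value to reach the target fixes the block's type and node, a later value of the same type is merged through a phi at the target's control, and a type mismatch demotes the block to a statement. A minimal sketch of that policy under stand-in types (BlockResult, MakePhi, and MergeValue are hypothetical; the real code uses LocalType, TFNode, and WasmGraphBuilder::CreateOrMergeIntoPhi):

// Hypothetical stand-ins for the decoder's types.
enum Type { kStmt, kI32, kI64, kF32, kF64 };
struct Node;

// Stub: the real code builds (or extends) a TurboFan phi at 'control'.
Node* MakePhi(Type /*type*/, Node* a, Node* /*b*/, Node* /*control*/) {
  return a;
}

struct BlockResult {
  bool first = true;     // no value has reached this block yet
  Type type = kStmt;
  Node* node = nullptr;
};

void MergeValue(BlockResult* block, Type type, Node* node, Node* control) {
  if (block->first) {
    // First merge into this block: take the incoming type and node directly.
    block->first = false;
    block->type = type;
    block->node = node;
  } else if (type == block->type && type != kStmt) {
    // Same type as the value already merged: combine the two through a phi.
    block->node = MakePhi(type, block->node, node, control);
  } else {
    // Types disagree, or there is no value: the block degrades to a statement.
    block->type = kStmt;
    block->node = nullptr;
  }
}
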
| 1354 void SetEnv(const char* reason, SsaEnv* env) { | 1149 void SetEnv(const char* reason, SsaEnv* env) { |
| 1355 #if DEBUG | 1150 #if DEBUG |
| 1356 TRACE(" env = %p, block depth = %d, reason = %s", static_cast<void*>(env), | 1151 if (FLAG_trace_wasm_decoder) { |
| 1357 static_cast<int>(blocks_.size()), reason); | 1152 char state = 'X'; |
| 1358 if (FLAG_trace_wasm_decoder && env && env->control) { | 1153 if (env) { |
| 1359 TRACE(", control = "); | 1154 switch (env->state) { |
| 1360 compiler::WasmGraphBuilder::PrintDebugName(env->control); | 1155 case SsaEnv::kReached: |
| 1361 } | 1156 state = 'R'; |
| 1362 TRACE("\n"); | 1157 break; |
| 1158 case SsaEnv::kUnreachable: |
| 1159 state = 'U'; |
| 1160 break; |
| 1161 case SsaEnv::kMerged: |
| 1162 state = 'M'; |
| 1163 break; |
| 1164 case SsaEnv::kControlEnd: |
| 1165 state = 'E'; |
| 1166 break; |
| 1167 } |
| 1168 } |
| 1169 PrintF(" env = %p, state = %c, reason = %s", static_cast<void*>(env), |
| 1170 state, reason); |
| 1171 if (env && env->control) { |
| 1172 PrintF(", control = "); |
| 1173 compiler::WasmGraphBuilder::PrintDebugName(env->control); |
| 1174 } |
| 1175 PrintF("\n"); |
| 1176 } |
| 1363 #endif | 1177 #endif |
| 1364 ssa_env_ = env; | 1178 ssa_env_ = env; |
| 1365 if (builder_) { | 1179 if (builder_) { |
| 1366 builder_->set_control_ptr(&env->control); | 1180 builder_->set_control_ptr(&env->control); |
| 1367 builder_->set_effect_ptr(&env->effect); | 1181 builder_->set_effect_ptr(&env->effect); |
| 1368 } | 1182 } |
| 1369 } | 1183 } |
| 1370 | 1184 |
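The replacement SetEnv traces the environment's state as a single letter (R = reached, U = unreachable, M = merged, E = control end, X = no environment). That mapping, pulled out as a standalone helper for illustration only (the real code inlines the switch on SsaEnv::state rather than calling a helper like this):

// Sketch: state-to-letter mapping used by the tracing in SetEnv.
enum class State { kControlEnd, kUnreachable, kReached, kMerged };

char StateLetter(const State* state) {
  if (state == nullptr) return 'X';          // no environment at all
  switch (*state) {
    case State::kReached:     return 'R';
    case State::kUnreachable: return 'U';
    case State::kMerged:      return 'M';
    case State::kControlEnd:  return 'E';
  }
  return 'X';
}
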
| 1371 void Goto(SsaEnv* from, SsaEnv* to) { | 1185 void Goto(SsaEnv* from, SsaEnv* to) { |
| 1372 DCHECK_NOT_NULL(to); | 1186 DCHECK_NOT_NULL(to); |
| (...skipping 113 matching lines...) | |
| 1486 } | 1300 } |
| 1487 } | 1301 } |
| 1488 | 1302 |
| 1489 // Create a complete copy of the {from}. | 1303 // Create a complete copy of the {from}. |
| 1490 SsaEnv* Split(SsaEnv* from) { | 1304 SsaEnv* Split(SsaEnv* from) { |
| 1491 DCHECK_NOT_NULL(from); | 1305 DCHECK_NOT_NULL(from); |
| 1492 SsaEnv* result = reinterpret_cast<SsaEnv*>(zone_->New(sizeof(SsaEnv))); | 1306 SsaEnv* result = reinterpret_cast<SsaEnv*>(zone_->New(sizeof(SsaEnv))); |
| 1493 size_t size = sizeof(TFNode*) * EnvironmentCount(); | 1307 size_t size = sizeof(TFNode*) * EnvironmentCount(); |
| 1494 result->control = from->control; | 1308 result->control = from->control; |
| 1495 result->effect = from->effect; | 1309 result->effect = from->effect; |
| 1496 result->state = from->state == SsaEnv::kUnreachable ? SsaEnv::kUnreachable | |
| 1497 : SsaEnv::kReached; | |
| 1498 | 1310 |
| 1499 if (from->go()) { | 1311 if (from->go()) { |
| 1500 result->state = SsaEnv::kReached; | 1312 result->state = SsaEnv::kReached; |
| 1501 result->locals = | 1313 result->locals = |
| 1502 size > 0 ? reinterpret_cast<TFNode**>(zone_->New(size)) : nullptr; | 1314 size > 0 ? reinterpret_cast<TFNode**>(zone_->New(size)) : nullptr; |
| 1503 memcpy(result->locals, from->locals, size); | 1315 memcpy(result->locals, from->locals, size); |
| 1504 } else { | 1316 } else { |
| 1505 result->state = SsaEnv::kUnreachable; | 1317 result->state = SsaEnv::kUnreachable; |
| 1506 result->locals = nullptr; | 1318 result->locals = nullptr; |
| 1507 } | 1319 } |
| (...skipping 26 matching lines...) | |
| 1534 } | 1346 } |
| 1535 | 1347 |
| 1536 int EnvironmentCount() { | 1348 int EnvironmentCount() { |
| 1537 if (builder_) return static_cast<int>(local_type_vec_.size()); | 1349 if (builder_) return static_cast<int>(local_type_vec_.size()); |
| 1538 return 0; // if we aren't building a graph, don't bother with SSA renaming. | 1350 return 0; // if we aren't building a graph, don't bother with SSA renaming. |
| 1539 } | 1351 } |
| 1540 | 1352 |
| 1541 virtual void onFirstError() { | 1353 virtual void onFirstError() { |
| 1542 limit_ = start_; // Terminate decoding loop. | 1354 limit_ = start_; // Terminate decoding loop. |
| 1543 builder_ = nullptr; // Don't build any more nodes. | 1355 builder_ = nullptr; // Don't build any more nodes. |
| 1544 #if DEBUG | 1356 TRACE(" !%s\n", error_msg_.get()); |
| 1545 PrintStackForDebugging(); | |
| 1546 #endif | |
| 1547 } | 1357 } |
| 1548 | |
| 1549 #if DEBUG | |
| 1550 void PrintStackForDebugging() { PrintProduction(0); } | |
| 1551 | |
| 1552 void PrintProduction(size_t depth) { | |
| 1553 if (depth >= stack_.size()) return; | |
| 1554 Production* p = &stack_[depth]; | |
| 1555 for (size_t d = 0; d < depth; d++) PrintF(" "); | |
| 1556 | |
| 1557 PrintF("@%d %s [%d]\n", static_cast<int>(p->tree->pc - start_), | |
| 1558 WasmOpcodes::OpcodeName(p->opcode()), p->tree->count); | |
| 1559 for (int i = 0; i < p->index; i++) { | |
| 1560 Tree* child = p->tree->children[i]; | |
| 1561 for (size_t d = 0; d <= depth; d++) PrintF(" "); | |
| 1562 PrintF("@%d %s [%d]", static_cast<int>(child->pc - start_), | |
| 1563 WasmOpcodes::OpcodeName(child->opcode()), child->count); | |
| 1564 if (child->node) { | |
| 1565 PrintF(" => TF"); | |
| 1566 compiler::WasmGraphBuilder::PrintDebugName(child->node); | |
| 1567 } | |
| 1568 PrintF("\n"); | |
| 1569 } | |
| 1570 PrintProduction(depth + 1); | |
| 1571 } | |
| 1572 #endif | |
| 1573 | |
| 1574 BitVector* AnalyzeLoopAssignment(const byte* pc) { | 1358 BitVector* AnalyzeLoopAssignment(const byte* pc) { |
| 1575 if (pc >= limit_) return nullptr; | 1359 if (pc >= limit_) return nullptr; |
| 1576 if (*pc != kExprLoop) return nullptr; | 1360 if (*pc != kExprLoop) return nullptr; |
| 1577 | 1361 |
| 1578 BitVector* assigned = | 1362 BitVector* assigned = |
| 1579 new (zone_) BitVector(static_cast<int>(total_locals_), zone_); | 1363 new (zone_) BitVector(static_cast<int>(local_type_vec_.size()), zone_); |
| 1580 // Keep a stack to model the nesting of expressions. | 1364 int depth = 0; |
| 1581 std::vector<int> arity_stack; | |
| 1582 arity_stack.push_back(OpcodeArity(pc)); | |
| 1583 pc += OpcodeLength(pc); | |
| 1584 | |
| 1585 // Iteratively process all AST nodes nested inside the loop. | 1365 // Iteratively process all AST nodes nested inside the loop. |
| 1586 while (pc < limit_) { | 1366 while (pc_ < limit_) { |
| 1587 WasmOpcode opcode = static_cast<WasmOpcode>(*pc); | 1367 WasmOpcode opcode = static_cast<WasmOpcode>(*pc_); |
| 1588 int arity = 0; | |
| 1589 int length = 1; | 1368 int length = 1; |
| 1590 int assigned_index = -1; | 1369 switch (opcode) { |
| 1591 if (opcode == kExprSetLocal) { | 1370 case kExprLoop: |
| 1592 LocalIndexOperand operand(this, pc); | 1371 case kExprIf: |
| 1593 if (assigned->length() > 0 && | 1372 case kExprBlock: |
| 1594 static_cast<int>(operand.index) < assigned->length()) { | 1373 depth++; |
| 1595 // Unverified code might have an out-of-bounds index. | 1374 DCHECK_EQ(1, OpcodeLength(pc_)); |
| 1596 // Ignore out-of-bounds indices, as the main verification will fail. | 1375 break; |
| 1597 assigned->Add(operand.index); | 1376 case kExprSetLocal: { |
| 1598 assigned_index = operand.index; | 1377 LocalIndexOperand operand(this, pc_); |
| 1378 if (assigned->length() > 0 && |
| 1379 static_cast<int>(operand.index) < assigned->length()) { |
| 1380 // Unverified code might have an out-of-bounds index. |
| 1381 assigned->Add(operand.index); |
| 1382 } |
| 1383 length = 1 + operand.length; |
| 1384 break; |
| 1599 } | 1385 } |
| 1600 arity = 1; | 1386 case kExprEnd: |
| 1601 length = 1 + operand.length; | 1387 depth--; |
| 1602 } else { | 1388 break; |
| 1603 arity = OpcodeArity(pc); | 1389 default: |
| 1604 length = OpcodeLength(pc); | 1390 length = OpcodeLength(pc_); |
| 1391 break; |
| 1605 } | 1392 } |
| 1606 | 1393 if (depth <= 0) break; |
| 1607 TRACE("loop-assign module+%-6d %s func+%d: 0x%02x %s", baserel(pc), | 1394 pc_ += length; |
| 1608 indentation(), startrel(pc), opcode, | |
| 1609 WasmOpcodes::OpcodeName(opcode)); | |
| 1610 | |
| 1611 if (assigned_index >= 0) { | |
| 1612 TRACE(" (assigned local #%d)\n", assigned_index); | |
| 1613 } else { | |
| 1614 TRACE("\n"); | |
| 1615 } | |
| 1616 | |
| 1617 pc += length; | |
| 1618 arity_stack.push_back(arity); | |
| 1619 while (arity_stack.back() == 0) { | |
| 1620 arity_stack.pop_back(); | |
| 1621 if (arity_stack.empty()) return assigned; // reached end of loop | |
| 1622 arity_stack.back()--; | |
| 1623 } | |
| 1624 } | 1395 } |
| 1625 return assigned; | 1396 return assigned; |
| 1626 } | 1397 } |
| 1627 }; | 1398 }; |
| 1628 | 1399 |
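The rewritten AnalyzeLoopAssignment walks the loop body linearly, tracking nesting with a depth counter (loop/if/block increment, end decrements) instead of the old arity stack, and records every local written by a set_local. A simplified standalone sketch of that scan over a made-up byte encoding (the opcode values, the one-byte local-index operand, and the std::set standing in for the zone BitVector are assumptions for illustration; the real decoder reads LEB128 operands):

#include <cstdint>
#include <set>
#include <vector>

// Illustrative opcode values only; the real encoding lives in wasm-opcodes.h.
enum : uint8_t { kBlock = 0x01, kLoop = 0x02, kIf = 0x03, kEnd = 0x0f,
                 kSetLocal = 0x21 };

// Scan a loop body starting at 'pc' (which points at the loop opcode) and
// collect the indices of locals assigned anywhere inside it.
std::set<uint32_t> AssignedLocalsInLoop(const std::vector<uint8_t>& code,
                                        size_t pc) {
  std::set<uint32_t> assigned;
  int depth = 0;
  while (pc < code.size()) {
    size_t length = 1;
    switch (code[pc]) {
      case kLoop:
      case kIf:
      case kBlock:
        depth++;                       // entering a nested construct
        break;
      case kEnd:
        depth--;                       // leaving one level of nesting
        break;
      case kSetLocal:
        // One-byte local index assumed for this sketch.
        if (pc + 1 < code.size()) assigned.insert(code[pc + 1]);
        length = 2;
        break;
      default:
        break;                         // other opcodes: nothing to record
    }
    if (depth <= 0) break;             // matched the loop's own end
    pc += length;
  }
  return assigned;
}
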
| 1629 std::vector<LocalType>* DecodeLocalDeclsForTesting(const byte* start, | 1400 std::vector<LocalType>* DecodeLocalDeclsForTesting(const byte* start, |
| 1630 const byte* end) { | 1401 const byte* end) { |
| 1631 Zone zone; | 1402 Zone zone; |
| 1632 FunctionBody body = {nullptr, nullptr, nullptr, start, end}; | 1403 FunctionBody body = {nullptr, nullptr, nullptr, start, end}; |
| 1633 SR_WasmDecoder decoder(&zone, nullptr, body); | 1404 SR_WasmDecoder decoder(&zone, nullptr, body); |
| 1634 return decoder.DecodeLocalDeclsForTesting(); | 1405 return decoder.DecodeLocalDeclsForTesting(); |
| 1635 } | 1406 } |
| 1636 | 1407 |
| 1637 TreeResult VerifyWasmCode(FunctionBody& body) { | 1408 TreeResult VerifyWasmCode(FunctionBody& body) { |
| 1638 Zone zone; | 1409 Zone zone; |
| 1639 SR_WasmDecoder decoder(&zone, nullptr, body); | 1410 SR_WasmDecoder decoder(&zone, nullptr, body); |
| 1640 TreeResult result = decoder.Decode(); | 1411 decoder.Decode(); |
| 1641 return result; | 1412 return decoder.toResult<Tree*>(nullptr); |
| 1642 } | 1413 } |
| 1643 | 1414 |
| 1644 TreeResult BuildTFGraph(TFBuilder* builder, FunctionBody& body) { | 1415 TreeResult BuildTFGraph(TFBuilder* builder, FunctionBody& body) { |
| 1645 Zone zone; | 1416 Zone zone; |
| 1646 SR_WasmDecoder decoder(&zone, builder, body); | 1417 SR_WasmDecoder decoder(&zone, builder, body); |
| 1647 TreeResult result = decoder.Decode(); | 1418 decoder.Decode(); |
| 1648 return result; | 1419 return decoder.toResult<Tree*>(nullptr); |
| 1649 } | 1420 } |
| 1650 | 1421 |
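With the tree-building removed, VerifyWasmCode and BuildTFGraph now return a result whose tree pointer is always null; callers only care about the error state. A hedged usage sketch (the FunctionBody initializer mirrors the testing helpers above, and the ok() accessor is assumed from the usual wasm::Result interface):

// Sketch: verifying a raw function body without building a graph.
bool IsValidBody(const byte* start, const byte* end) {
  FunctionBody body = {nullptr, nullptr, nullptr, start, end};
  TreeResult result = VerifyWasmCode(body);
  return result.ok();  // the returned tree pointer is always null now
}
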
| 1651 | 1422 |
| 1652 std::ostream& operator<<(std::ostream& os, const Tree& tree) { | 1423 std::ostream& operator<<(std::ostream& os, const Tree& tree) { |
| 1653 if (tree.pc == nullptr) { | 1424 if (tree.pc == nullptr) { |
| 1654 os << "null"; | 1425 os << "null"; |
| 1655 return os; | 1426 return os; |
| 1656 } | 1427 } |
| 1657 PrintF("%s", WasmOpcodes::OpcodeName(tree.opcode())); | 1428 PrintF("%s", WasmOpcodes::OpcodeName(tree.opcode())); |
| 1658 if (tree.count > 0) os << "("; | 1429 if (tree.count > 0) os << "("; |
| (...skipping 124 matching lines...) | |
| 1783 BitVector* AnalyzeLoopAssignmentForTesting(Zone* zone, size_t num_locals, | 1554 BitVector* AnalyzeLoopAssignmentForTesting(Zone* zone, size_t num_locals, |
| 1784 const byte* start, const byte* end) { | 1555 const byte* start, const byte* end) { |
| 1785 FunctionBody body = {nullptr, nullptr, nullptr, start, end}; | 1556 FunctionBody body = {nullptr, nullptr, nullptr, start, end}; |
| 1786 SR_WasmDecoder decoder(zone, nullptr, body); | 1557 SR_WasmDecoder decoder(zone, nullptr, body); |
| 1787 return decoder.AnalyzeLoopAssignmentForTesting(start, num_locals); | 1558 return decoder.AnalyzeLoopAssignmentForTesting(start, num_locals); |
| 1788 } | 1559 } |
| 1789 | 1560 |
| 1790 } // namespace wasm | 1561 } // namespace wasm |
| 1791 } // namespace internal | 1562 } // namespace internal |
| 1792 } // namespace v8 | 1563 } // namespace v8 |