| Index: src/wasm/ast-decoder.cc
|
| diff --git a/src/wasm/ast-decoder.cc b/src/wasm/ast-decoder.cc
|
| index 9e943b0fa534b8b643aff485d6f6e04f8279288d..7fabbbc367403f26ff4be6a9c47db12cd9c0a1ee 100644
|
| --- a/src/wasm/ast-decoder.cc
|
| +++ b/src/wasm/ast-decoder.cc
|
| @@ -42,17 +42,6 @@ struct Tree {
|
| WasmOpcode opcode() const { return static_cast<WasmOpcode>(*pc); }
|
| };
|
|
|
| -// A production represents an incomplete decoded tree in the LR decoder.
|
| -struct Production {
|
| - Tree* tree; // the root of the syntax tree.
|
| - int index; // the current index into the children of the tree.
|
| -
|
| - WasmOpcode opcode() const { return static_cast<WasmOpcode>(*pc()); }
|
| - const byte* pc() const { return tree->pc; }
|
| - bool done() const { return index >= static_cast<int>(tree->count); }
|
| - Tree* last() const { return index > 0 ? tree->children[index - 1] : nullptr; }
|
| -};
|
| -
|
| // An SsaEnv environment carries the current local variable renaming
|
| // as well as the current effect and control dependency in the TF graph.
|
| // It maintains a control state that tracks whether the environment
|
| @@ -72,19 +61,33 @@ struct SsaEnv {
|
| control = nullptr;
|
| effect = nullptr;
|
| }
|
| + void SetNotMerged() {
|
| + if (state == kMerged) state = kReached;
|
| + }
|
| };
|
|
|
| -// An entry in the stack of blocks during decoding.
|
| -struct Block {
|
| - SsaEnv* ssa_env; // SSA renaming environment.
|
| - int stack_depth; // production stack depth.
|
| +// An entry on the value stack.
|
| +struct Value {
|
| +  const byte* pc;    // position of the expression that produced this value.
|
| +  TFNode* node;      // TurboFan node; nullptr when not building a graph.
|
| +  LocalType type;    // type of the value on the stack.
|
| };
|
|
|
| -// An entry in the stack of ifs during decoding.
|
| -struct IfEnv {
|
| - SsaEnv* false_env;
|
| - SsaEnv* merge_env;
|
| - SsaEnv** case_envs;
|
| +// An entry on the control stack (i.e. if, block, loop).
|
| +struct Control {
|
| + const byte* pc;
|
| + int stack_depth; // stack height at the beginning of the construct.
|
| + int block_depth; // block height at the beginning of the construct.
|
| + SsaEnv* ssa_env; // end environment for the construct.
|
| + TFNode* node; // result node for the construct.
|
| + LocalType type; // result type for the construct.
|
| +
|
| + int counter; // used by if.
|
| +  int max;      // used by if.
|
| +
|
| + bool is_if() { return *pc == kExprIf; }
|
| + bool is_block() { return *pc == kExprBlock; }
|
| + bool is_loop() { return *pc == kExprLoop && stack_depth < 0; }
|
| };
|
|
|
| // Macros that build nodes only if there is a graph and the current SSA
|
| @@ -188,9 +191,11 @@ class WasmDecoder : public Decoder {
|
| }
|
|
|
| inline bool Validate(const byte* pc, BreakDepthOperand& operand,
|
| - ZoneVector<Block>& blocks) {
|
| + ZoneVector<size_t>& blocks,
|
| + ZoneVector<Control>& control) {
|
| if (operand.depth < blocks.size()) {
|
| - operand.target = &blocks[blocks.size() - operand.depth - 1];
|
| + operand.target = &control[blocks[blocks.size() - operand.depth - 1]];
|
| + DCHECK(!operand.target->is_if());
|
| return true;
|
| }
|
| error(pc, pc + 1, "invalid break depth");
|
| @@ -229,17 +234,18 @@ class WasmDecoder : public Decoder {
|
| case kExprLoadGlobal:
|
| case kExprNop:
|
| case kExprUnreachable:
|
| + case kExprEnd:
|
| return 0;
|
|
|
| case kExprBr:
|
| case kExprStoreGlobal:
|
| case kExprSetLocal:
|
| + case kExprElse:
|
| return 1;
|
|
|
| case kExprIf:
|
| case kExprBrIf:
|
| return 2;
|
| - case kExprIfElse:
|
| case kExprSelect:
|
| return 3;
|
|
|
| @@ -281,7 +287,6 @@ class WasmDecoder : public Decoder {
|
| FOREACH_SIMPLE_OPCODE(DECLARE_OPCODE_CASE)
|
| FOREACH_ASMJS_COMPAT_OPCODE(DECLARE_OPCODE_CASE)
|
| #undef DECLARE_OPCODE_CASE
|
| - case kExprDeclLocals:
|
| default:
|
| UNREACHABLE();
|
| return 0;
|
| @@ -298,11 +303,6 @@ class WasmDecoder : public Decoder {
|
| MemoryAccessOperand operand(this, pc);
|
| return 1 + operand.length;
|
| }
|
| - case kExprBlock:
|
| - case kExprLoop: {
|
| - BlockCountOperand operand(this, pc);
|
| - return 1 + operand.length;
|
| - }
|
| case kExprBr:
|
| case kExprBrIf: {
|
| BreakDepthOperand operand(this, pc);
|
| @@ -368,48 +368,57 @@ class SR_WasmDecoder : public WasmDecoder {
|
| builder_(builder),
|
| base_(body.base),
|
| local_type_vec_(zone),
|
| - trees_(zone),
|
| stack_(zone),
|
| - blocks_(zone),
|
| - ifs_(zone) {
|
| + control_(zone),
|
| + blocks_(zone) {
|
| local_types_ = &local_type_vec_;
|
| }
|
|
|
| - TreeResult Decode() {
|
| + bool Decode() {
|
| + base::ElapsedTimer decode_timer;
|
| + if (FLAG_trace_wasm_decode_time) {
|
| + decode_timer.Start();
|
| + }
|
| + stack_.clear();
|
| + control_.clear();
|
| +
|
| if (end_ < pc_) {
|
| error(pc_, "function body end < start");
|
| - return result_;
|
| + return false;
|
| }
|
|
|
| DecodeLocalDecls();
|
| InitSsaEnv();
|
| DecodeFunctionBody();
|
|
|
| - Tree* tree = nullptr;
|
| - if (ok()) {
|
| - if (ssa_env_->go()) {
|
| - if (stack_.size() > 0) {
|
| - error(stack_.back().pc(), end_, "fell off end of code");
|
| - }
|
| - AddImplicitReturnAtEnd();
|
| - }
|
| - if (trees_.size() == 0) {
|
| - if (sig_->return_count() > 0) {
|
| - error(start_, "no trees created");
|
| - }
|
| - } else {
|
| - tree = trees_[0];
|
| - }
|
| + if (failed()) return TraceFailed();
|
| +
|
| + if (!control_.empty()) {
|
| + error(pc_, control_.back().pc, "unterminated control structure");
|
| + return TraceFailed();
|
| + }
|
| +
|
| + if (ssa_env_->go()) {
|
| + TRACE(" @%-6d #xx:%-20s|", startrel(pc_), "ImplicitReturn");
|
| + DoReturn();
|
| + if (failed()) return TraceFailed();
|
| + TRACE("\n");
|
| }
|
|
|
| - if (ok()) {
|
| - TRACE("wasm-decode ok\n");
|
| + if (FLAG_trace_wasm_decode_time) {
|
| + double ms = decode_timer.Elapsed().InMillisecondsF();
|
| + PrintF("wasm-decode ok (%0.3f ms)\n\n", ms);
|
| } else {
|
| - TRACE("wasm-error module+%-6d func+%d: %s\n\n", baserel(error_pc_),
|
| - startrel(error_pc_), error_msg_.get());
|
| + TRACE("wasm-decode ok\n\n");
|
| }
|
|
|
| - return toResult(tree);
|
| + return true;
|
| + }
|
| +
|
| + bool TraceFailed() {
|
| + TRACE("wasm-error module+%-6d func+%d: %s\n\n", baserel(error_pc_),
|
| + startrel(error_pc_), error_msg_.get());
|
| + return false;
|
| }
|
|
|
| std::vector<LocalType>* DecodeLocalDeclsForTesting() {
|
| @@ -440,15 +449,13 @@ class SR_WasmDecoder : public WasmDecoder {
|
| Zone* zone_;
|
| TFBuilder* builder_;
|
| const byte* base_;
|
| - TreeResult result_;
|
|
|
| SsaEnv* ssa_env_;
|
|
|
| ZoneVector<LocalType> local_type_vec_;
|
| - ZoneVector<Tree*> trees_;
|
| - ZoneVector<Production> stack_;
|
| - ZoneVector<Block> blocks_;
|
| - ZoneVector<IfEnv> ifs_;
|
| + ZoneVector<Value> stack_;
|
| + ZoneVector<Control> control_;
|
| + ZoneVector<size_t> blocks_;
|
|
|
| inline bool build() { return builder_ && ssa_env_->go(); }
|
|
|
| @@ -500,53 +507,6 @@ class SR_WasmDecoder : public WasmDecoder {
|
| }
|
| }
|
|
|
| - void Leaf(LocalType type, TFNode* node = nullptr) {
|
| - size_t size = sizeof(Tree);
|
| - Tree* tree = reinterpret_cast<Tree*>(zone_->New(size));
|
| - tree->type = type;
|
| - tree->count = 0;
|
| - tree->pc = pc_;
|
| - tree->node = node;
|
| - tree->children[0] = nullptr;
|
| - Reduce(tree);
|
| - }
|
| -
|
| - void Shift(LocalType type, uint32_t count) {
|
| - size_t size =
|
| - sizeof(Tree) + (count == 0 ? 0 : ((count - 1) * sizeof(Tree*)));
|
| - Tree* tree = reinterpret_cast<Tree*>(zone_->New(size));
|
| - tree->type = type;
|
| - tree->count = count;
|
| - tree->pc = pc_;
|
| - tree->node = nullptr;
|
| - for (uint32_t i = 0; i < count; i++) tree->children[i] = nullptr;
|
| - if (count == 0) {
|
| - Production p = {tree, 0};
|
| - Reduce(&p);
|
| - Reduce(tree);
|
| - } else {
|
| - stack_.push_back({tree, 0});
|
| - }
|
| - }
|
| -
|
| - void Reduce(Tree* tree) {
|
| - while (true) {
|
| - if (stack_.size() == 0) {
|
| - trees_.push_back(tree);
|
| - break;
|
| - }
|
| - Production* p = &stack_.back();
|
| - p->tree->children[p->index++] = tree;
|
| - Reduce(p);
|
| - if (p->done()) {
|
| - tree = p->tree;
|
| - stack_.pop_back();
|
| - } else {
|
| - break;
|
| - }
|
| - }
|
| - }
|
| -
|
| char* indentation() {
|
| static const int kMaxIndent = 64;
|
| static char bytes[kMaxIndent + 1];
|
| @@ -597,11 +557,11 @@ class SR_WasmDecoder : public WasmDecoder {
|
| total_locals_ = local_type_vec_.size();
|
| }
|
|
|
| - // Decodes the body of a function, producing reduced trees into {result}.
|
| + // Decodes the body of a function.
|
| void DecodeFunctionBody() {
|
| - TRACE("wasm-decode %p...%p (%d bytes) %s\n",
|
| + TRACE("wasm-decode %p...%p (module+%d, %d bytes) %s\n",
|
| reinterpret_cast<const void*>(start_),
|
| - reinterpret_cast<const void*>(limit_),
|
| + reinterpret_cast<const void*>(limit_), baserel(pc_),
|
| static_cast<int>(limit_ - start_), builder_ ? "graph building" : "");
|
|
|
| if (pc_ >= limit_) return; // Nothing to do.
|
| @@ -609,49 +569,45 @@ class SR_WasmDecoder : public WasmDecoder {
|
| while (true) { // decoding loop.
|
| int len = 1;
|
| WasmOpcode opcode = static_cast<WasmOpcode>(*pc_);
|
| - TRACE("wasm-decode module+%-6d %s func+%d: 0x%02x %s\n", baserel(pc_),
|
| - indentation(), startrel(pc_), opcode,
|
| - WasmOpcodes::OpcodeName(opcode));
|
| + TRACE(" @%-6d #%02x:%-20s|", startrel(pc_), opcode,
|
| + WasmOpcodes::ShortOpcodeName(opcode));
|
|
|
| FunctionSig* sig = WasmOpcodes::Signature(opcode);
|
| if (sig) {
|
| - // A simple expression with a fixed signature.
|
| - Shift(sig->GetReturn(), static_cast<uint32_t>(sig->parameter_count()));
|
| - pc_ += len;
|
| - if (pc_ >= limit_) {
|
| - // End of code reached or exceeded.
|
| - if (pc_ > limit_ && ok()) {
|
| - error("Beyond end of code");
|
| + // Fast case of a simple operator.
|
| + TFNode* node;
|
| + switch (sig->parameter_count()) {
|
| + case 1: {
|
| + Value val = Pop(0, sig->GetParam(0));
|
| + node = BUILD(Unop, opcode, val.node);
|
| + break;
|
| }
|
| - return;
|
| + case 2: {
|
| + Value rval = Pop(1, sig->GetParam(1));
|
| + Value lval = Pop(0, sig->GetParam(0));
|
| + node = BUILD(Binop, opcode, lval.node, rval.node);
|
| + break;
|
| + }
|
| + default:
|
| + UNREACHABLE();
|
| + node = nullptr;
|
| + break;
|
| }
|
| - continue; // back to decoding loop.
|
| - }
|
| -
|
| - switch (opcode) {
|
| - case kExprNop:
|
| - Leaf(kAstStmt);
|
| - break;
|
| - case kExprBlock: {
|
| - BlockCountOperand operand(this, pc_);
|
| - if (operand.count < 1) {
|
| - Leaf(kAstStmt);
|
| - } else {
|
| - Shift(kAstEnd, operand.count);
|
| + Push(GetReturnType(sig), node);
|
| + } else {
|
| + // Complex bytecode.
|
| + switch (opcode) {
|
| + case kExprNop:
|
| + Push(kAstStmt, nullptr);
|
| + break;
|
| + case kExprBlock: {
|
| // The break environment is the outer environment.
|
| SsaEnv* break_env = ssa_env_;
|
| PushBlock(break_env);
|
| SetEnv("block:start", Steal(break_env));
|
| + break;
|
| }
|
| - len = 1 + operand.length;
|
| - break;
|
| - }
|
| - case kExprLoop: {
|
| - BlockCountOperand operand(this, pc_);
|
| - if (operand.count < 1) {
|
| - Leaf(kAstStmt);
|
| - } else {
|
| - Shift(kAstEnd, operand.count);
|
| + case kExprLoop: {
|
| // The break environment is the outer environment.
|
| SsaEnv* break_env = ssa_env_;
|
| PushBlock(break_env);
|
| @@ -659,707 +615,565 @@ class SR_WasmDecoder : public WasmDecoder {
|
| // The continue environment is the inner environment.
|
| PrepareForLoop(pc_, cont_env);
|
| SetEnv("loop:start", Split(cont_env));
|
| - if (ssa_env_->go()) ssa_env_->state = SsaEnv::kReached;
|
| - PushBlock(cont_env);
|
| - blocks_.back().stack_depth = -1; // no production for inner block.
|
| + ssa_env_->SetNotMerged();
|
| + PushBlock(cont_env, true);
|
| + break;
|
| }
|
| - len = 1 + operand.length;
|
| - break;
|
| - }
|
| - case kExprIf:
|
| - Shift(kAstStmt, 2);
|
| - break;
|
| - case kExprIfElse:
|
| - Shift(kAstEnd, 3); // Result type is typeof(x) in {c ? x : y}.
|
| - break;
|
| - case kExprSelect:
|
| - Shift(kAstStmt, 3); // Result type is typeof(x) in {c ? x : y}.
|
| - break;
|
| - case kExprBr: {
|
| - BreakDepthOperand operand(this, pc_);
|
| - if (Validate(pc_, operand, blocks_)) {
|
| - Shift(kAstEnd, 1);
|
| + case kExprIf: {
|
| + // Condition on top of stack. Split environment for true branch.
|
| + Value cond = Pop(0, kAstI32);
|
| + SsaEnv* false_env = ssa_env_;
|
| + SsaEnv* true_env = Split(ssa_env_);
|
| + false_env->SetNotMerged();
|
| + PushIf(false_env);
|
| + BUILD(Branch, cond.node, &true_env->control, &false_env->control);
|
| + SetEnv("if:true", true_env);
|
| + break;
|
| }
|
| - len = 1 + operand.length;
|
| - break;
|
| - }
|
| - case kExprBrIf: {
|
| - BreakDepthOperand operand(this, pc_);
|
| - if (Validate(pc_, operand, blocks_)) {
|
| - Shift(kAstStmt, 2);
|
| + case kExprElse: {
|
| + if (control_.empty()) {
|
| + error(pc_, "else does not match any if");
|
| + break;
|
| + }
|
| + Control* c = &control_.back();
|
| + if (!c->is_if()) {
|
| + error(pc_, c->pc, "else does not match an if");
|
| + break;
|
| + }
|
| + if (++c->counter > 1) {
|
| + error(pc_, c->pc, "else already present for if");
|
| + break;
|
| + }
|
| + Value val = PopUpTo(c->stack_depth);
|
| + // The {ssa_env} stores the false env for an if.
|
| + SsaEnv* false_env = Steal(c->ssa_env);
|
| + MergeInto(c->ssa_env, &c->node, &c->type, val);
|
| + // Switch to environment for false branch.
|
| + SetEnv("if_else:false", false_env);
|
| + break;
|
| }
|
| - len = 1 + operand.length;
|
| - break;
|
| - }
|
| - case kExprBrTable: {
|
| - BranchTableOperand operand(this, pc_);
|
| - if (Validate(pc_, operand, blocks_.size())) {
|
| - Shift(kAstEnd, 1);
|
| + case kExprEnd: {
|
| + if (control_.empty()) {
|
| + error(pc_, "end does not match any if or block");
|
| + break;
|
| + }
|
| + const char* name = "block:end";
|
| + Control* c = &control_.back();
|
| + if (c->is_loop()) {
|
| + // Loops always push control in pairs.
|
| + control_.pop_back();
|
| + c = &control_.back();
|
| + name = "loop:end";
|
| + }
|
| + Value val = PopUpTo(c->stack_depth);
|
| + if (c->is_if()) {
|
| + if (c->counter == 0) {
|
| + // End of a one-armed if.
|
| + val = {val.pc, nullptr, kAstStmt};
|
| + name = "if:merge";
|
| + } else {
|
| + // End of a two-armed if.
|
| + name = "if_else:merge";
|
| + }
|
| + }
|
| + if (ssa_env_->go()) {
|
| + MergeInto(c->ssa_env, &c->node, &c->type, val);
|
| + }
|
| + SetEnv(name, c->ssa_env);
|
| + stack_.resize(c->stack_depth);
|
| + blocks_.resize(c->block_depth);
|
| + Push(c->type, c->node);
|
| + control_.pop_back();
|
| + break;
|
| }
|
| - len = 1 + operand.length;
|
| - break;
|
| - }
|
| - case kExprReturn: {
|
| - int count = static_cast<int>(sig_->return_count());
|
| - if (count == 0) {
|
| - BUILD(Return, 0, builder_->Buffer(0));
|
| - ssa_env_->Kill();
|
| - Leaf(kAstEnd);
|
| - } else {
|
| - Shift(kAstEnd, count);
|
| + case kExprSelect: {
|
| + Value cond = Pop(2, kAstI32);
|
| + Value fval = Pop();
|
| + Value tval = Pop();
|
| + if (tval.type == kAstStmt || tval.type != fval.type) {
|
| + if (tval.type != kAstEnd && fval.type != kAstEnd) {
|
| + error(pc_, "type mismatch in select");
|
| + break;
|
| + }
|
| + }
|
| + if (build()) {
|
| + DCHECK(tval.type != kAstEnd);
|
| + DCHECK(fval.type != kAstEnd);
|
| + DCHECK(cond.type != kAstEnd);
|
| + TFNode* controls[2];
|
| + builder_->Branch(cond.node, &controls[0], &controls[1]);
|
| + TFNode* merge = builder_->Merge(2, controls);
|
| + TFNode* vals[2] = {tval.node, fval.node};
|
| + TFNode* phi = builder_->Phi(tval.type, 2, vals, merge);
|
| + Push(tval.type, phi);
|
| + ssa_env_->control = merge;
|
| + } else {
|
| + Push(tval.type, nullptr);
|
| + }
|
| + break;
|
| }
|
| - break;
|
| - }
|
| - case kExprUnreachable: {
|
| - BUILD0(Unreachable);
|
| - ssa_env_->Kill(SsaEnv::kControlEnd);
|
| - Leaf(kAstEnd, nullptr);
|
| - break;
|
| - }
|
| - case kExprI8Const: {
|
| - ImmI8Operand operand(this, pc_);
|
| - Leaf(kAstI32, BUILD(Int32Constant, operand.value));
|
| - len = 1 + operand.length;
|
| - break;
|
| - }
|
| - case kExprI32Const: {
|
| - ImmI32Operand operand(this, pc_);
|
| - Leaf(kAstI32, BUILD(Int32Constant, operand.value));
|
| - len = 1 + operand.length;
|
| - break;
|
| - }
|
| - case kExprI64Const: {
|
| - ImmI64Operand operand(this, pc_);
|
| - Leaf(kAstI64, BUILD(Int64Constant, operand.value));
|
| - len = 1 + operand.length;
|
| - break;
|
| - }
|
| - case kExprF32Const: {
|
| - ImmF32Operand operand(this, pc_);
|
| - Leaf(kAstF32, BUILD(Float32Constant, operand.value));
|
| - len = 1 + operand.length;
|
| - break;
|
| - }
|
| - case kExprF64Const: {
|
| - ImmF64Operand operand(this, pc_);
|
| - Leaf(kAstF64, BUILD(Float64Constant, operand.value));
|
| - len = 1 + operand.length;
|
| - break;
|
| - }
|
| - case kExprGetLocal: {
|
| - LocalIndexOperand operand(this, pc_);
|
| - if (Validate(pc_, operand)) {
|
| - TFNode* val = build() ? ssa_env_->locals[operand.index] : nullptr;
|
| - Leaf(operand.type, val);
|
| + case kExprBr: {
|
| + Value val = Pop();
|
| + BreakDepthOperand operand(this, pc_);
|
| + if (Validate(pc_, operand, blocks_, control_)) {
|
| + BreakTo(operand.target, val);
|
| + }
|
| + len = 1 + operand.length;
|
| + Push(kAstEnd, nullptr);
|
| + break;
|
| }
|
| - len = 1 + operand.length;
|
| - break;
|
| - }
|
| - case kExprSetLocal: {
|
| - LocalIndexOperand operand(this, pc_);
|
| - if (Validate(pc_, operand)) {
|
| - Shift(operand.type, 1);
|
| + case kExprBrIf: {
|
| + Value cond = Pop(1, kAstI32);
|
| + Value val = Pop();
|
| + BreakDepthOperand operand(this, pc_);
|
| + if (Validate(pc_, operand, blocks_, control_)) {
|
| + SsaEnv* fenv = ssa_env_;
|
| + SsaEnv* tenv = Split(fenv);
|
| + fenv->SetNotMerged();
|
| + BUILD(Branch, cond.node, &tenv->control, &fenv->control);
|
| + ssa_env_ = tenv;
|
| + BreakTo(operand.target, val);
|
| + ssa_env_ = fenv;
|
| + }
|
| + len = 1 + operand.length;
|
| + Push(kAstStmt, nullptr);
|
| + break;
|
| }
|
| - len = 1 + operand.length;
|
| - break;
|
| - }
|
| - case kExprLoadGlobal: {
|
| - GlobalIndexOperand operand(this, pc_);
|
| - if (Validate(pc_, operand)) {
|
| - Leaf(operand.type, BUILD(LoadGlobal, operand.index));
|
| + case kExprBrTable: {
|
| + BranchTableOperand operand(this, pc_);
|
| + if (Validate(pc_, operand, blocks_.size())) {
|
| + Value key = Pop(0, kAstI32);
|
| + if (failed()) break;
|
| +
|
| + Value voidval = {pc_, nullptr, kAstStmt};
|
| + SsaEnv* break_env = ssa_env_;
|
| + if (operand.table_count > 0) {
|
| + // Build branches to the various blocks based on the table.
|
| + TFNode* sw = BUILD(Switch, operand.table_count + 1, key.node);
|
| +
|
| + SsaEnv* copy = Steal(break_env);
|
| + ssa_env_ = copy;
|
| + for (uint32_t i = 0; i < operand.table_count + 1; i++) {
|
| + uint16_t target = operand.read_entry(this, i);
|
| + ssa_env_ = Split(copy);
|
| + ssa_env_->control = (i == operand.table_count)
|
| + ? BUILD(IfDefault, sw)
|
| + : BUILD(IfValue, i, sw);
|
| + int depth = target;
|
| + size_t block_index = blocks_[blocks_.size() - depth - 1];
|
| + Control* c = &control_[block_index];
|
| + MergeInto(c->ssa_env, &c->node, &c->type, voidval);
|
| + }
|
| + } else {
|
| + // Only a default target. Do the equivalent of br.
|
| + uint16_t target = operand.read_entry(this, 0);
|
| + int depth = target;
|
| + size_t block_index = blocks_[blocks_.size() - depth - 1];
|
| + Control* c = &control_[block_index];
|
| + MergeInto(c->ssa_env, &c->node, &c->type, voidval);
|
| + }
|
| + // br_table ends the control flow like br.
|
| + ssa_env_ = break_env;
|
| + Push(kAstStmt, nullptr);
|
| + }
|
| + len = 1 + operand.length;
|
| + break;
|
| }
|
| - len = 1 + operand.length;
|
| - break;
|
| - }
|
| - case kExprStoreGlobal: {
|
| - GlobalIndexOperand operand(this, pc_);
|
| - if (Validate(pc_, operand)) {
|
| - Shift(operand.type, 1);
|
| + case kExprReturn: {
|
| + DoReturn();
|
| + break;
|
| }
|
| - len = 1 + operand.length;
|
| - break;
|
| - }
|
| - case kExprI32LoadMem8S:
|
| - case kExprI32LoadMem8U:
|
| - case kExprI32LoadMem16S:
|
| - case kExprI32LoadMem16U:
|
| - case kExprI32LoadMem:
|
| - len = DecodeLoadMem(pc_, kAstI32);
|
| - break;
|
| - case kExprI64LoadMem8S:
|
| - case kExprI64LoadMem8U:
|
| - case kExprI64LoadMem16S:
|
| - case kExprI64LoadMem16U:
|
| - case kExprI64LoadMem32S:
|
| - case kExprI64LoadMem32U:
|
| - case kExprI64LoadMem:
|
| - len = DecodeLoadMem(pc_, kAstI64);
|
| - break;
|
| - case kExprF32LoadMem:
|
| - len = DecodeLoadMem(pc_, kAstF32);
|
| - break;
|
| - case kExprF64LoadMem:
|
| - len = DecodeLoadMem(pc_, kAstF64);
|
| - break;
|
| - case kExprI32StoreMem8:
|
| - case kExprI32StoreMem16:
|
| - case kExprI32StoreMem:
|
| - len = DecodeStoreMem(pc_, kAstI32);
|
| - break;
|
| - case kExprI64StoreMem8:
|
| - case kExprI64StoreMem16:
|
| - case kExprI64StoreMem32:
|
| - case kExprI64StoreMem:
|
| - len = DecodeStoreMem(pc_, kAstI64);
|
| - break;
|
| - case kExprF32StoreMem:
|
| - len = DecodeStoreMem(pc_, kAstF32);
|
| - break;
|
| - case kExprF64StoreMem:
|
| - len = DecodeStoreMem(pc_, kAstF64);
|
| - break;
|
| - case kExprMemorySize:
|
| - Leaf(kAstI32, BUILD(MemSize, 0));
|
| - break;
|
| - case kExprGrowMemory:
|
| - Shift(kAstI32, 1);
|
| - break;
|
| - case kExprCallFunction: {
|
| - FunctionIndexOperand operand(this, pc_);
|
| - if (Validate(pc_, operand)) {
|
| - LocalType type = operand.sig->return_count() == 0
|
| - ? kAstStmt
|
| - : operand.sig->GetReturn();
|
| - Shift(type, static_cast<int>(operand.sig->parameter_count()));
|
| + case kExprUnreachable: {
|
| + Push(kAstEnd, BUILD0(Unreachable));
|
| + ssa_env_->Kill(SsaEnv::kControlEnd);
|
| + break;
|
| }
|
| - len = 1 + operand.length;
|
| - break;
|
| - }
|
| - case kExprCallIndirect: {
|
| - SignatureIndexOperand operand(this, pc_);
|
| - if (Validate(pc_, operand)) {
|
| - LocalType type = operand.sig->return_count() == 0
|
| - ? kAstStmt
|
| - : operand.sig->GetReturn();
|
| - Shift(type, static_cast<int>(1 + operand.sig->parameter_count()));
|
| + case kExprI8Const: {
|
| + ImmI8Operand operand(this, pc_);
|
| + Push(kAstI32, BUILD(Int32Constant, operand.value));
|
| + len = 1 + operand.length;
|
| + break;
|
| }
|
| - len = 1 + operand.length;
|
| - break;
|
| - }
|
| - case kExprCallImport: {
|
| - ImportIndexOperand operand(this, pc_);
|
| - if (Validate(pc_, operand)) {
|
| - LocalType type = operand.sig->return_count() == 0
|
| - ? kAstStmt
|
| - : operand.sig->GetReturn();
|
| - Shift(type, static_cast<int>(operand.sig->parameter_count()));
|
| + case kExprI32Const: {
|
| + ImmI32Operand operand(this, pc_);
|
| + Push(kAstI32, BUILD(Int32Constant, operand.value));
|
| + len = 1 + operand.length;
|
| + break;
|
| }
|
| - len = 1 + operand.length;
|
| - break;
|
| + case kExprI64Const: {
|
| + ImmI64Operand operand(this, pc_);
|
| + Push(kAstI64, BUILD(Int64Constant, operand.value));
|
| + len = 1 + operand.length;
|
| + break;
|
| + }
|
| + case kExprF32Const: {
|
| + ImmF32Operand operand(this, pc_);
|
| + Push(kAstF32, BUILD(Float32Constant, operand.value));
|
| + len = 1 + operand.length;
|
| + break;
|
| + }
|
| + case kExprF64Const: {
|
| + ImmF64Operand operand(this, pc_);
|
| + Push(kAstF64, BUILD(Float64Constant, operand.value));
|
| + len = 1 + operand.length;
|
| + break;
|
| + }
|
| + case kExprGetLocal: {
|
| + LocalIndexOperand operand(this, pc_);
|
| + if (Validate(pc_, operand)) {
|
| + if (build()) {
|
| + Push(operand.type, ssa_env_->locals[operand.index]);
|
| + } else {
|
| + Push(operand.type, nullptr);
|
| + }
|
| + }
|
| + len = 1 + operand.length;
|
| + break;
|
| + }
|
| + case kExprSetLocal: {
|
| + LocalIndexOperand operand(this, pc_);
|
| + if (Validate(pc_, operand)) {
|
| + Value val = Pop(0, local_type_vec_[operand.index]);
|
| + if (ssa_env_->locals) ssa_env_->locals[operand.index] = val.node;
|
| + Push(val.type, val.node);
|
| + }
|
| + len = 1 + operand.length;
|
| + break;
|
| + }
|
| + case kExprLoadGlobal: {
|
| + GlobalIndexOperand operand(this, pc_);
|
| + if (Validate(pc_, operand)) {
|
| + Push(operand.type, BUILD(LoadGlobal, operand.index));
|
| + }
|
| + len = 1 + operand.length;
|
| + break;
|
| + }
|
| + case kExprStoreGlobal: {
|
| + GlobalIndexOperand operand(this, pc_);
|
| + if (Validate(pc_, operand)) {
|
| + Value val = Pop(0, operand.type);
|
| + BUILD(StoreGlobal, operand.index, val.node);
|
| + Push(val.type, val.node);
|
| + }
|
| + len = 1 + operand.length;
|
| + break;
|
| + }
|
| + case kExprI32LoadMem8S:
|
| + len = DecodeLoadMem(kAstI32, MachineType::Int8());
|
| + break;
|
| + case kExprI32LoadMem8U:
|
| + len = DecodeLoadMem(kAstI32, MachineType::Uint8());
|
| + break;
|
| + case kExprI32LoadMem16S:
|
| + len = DecodeLoadMem(kAstI32, MachineType::Int16());
|
| + break;
|
| + case kExprI32LoadMem16U:
|
| + len = DecodeLoadMem(kAstI32, MachineType::Uint16());
|
| + break;
|
| + case kExprI32LoadMem:
|
| + len = DecodeLoadMem(kAstI32, MachineType::Int32());
|
| + break;
|
| +
|
| + case kExprI64LoadMem8S:
|
| + len = DecodeLoadMem(kAstI64, MachineType::Int8());
|
| + break;
|
| + case kExprI64LoadMem8U:
|
| + len = DecodeLoadMem(kAstI64, MachineType::Uint8());
|
| + break;
|
| + case kExprI64LoadMem16S:
|
| + len = DecodeLoadMem(kAstI64, MachineType::Int16());
|
| + break;
|
| + case kExprI64LoadMem16U:
|
| + len = DecodeLoadMem(kAstI64, MachineType::Uint16());
|
| + break;
|
| + case kExprI64LoadMem32S:
|
| + len = DecodeLoadMem(kAstI64, MachineType::Int32());
|
| + break;
|
| + case kExprI64LoadMem32U:
|
| + len = DecodeLoadMem(kAstI64, MachineType::Uint32());
|
| + break;
|
| + case kExprI64LoadMem:
|
| + len = DecodeLoadMem(kAstI64, MachineType::Int64());
|
| + break;
|
| + case kExprF32LoadMem:
|
| + len = DecodeLoadMem(kAstF32, MachineType::Float32());
|
| + break;
|
| + case kExprF64LoadMem:
|
| + len = DecodeLoadMem(kAstF64, MachineType::Float64());
|
| + break;
|
| + case kExprI32StoreMem8:
|
| + len = DecodeStoreMem(kAstI32, MachineType::Int8());
|
| + break;
|
| + case kExprI32StoreMem16:
|
| + len = DecodeStoreMem(kAstI32, MachineType::Int16());
|
| + break;
|
| + case kExprI32StoreMem:
|
| + len = DecodeStoreMem(kAstI32, MachineType::Int32());
|
| + break;
|
| + case kExprI64StoreMem8:
|
| + len = DecodeStoreMem(kAstI64, MachineType::Int8());
|
| + break;
|
| + case kExprI64StoreMem16:
|
| + len = DecodeStoreMem(kAstI64, MachineType::Int16());
|
| + break;
|
| + case kExprI64StoreMem32:
|
| + len = DecodeStoreMem(kAstI64, MachineType::Int32());
|
| + break;
|
| + case kExprI64StoreMem:
|
| + len = DecodeStoreMem(kAstI64, MachineType::Int64());
|
| + break;
|
| + case kExprF32StoreMem:
|
| + len = DecodeStoreMem(kAstF32, MachineType::Float32());
|
| + break;
|
| + case kExprF64StoreMem:
|
| + len = DecodeStoreMem(kAstF64, MachineType::Float64());
|
| + break;
|
| +
|
| + case kExprMemorySize:
|
| + Push(kAstI32, BUILD(MemSize, 0));
|
| + break;
|
| + case kExprGrowMemory: {
|
| + Value val = Pop(0, kAstI32);
|
| + USE(val); // TODO(titzer): build node for grow memory
|
| + Push(kAstI32, BUILD(Int32Constant, 0));
|
| + break;
|
| + }
|
| + case kExprCallFunction: {
|
| + FunctionIndexOperand operand(this, pc_);
|
| + if (Validate(pc_, operand)) {
|
| + TFNode** buffer = PopArgs(operand.sig);
|
| + Push(GetReturnType(operand.sig),
|
| + BUILD(CallDirect, operand.index, buffer));
|
| + }
|
| + len = 1 + operand.length;
|
| + break;
|
| + }
|
| + case kExprCallIndirect: {
|
| + SignatureIndexOperand operand(this, pc_);
|
| + if (Validate(pc_, operand)) {
|
| + TFNode** buffer = PopArgs(operand.sig);
|
| + Value index = Pop(0, kAstI32);
|
| + if (buffer) buffer[0] = index.node;
|
| + Push(GetReturnType(operand.sig),
|
| + BUILD(CallIndirect, operand.index, buffer));
|
| + }
|
| + len = 1 + operand.length;
|
| + break;
|
| + }
|
| + case kExprCallImport: {
|
| + ImportIndexOperand operand(this, pc_);
|
| + if (Validate(pc_, operand)) {
|
| + TFNode** buffer = PopArgs(operand.sig);
|
| + Push(GetReturnType(operand.sig),
|
| + BUILD(CallImport, operand.index, buffer));
|
| + }
|
| + len = 1 + operand.length;
|
| + break;
|
| + }
|
| + default:
|
| + error("Invalid opcode");
|
| + return;
|
| }
|
| - case kExprDeclLocals:
|
| - default:
|
| - error("Invalid opcode");
|
| - return;
|
| + } // end complex bytecode
|
| +
|
| +#if DEBUG
|
| + if (FLAG_trace_wasm_decoder) {
|
| + for (size_t i = 0; i < stack_.size(); i++) {
|
| + Value& val = stack_[i];
|
| + PrintF(
|
| + " %c@%d:%s", WasmOpcodes::ShortNameOf(val.type),
|
| + static_cast<int>(val.pc - start_),
|
| + WasmOpcodes::ShortOpcodeName(static_cast<WasmOpcode>(*val.pc)));
|
| + }
|
| + PrintF("\n");
|
| }
|
| +#endif
|
| pc_ += len;
|
| if (pc_ >= limit_) {
|
| // End of code reached or exceeded.
|
| - if (pc_ > limit_ && ok()) {
|
| - error("Beyond end of code");
|
| - }
|
| + if (pc_ > limit_ && ok()) error("Beyond end of code");
|
| return;
|
| }
|
| + } // end decode loop
|
| + } // end DecodeFunctionBody()
|
| +
|
| + TFNode** PopArgs(FunctionSig* sig) {
|
| + if (build()) {
|
| + int count = static_cast<int>(sig->parameter_count());
|
| + TFNode** buffer = builder_->Buffer(count + 1);
|
| + buffer[0] = nullptr; // reserved for code object or function index.
|
| + for (int i = count - 1; i >= 0; i--) {
|
| + buffer[i + 1] = Pop(i, sig->GetParam(i)).node;
|
| + }
|
| + return buffer;
|
| + } else {
|
| + int count = static_cast<int>(sig->parameter_count());
|
| + for (int i = count - 1; i >= 0; i--) {
|
| + Pop(i, sig->GetParam(i));
|
| + }
|
| + return nullptr;
|
| }
|
| }
|
|
|
| - void PushBlock(SsaEnv* ssa_env) {
|
| - blocks_.push_back({ssa_env, static_cast<int>(stack_.size() - 1)});
|
| + LocalType GetReturnType(FunctionSig* sig) {
|
| + return sig->return_count() == 0 ? kAstStmt : sig->GetReturn();
|
| }
|
|
|
| - int DecodeLoadMem(const byte* pc, LocalType type) {
|
| - MemoryAccessOperand operand(this, pc);
|
| - Shift(type, 1);
|
| - return 1 + operand.length;
|
| + void PushBlock(SsaEnv* merge_env, bool loop = false) {
|
| + int stack_depth = loop ? -1 : static_cast<int>(stack_.size());
|
| + int block_depth = static_cast<int>(blocks_.size());
|
| + control_.push_back(
|
| + {pc_, stack_depth, block_depth, merge_env, nullptr, kAstEnd, 0, 0});
|
| + blocks_.push_back(control_.size() - 1);
|
| }
|
|
|
| - int DecodeStoreMem(const byte* pc, LocalType type) {
|
| - MemoryAccessOperand operand(this, pc);
|
| - Shift(type, 2);
|
| + void PushIf(SsaEnv* merge_env) {
|
| + int stack_depth = static_cast<int>(stack_.size());
|
| + int block_depth = static_cast<int>(blocks_.size());
|
| + control_.push_back(
|
| + {pc_, stack_depth, block_depth, merge_env, nullptr, kAstStmt, 0, 0});
|
| + }
|
| +
|
| + int DecodeLoadMem(LocalType type, MachineType mem_type) {
|
| + MemoryAccessOperand operand(this, pc_);
|
| + Value index = Pop(0, kAstI32);
|
| + TFNode* node = BUILD(LoadMem, type, mem_type, index.node, operand.offset);
|
| + Push(type, node);
|
| return 1 + operand.length;
|
| }
|
|
|
| - void AddImplicitReturnAtEnd() {
|
| - int retcount = static_cast<int>(sig_->return_count());
|
| - if (retcount == 0) {
|
| - BUILD0(ReturnVoid);
|
| - return;
|
| - }
|
| + int DecodeStoreMem(LocalType type, MachineType mem_type) {
|
| + MemoryAccessOperand operand(this, pc_);
|
| + Value val = Pop(1, type);
|
| + Value index = Pop(0, kAstI32);
|
| + BUILD(StoreMem, mem_type, index.node, operand.offset, val.node);
|
| + Push(type, val.node);
|
| + return 1 + operand.length;
|
| + }
|
|
|
| - if (static_cast<int>(trees_.size()) < retcount) {
|
| - error(limit_, nullptr,
|
| - "ImplicitReturn expects %d arguments, only %d remain", retcount,
|
| - static_cast<int>(trees_.size()));
|
| - return;
|
| - }
|
| + void DoReturn() {
|
| + int count = static_cast<int>(sig_->return_count());
|
| + TFNode** buffer = nullptr;
|
| + if (build()) buffer = builder_->Buffer(count);
|
|
|
| - TRACE("wasm-decode implicit return of %d args\n", retcount);
|
| -
|
| - TFNode** buffer = BUILD(Buffer, retcount);
|
| - for (int index = 0; index < retcount; index++) {
|
| - Tree* tree = trees_[trees_.size() - 1 - index];
|
| - if (buffer) buffer[index] = tree->node;
|
| - LocalType expected = sig_->GetReturn(index);
|
| - if (tree->type != expected) {
|
| - error(limit_, tree->pc,
|
| - "ImplicitReturn[%d] expected type %s, found %s of type %s", index,
|
| - WasmOpcodes::TypeName(expected),
|
| - WasmOpcodes::OpcodeName(tree->opcode()),
|
| - WasmOpcodes::TypeName(tree->type));
|
| - return;
|
| - }
|
| + // Pop return values off the stack in reverse order.
|
| + for (int i = count - 1; i >= 0; i--) {
|
| + Value val = Pop(i, sig_->GetReturn(i));
|
| + if (buffer) buffer[i] = val.node;
|
| }
|
|
|
| - BUILD(Return, retcount, buffer);
|
| + Push(kAstEnd, BUILD(Return, count, buffer));
|
| + ssa_env_->Kill(SsaEnv::kControlEnd);
|
| }
|
|
|
| - int baserel(const byte* ptr) {
|
| - return base_ ? static_cast<int>(ptr - base_) : 0;
|
| + void Push(LocalType type, TFNode* node) {
|
| + stack_.push_back({pc_, node, type});
|
| }
|
|
|
| - int startrel(const byte* ptr) { return static_cast<int>(ptr - start_); }
|
| -
|
| - void Reduce(Production* p) {
|
| - WasmOpcode opcode = p->opcode();
|
| - TRACE("-----reduce module+%-6d %s func+%d: 0x%02x %s\n", baserel(p->pc()),
|
| - indentation(), startrel(p->pc()), opcode,
|
| - WasmOpcodes::OpcodeName(opcode));
|
| - FunctionSig* sig = WasmOpcodes::Signature(opcode);
|
| - if (sig) {
|
| - // A simple expression with a fixed signature.
|
| - TypeCheckLast(p, sig->GetParam(p->index - 1));
|
| - if (p->done() && build()) {
|
| - if (sig->parameter_count() == 2) {
|
| - p->tree->node = builder_->Binop(opcode, p->tree->children[0]->node,
|
| - p->tree->children[1]->node);
|
| - } else if (sig->parameter_count() == 1) {
|
| - p->tree->node = builder_->Unop(opcode, p->tree->children[0]->node);
|
| - } else {
|
| - UNREACHABLE();
|
| - }
|
| + Value Pop(int index, LocalType expected) {
|
| + Value val = Pop();
|
| + if (val.type != expected) {
|
| + if (val.type != kAstEnd) {
|
| + error(pc_, val.pc, "%s[%d] expected type %s, found %s of type %s",
|
| + WasmOpcodes::ShortOpcodeName(static_cast<WasmOpcode>(*pc_)),
|
| + index, WasmOpcodes::TypeName(expected),
|
| + WasmOpcodes::ShortOpcodeName(static_cast<WasmOpcode>(*val.pc)),
|
| + WasmOpcodes::TypeName(val.type));
|
| }
|
| - return;
|
| }
|
| + return val;
|
| + }
|
|
|
| - switch (opcode) {
|
| - case kExprBlock: {
|
| - if (p->done()) {
|
| - Block* last = &blocks_.back();
|
| - DCHECK_EQ(stack_.size() - 1, last->stack_depth);
|
| - // fallthrough with the last expression.
|
| - ReduceBreakToExprBlock(p, last);
|
| - SetEnv("block:end", last->ssa_env);
|
| - blocks_.pop_back();
|
| - }
|
| - break;
|
| - }
|
| - case kExprLoop: {
|
| - if (p->done()) {
|
| - // Pop the continue environment.
|
| - blocks_.pop_back();
|
| - // Get the break environment.
|
| - Block* last = &blocks_.back();
|
| - DCHECK_EQ(stack_.size() - 1, last->stack_depth);
|
| - // fallthrough with the last expression.
|
| - ReduceBreakToExprBlock(p, last);
|
| - SetEnv("loop:end", last->ssa_env);
|
| - blocks_.pop_back();
|
| - }
|
| - break;
|
| - }
|
| - case kExprIf: {
|
| - if (p->index == 1) {
|
| - // Condition done. Split environment for true branch.
|
| - TypeCheckLast(p, kAstI32);
|
| - SsaEnv* false_env = ssa_env_;
|
| - SsaEnv* true_env = Split(ssa_env_);
|
| - ifs_.push_back({nullptr, false_env, nullptr});
|
| - BUILD(Branch, p->last()->node, &true_env->control,
|
| - &false_env->control);
|
| - SetEnv("if:true", true_env);
|
| - } else if (p->index == 2) {
|
| - // True block done. Merge true and false environments.
|
| - IfEnv* env = &ifs_.back();
|
| - SsaEnv* merge = env->merge_env;
|
| - if (merge->go()) {
|
| - merge->state = SsaEnv::kReached;
|
| - Goto(ssa_env_, merge);
|
| - }
|
| - SetEnv("if:merge", merge);
|
| - ifs_.pop_back();
|
| - }
|
| - break;
|
| - }
|
| - case kExprIfElse: {
|
| - if (p->index == 1) {
|
| - // Condition done. Split environment for true and false branches.
|
| - TypeCheckLast(p, kAstI32);
|
| - SsaEnv* merge_env = ssa_env_;
|
| - TFNode* if_true = nullptr;
|
| - TFNode* if_false = nullptr;
|
| - BUILD(Branch, p->last()->node, &if_true, &if_false);
|
| - SsaEnv* false_env = Split(ssa_env_);
|
| - SsaEnv* true_env = Steal(ssa_env_);
|
| - false_env->control = if_false;
|
| - true_env->control = if_true;
|
| - ifs_.push_back({false_env, merge_env, nullptr});
|
| - SetEnv("if_else:true", true_env);
|
| - } else if (p->index == 2) {
|
| - // True expr done.
|
| - IfEnv* env = &ifs_.back();
|
| - MergeIntoProduction(p, env->merge_env, p->last());
|
| - // Switch to environment for false branch.
|
| - SsaEnv* false_env = ifs_.back().false_env;
|
| - SetEnv("if_else:false", false_env);
|
| - } else if (p->index == 3) {
|
| - // False expr done.
|
| - IfEnv* env = &ifs_.back();
|
| - MergeIntoProduction(p, env->merge_env, p->last());
|
| - SetEnv("if_else:merge", env->merge_env);
|
| - ifs_.pop_back();
|
| - }
|
| - break;
|
| - }
|
| - case kExprSelect: {
|
| - if (p->index == 1) {
|
| - // True expression done.
|
| - p->tree->type = p->last()->type;
|
| - if (p->tree->type == kAstStmt) {
|
| - error(p->pc(), p->tree->children[1]->pc,
|
| - "select operand should be expression");
|
| - }
|
| - } else if (p->index == 2) {
|
| - // False expression done.
|
| - TypeCheckLast(p, p->tree->type);
|
| - } else {
|
| - // Condition done.
|
| - DCHECK(p->done());
|
| - TypeCheckLast(p, kAstI32);
|
| - if (build()) {
|
| - TFNode* controls[2];
|
| - builder_->Branch(p->tree->children[2]->node, &controls[0],
|
| - &controls[1]);
|
| - TFNode* merge = builder_->Merge(2, controls);
|
| - TFNode* vals[2] = {p->tree->children[0]->node,
|
| - p->tree->children[1]->node};
|
| - TFNode* phi = builder_->Phi(p->tree->type, 2, vals, merge);
|
| - p->tree->node = phi;
|
| - ssa_env_->control = merge;
|
| - }
|
| - }
|
| - break;
|
| - }
|
| - case kExprBr: {
|
| - BreakDepthOperand operand(this, p->pc());
|
| - CHECK(Validate(p->pc(), operand, blocks_));
|
| - ReduceBreakToExprBlock(p, operand.target);
|
| - break;
|
| - }
|
| - case kExprBrIf: {
|
| - if (p->done()) {
|
| - TypeCheckLast(p, kAstI32);
|
| - BreakDepthOperand operand(this, p->pc());
|
| - CHECK(Validate(p->pc(), operand, blocks_));
|
| - SsaEnv* fenv = ssa_env_;
|
| - SsaEnv* tenv = Split(fenv);
|
| - BUILD(Branch, p->tree->children[1]->node, &tenv->control,
|
| - &fenv->control);
|
| - ssa_env_ = tenv;
|
| - ReduceBreakToExprBlock(p, operand.target, p->tree->children[0]);
|
| - ssa_env_ = fenv;
|
| - }
|
| - break;
|
| - }
|
| - case kExprBrTable: {
|
| - if (p->index == 1) {
|
| - // Switch key finished.
|
| - TypeCheckLast(p, kAstI32);
|
| - if (failed()) break;
|
| -
|
| - BranchTableOperand operand(this, p->pc());
|
| - DCHECK(Validate(p->pc(), operand, blocks_.size()));
|
| -
|
| - // Build a switch only if it has more than just a default target.
|
| - bool build_switch = operand.table_count > 0;
|
| - TFNode* sw = nullptr;
|
| - if (build_switch) {
|
| - sw = BUILD(Switch, operand.table_count + 1, p->last()->node);
|
| - }
|
| -
|
| - // Process the targets of the break table.
|
| - SsaEnv* prev = ssa_env_;
|
| - SsaEnv* copy = Steal(prev);
|
| - for (uint32_t i = 0; i < operand.table_count + 1; i++) {
|
| - uint32_t target = operand.read_entry(this, i);
|
| - SsaEnv* env = copy;
|
| - if (build_switch) {
|
| - ssa_env_ = env = Split(env);
|
| - env->control = i == operand.table_count ? BUILD(IfDefault, sw)
|
| - : BUILD(IfValue, i, sw);
|
| - }
|
| - SsaEnv* tenv = blocks_[blocks_.size() - target - 1].ssa_env;
|
| - Goto(env, tenv);
|
| - }
|
| - ssa_env_ = prev;
|
| - }
|
| - break;
|
| - }
|
| - case kExprReturn: {
|
| - TypeCheckLast(p, sig_->GetReturn(p->index - 1));
|
| - if (p->done()) {
|
| - if (build()) {
|
| - int count = p->tree->count;
|
| - TFNode** buffer = builder_->Buffer(count);
|
| - for (int i = 0; i < count; i++) {
|
| - buffer[i] = p->tree->children[i]->node;
|
| - }
|
| - BUILD(Return, count, buffer);
|
| - }
|
| - ssa_env_->Kill(SsaEnv::kControlEnd);
|
| - }
|
| - break;
|
| - }
|
| - case kExprSetLocal: {
|
| - LocalIndexOperand operand(this, p->pc());
|
| - CHECK(Validate(p->pc(), operand));
|
| - Tree* val = p->last();
|
| - if (operand.type == val->type) {
|
| - if (build()) ssa_env_->locals[operand.index] = val->node;
|
| - p->tree->node = val->node;
|
| - } else {
|
| - error(p->pc(), val->pc, "Typecheck failed in SetLocal");
|
| - }
|
| - break;
|
| - }
|
| - case kExprStoreGlobal: {
|
| - GlobalIndexOperand operand(this, p->pc());
|
| - CHECK(Validate(p->pc(), operand));
|
| - Tree* val = p->last();
|
| - if (operand.type == val->type) {
|
| - BUILD(StoreGlobal, operand.index, val->node);
|
| - p->tree->node = val->node;
|
| - } else {
|
| - error(p->pc(), val->pc, "Typecheck failed in StoreGlobal");
|
| - }
|
| - break;
|
| - }
|
| -
|
| - case kExprI32LoadMem8S:
|
| - return ReduceLoadMem(p, kAstI32, MachineType::Int8());
|
| - case kExprI32LoadMem8U:
|
| - return ReduceLoadMem(p, kAstI32, MachineType::Uint8());
|
| - case kExprI32LoadMem16S:
|
| - return ReduceLoadMem(p, kAstI32, MachineType::Int16());
|
| - case kExprI32LoadMem16U:
|
| - return ReduceLoadMem(p, kAstI32, MachineType::Uint16());
|
| - case kExprI32LoadMem:
|
| - return ReduceLoadMem(p, kAstI32, MachineType::Int32());
|
| -
|
| - case kExprI64LoadMem8S:
|
| - return ReduceLoadMem(p, kAstI64, MachineType::Int8());
|
| - case kExprI64LoadMem8U:
|
| - return ReduceLoadMem(p, kAstI64, MachineType::Uint8());
|
| - case kExprI64LoadMem16S:
|
| - return ReduceLoadMem(p, kAstI64, MachineType::Int16());
|
| - case kExprI64LoadMem16U:
|
| - return ReduceLoadMem(p, kAstI64, MachineType::Uint16());
|
| - case kExprI64LoadMem32S:
|
| - return ReduceLoadMem(p, kAstI64, MachineType::Int32());
|
| - case kExprI64LoadMem32U:
|
| - return ReduceLoadMem(p, kAstI64, MachineType::Uint32());
|
| - case kExprI64LoadMem:
|
| - return ReduceLoadMem(p, kAstI64, MachineType::Int64());
|
| -
|
| - case kExprF32LoadMem:
|
| - return ReduceLoadMem(p, kAstF32, MachineType::Float32());
|
| -
|
| - case kExprF64LoadMem:
|
| - return ReduceLoadMem(p, kAstF64, MachineType::Float64());
|
| -
|
| - case kExprI32StoreMem8:
|
| - return ReduceStoreMem(p, kAstI32, MachineType::Int8());
|
| - case kExprI32StoreMem16:
|
| - return ReduceStoreMem(p, kAstI32, MachineType::Int16());
|
| - case kExprI32StoreMem:
|
| - return ReduceStoreMem(p, kAstI32, MachineType::Int32());
|
| -
|
| - case kExprI64StoreMem8:
|
| - return ReduceStoreMem(p, kAstI64, MachineType::Int8());
|
| - case kExprI64StoreMem16:
|
| - return ReduceStoreMem(p, kAstI64, MachineType::Int16());
|
| - case kExprI64StoreMem32:
|
| - return ReduceStoreMem(p, kAstI64, MachineType::Int32());
|
| - case kExprI64StoreMem:
|
| - return ReduceStoreMem(p, kAstI64, MachineType::Int64());
|
| -
|
| - case kExprF32StoreMem:
|
| - return ReduceStoreMem(p, kAstF32, MachineType::Float32());
|
| -
|
| - case kExprF64StoreMem:
|
| - return ReduceStoreMem(p, kAstF64, MachineType::Float64());
|
| -
|
| - case kExprGrowMemory:
|
| - TypeCheckLast(p, kAstI32);
|
| - // TODO(titzer): build node for GrowMemory
|
| - p->tree->node = BUILD(Int32Constant, 0);
|
| - return;
|
| + Value Pop() {
|
| + if (stack_.empty()) {
|
| + Value val = {pc_, nullptr, kAstStmt};
|
| + error(pc_, pc_, "%s found empty stack",
|
| + WasmOpcodes::ShortOpcodeName(static_cast<WasmOpcode>(*pc_)));
|
| + return val;
|
| + }
|
| + Value val = stack_.back();
|
| + stack_.pop_back();
|
| + return val;
|
| + }
|
|
|
| - case kExprCallFunction: {
|
| - FunctionIndexOperand operand(this, p->pc());
|
| - CHECK(Validate(p->pc(), operand));
|
| - if (p->index > 0) {
|
| - TypeCheckLast(p, operand.sig->GetParam(p->index - 1));
|
| - }
|
| - if (p->done() && build()) {
|
| - uint32_t count = p->tree->count + 1;
|
| - TFNode** buffer = builder_->Buffer(count);
|
| - buffer[0] = nullptr; // reserved for code object.
|
| - for (uint32_t i = 1; i < count; i++) {
|
| - buffer[i] = p->tree->children[i - 1]->node;
|
| - }
|
| - p->tree->node = builder_->CallDirect(operand.index, buffer);
|
| - }
|
| - break;
|
| - }
|
| - case kExprCallIndirect: {
|
| - SignatureIndexOperand operand(this, p->pc());
|
| - CHECK(Validate(p->pc(), operand));
|
| - if (p->index == 1) {
|
| - TypeCheckLast(p, kAstI32);
|
| - } else {
|
| - TypeCheckLast(p, operand.sig->GetParam(p->index - 2));
|
| - }
|
| - if (p->done() && build()) {
|
| - uint32_t count = p->tree->count;
|
| - TFNode** buffer = builder_->Buffer(count);
|
| - for (uint32_t i = 0; i < count; i++) {
|
| - buffer[i] = p->tree->children[i]->node;
|
| - }
|
| - p->tree->node = builder_->CallIndirect(operand.index, buffer);
|
| - }
|
| - break;
|
| - }
|
| - case kExprCallImport: {
|
| - ImportIndexOperand operand(this, p->pc());
|
| - CHECK(Validate(p->pc(), operand));
|
| - if (p->index > 0) {
|
| - TypeCheckLast(p, operand.sig->GetParam(p->index - 1));
|
| - }
|
| - if (p->done() && build()) {
|
| - uint32_t count = p->tree->count + 1;
|
| - TFNode** buffer = builder_->Buffer(count);
|
| - buffer[0] = nullptr; // reserved for code object.
|
| - for (uint32_t i = 1; i < count; i++) {
|
| - buffer[i] = p->tree->children[i - 1]->node;
|
| - }
|
| - p->tree->node = builder_->CallImport(operand.index, buffer);
|
| - }
|
| - break;
|
| - }
|
| - default:
|
| - break;
|
| + Value PopUpTo(int stack_depth) {
|
| +    if (stack_depth == static_cast<int>(stack_.size())) {
|
| + Value val = {pc_, nullptr, kAstStmt};
|
| + return val;
|
| + } else {
|
| + DCHECK_LE(stack_depth, static_cast<int>(stack_.size()));
|
| + Value val = Pop();
|
| + stack_.resize(stack_depth);
|
| + return val;
|
| }
|
| }
|
|
|
| - void ReduceBreakToExprBlock(Production* p, Block* block) {
|
| - ReduceBreakToExprBlock(p, block, p->tree->count > 0 ? p->last() : nullptr);
|
| + int baserel(const byte* ptr) {
|
| + return base_ ? static_cast<int>(ptr - base_) : 0;
|
| }
|
|
|
| - void ReduceBreakToExprBlock(Production* p, Block* block, Tree* val) {
|
| - if (block->stack_depth < 0) {
|
| + int startrel(const byte* ptr) { return static_cast<int>(ptr - start_); }
|
| +
|
| + void BreakTo(Control* block, Value& val) {
|
| + DCHECK(!block->is_if());
|
| + if (block->is_loop()) {
|
| // This is the inner loop block, which does not have a value.
|
| Goto(ssa_env_, block->ssa_env);
|
| } else {
|
| // Merge the value into the production for the block.
|
| - Production* bp = &stack_[block->stack_depth];
|
| - MergeIntoProduction(bp, block->ssa_env, val);
|
| + MergeInto(block->ssa_env, &block->node, &block->type, val);
|
| }
|
| }
|
|
|
| - void MergeIntoProduction(Production* p, SsaEnv* target, Tree* expr) {
|
| + void MergeInto(SsaEnv* target, TFNode** node, LocalType* type, Value& val) {
|
| if (!ssa_env_->go()) return;
|
| + DCHECK_NE(kAstEnd, val.type);
|
|
|
| bool first = target->state == SsaEnv::kUnreachable;
|
| Goto(ssa_env_, target);
|
| - if (expr == nullptr || expr->type == kAstEnd) return;
|
|
|
| if (first) {
|
| // first merge to this environment; set the type and the node.
|
| - p->tree->type = expr->type;
|
| - p->tree->node = expr->node;
|
| - } else {
|
| + *type = val.type;
|
| + *node = val.node;
|
| + } else if (val.type == *type && val.type != kAstStmt) {
|
| // merge with the existing value for this block.
|
| - LocalType type = p->tree->type;
|
| - if (expr->type != type) {
|
| - type = kAstStmt;
|
| - p->tree->type = kAstStmt;
|
| - p->tree->node = nullptr;
|
| - } else if (type != kAstStmt) {
|
| - p->tree->node = CreateOrMergeIntoPhi(type, target->control,
|
| - p->tree->node, expr->node);
|
| - }
|
| - }
|
| - }
|
| -
|
| - void ReduceLoadMem(Production* p, LocalType type, MachineType mem_type) {
|
| - DCHECK_EQ(1, p->index);
|
| - TypeCheckLast(p, kAstI32); // index
|
| - if (build()) {
|
| - MemoryAccessOperand operand(this, p->pc());
|
| - p->tree->node =
|
| - builder_->LoadMem(type, mem_type, p->last()->node, operand.offset);
|
| - }
|
| - }
|
| -
|
| - void ReduceStoreMem(Production* p, LocalType type, MachineType mem_type) {
|
| - if (p->index == 1) {
|
| - TypeCheckLast(p, kAstI32); // index
|
| + *node = CreateOrMergeIntoPhi(*type, target->control, *node, val.node);
|
| } else {
|
| - DCHECK_EQ(2, p->index);
|
| - TypeCheckLast(p, type);
|
| - if (build()) {
|
| - MemoryAccessOperand operand(this, p->pc());
|
| - TFNode* val = p->tree->children[1]->node;
|
| - builder_->StoreMem(mem_type, p->tree->children[0]->node, operand.offset,
|
| - val);
|
| - p->tree->node = val;
|
| - }
|
| - }
|
| - }
|
| -
|
| - void TypeCheckLast(Production* p, LocalType expected) {
|
| - LocalType result = p->last()->type;
|
| - if (result == expected) return;
|
| - if (result == kAstEnd) return;
|
| - if (expected != kAstStmt) {
|
| - error(p->pc(), p->last()->pc,
|
| - "%s[%d] expected type %s, found %s of type %s",
|
| - WasmOpcodes::OpcodeName(p->opcode()), p->index - 1,
|
| - WasmOpcodes::TypeName(expected),
|
| - WasmOpcodes::OpcodeName(p->last()->opcode()),
|
| - WasmOpcodes::TypeName(p->last()->type));
|
| + // types don't match, or block is already a stmt.
|
| + *type = kAstStmt;
|
| + *node = nullptr;
|
| }
|
| }
|
|
|
| void SetEnv(const char* reason, SsaEnv* env) {
|
| #if DEBUG
|
| - TRACE(" env = %p, block depth = %d, reason = %s", static_cast<void*>(env),
|
| - static_cast<int>(blocks_.size()), reason);
|
| - if (FLAG_trace_wasm_decoder && env && env->control) {
|
| - TRACE(", control = ");
|
| - compiler::WasmGraphBuilder::PrintDebugName(env->control);
|
| + if (FLAG_trace_wasm_decoder) {
|
| + char state = 'X';
|
| + if (env) {
|
| + switch (env->state) {
|
| + case SsaEnv::kReached:
|
| + state = 'R';
|
| + break;
|
| + case SsaEnv::kUnreachable:
|
| + state = 'U';
|
| + break;
|
| + case SsaEnv::kMerged:
|
| + state = 'M';
|
| + break;
|
| + case SsaEnv::kControlEnd:
|
| + state = 'E';
|
| + break;
|
| + }
|
| + }
|
| + PrintF(" env = %p, state = %c, reason = %s", static_cast<void*>(env),
|
| + state, reason);
|
| + if (env && env->control) {
|
| + PrintF(", control = ");
|
| + compiler::WasmGraphBuilder::PrintDebugName(env->control);
|
| + }
|
| + PrintF("\n");
|
| }
|
| - TRACE("\n");
|
| #endif
|
| ssa_env_ = env;
|
| if (builder_) {
|
| @@ -1493,8 +1307,6 @@ class SR_WasmDecoder : public WasmDecoder {
|
| size_t size = sizeof(TFNode*) * EnvironmentCount();
|
| result->control = from->control;
|
| result->effect = from->effect;
|
| - result->state = from->state == SsaEnv::kUnreachable ? SsaEnv::kUnreachable
|
| - : SsaEnv::kReached;
|
|
|
| if (from->go()) {
|
| result->state = SsaEnv::kReached;
|
| @@ -1541,86 +1353,45 @@ class SR_WasmDecoder : public WasmDecoder {
|
| virtual void onFirstError() {
|
| limit_ = start_; // Terminate decoding loop.
|
| builder_ = nullptr; // Don't build any more nodes.
|
| -#if DEBUG
|
| - PrintStackForDebugging();
|
| -#endif
|
| + TRACE(" !%s\n", error_msg_.get());
|
| }
|
| -
|
| -#if DEBUG
|
| - void PrintStackForDebugging() { PrintProduction(0); }
|
| -
|
| - void PrintProduction(size_t depth) {
|
| - if (depth >= stack_.size()) return;
|
| - Production* p = &stack_[depth];
|
| - for (size_t d = 0; d < depth; d++) PrintF(" ");
|
| -
|
| - PrintF("@%d %s [%d]\n", static_cast<int>(p->tree->pc - start_),
|
| - WasmOpcodes::OpcodeName(p->opcode()), p->tree->count);
|
| - for (int i = 0; i < p->index; i++) {
|
| - Tree* child = p->tree->children[i];
|
| - for (size_t d = 0; d <= depth; d++) PrintF(" ");
|
| - PrintF("@%d %s [%d]", static_cast<int>(child->pc - start_),
|
| - WasmOpcodes::OpcodeName(child->opcode()), child->count);
|
| - if (child->node) {
|
| - PrintF(" => TF");
|
| - compiler::WasmGraphBuilder::PrintDebugName(child->node);
|
| - }
|
| - PrintF("\n");
|
| - }
|
| - PrintProduction(depth + 1);
|
| - }
|
| -#endif
|
| -
|
| BitVector* AnalyzeLoopAssignment(const byte* pc) {
|
| if (pc >= limit_) return nullptr;
|
| if (*pc != kExprLoop) return nullptr;
|
|
|
| BitVector* assigned =
|
| - new (zone_) BitVector(static_cast<int>(total_locals_), zone_);
|
| - // Keep a stack to model the nesting of expressions.
|
| - std::vector<int> arity_stack;
|
| - arity_stack.push_back(OpcodeArity(pc));
|
| - pc += OpcodeLength(pc);
|
| -
|
| + new (zone_) BitVector(static_cast<int>(local_type_vec_.size()), zone_);
|
| + int depth = 0;
|
| // Iteratively process all AST nodes nested inside the loop.
|
| - while (pc < limit_) {
|
| - WasmOpcode opcode = static_cast<WasmOpcode>(*pc);
|
| - int arity = 0;
|
| + while (pc_ < limit_) {
|
| + WasmOpcode opcode = static_cast<WasmOpcode>(*pc_);
|
| int length = 1;
|
| - int assigned_index = -1;
|
| - if (opcode == kExprSetLocal) {
|
| - LocalIndexOperand operand(this, pc);
|
| - if (assigned->length() > 0 &&
|
| - static_cast<int>(operand.index) < assigned->length()) {
|
| - // Unverified code might have an out-of-bounds index.
|
| - // Ignore out-of-bounds indices, as the main verification will fail.
|
| - assigned->Add(operand.index);
|
| - assigned_index = operand.index;
|
| + switch (opcode) {
|
| + case kExprLoop:
|
| + case kExprIf:
|
| + case kExprBlock:
|
| + depth++;
|
| + DCHECK_EQ(1, OpcodeLength(pc_));
|
| + break;
|
| + case kExprSetLocal: {
|
| + LocalIndexOperand operand(this, pc_);
|
| + if (assigned->length() > 0 &&
|
| + static_cast<int>(operand.index) < assigned->length()) {
|
| + // Unverified code might have an out-of-bounds index.
|
| + assigned->Add(operand.index);
|
| + }
|
| + length = 1 + operand.length;
|
| + break;
|
| }
|
| - arity = 1;
|
| - length = 1 + operand.length;
|
| - } else {
|
| - arity = OpcodeArity(pc);
|
| - length = OpcodeLength(pc);
|
| - }
|
| -
|
| - TRACE("loop-assign module+%-6d %s func+%d: 0x%02x %s", baserel(pc),
|
| - indentation(), startrel(pc), opcode,
|
| - WasmOpcodes::OpcodeName(opcode));
|
| -
|
| - if (assigned_index >= 0) {
|
| - TRACE(" (assigned local #%d)\n", assigned_index);
|
| - } else {
|
| - TRACE("\n");
|
| - }
|
| -
|
| - pc += length;
|
| - arity_stack.push_back(arity);
|
| - while (arity_stack.back() == 0) {
|
| - arity_stack.pop_back();
|
| - if (arity_stack.empty()) return assigned; // reached end of loop
|
| - arity_stack.back()--;
|
| + case kExprEnd:
|
| + depth--;
|
| + break;
|
| + default:
|
| + length = OpcodeLength(pc_);
|
| + break;
|
| }
|
| + if (depth <= 0) break;
|
| + pc_ += length;
|
| }
|
| return assigned;
|
| }
|
| @@ -1637,15 +1408,15 @@ std::vector<LocalType>* DecodeLocalDeclsForTesting(const byte* start,
|
| TreeResult VerifyWasmCode(FunctionBody& body) {
|
| Zone zone;
|
| SR_WasmDecoder decoder(&zone, nullptr, body);
|
| - TreeResult result = decoder.Decode();
|
| - return result;
|
| + decoder.Decode();
|
| + return decoder.toResult<Tree*>(nullptr);
|
| }
|
|
|
| TreeResult BuildTFGraph(TFBuilder* builder, FunctionBody& body) {
|
| Zone zone;
|
| SR_WasmDecoder decoder(&zone, builder, body);
|
| - TreeResult result = decoder.Decode();
|
| - return result;
|
| + decoder.Decode();
|
| + return decoder.toResult<Tree*>(nullptr);
|
| }
|
|
|
|
|
|
|