| OLD | NEW |
| (Empty) |
| 1 // Copyright 2015 the V8 project authors. All rights reserved. | |
| 2 // Use of this source code is governed by a BSD-style license that can be | |
| 3 // found in the LICENSE file. | |
| 4 | |
| 5 #include "src/signature.h" | |
| 6 | |
| 7 #include "src/bit-vector.h" | |
| 8 #include "src/flags.h" | |
| 9 #include "src/handles.h" | |
| 10 #include "src/zone/zone-containers.h" | |
| 11 | |
| 12 #include "src/wasm/ast-decoder.h" | |
| 13 #include "src/wasm/decoder.h" | |
| 14 #include "src/wasm/wasm-module.h" | |
| 15 #include "src/wasm/wasm-opcodes.h" | |
| 16 | |
| 17 #include "src/ostreams.h" | |
| 18 | |
| 19 #include "src/compiler/wasm-compiler.h" | |
| 20 | |
| 21 namespace v8 { | |
| 22 namespace internal { | |
| 23 namespace wasm { | |
| 24 | |
| 25 #if DEBUG | |
| 26 #define TRACE(...) \ | |
| 27 do { \ | |
| 28 if (FLAG_trace_wasm_decoder) PrintF(__VA_ARGS__); \ | |
| 29 } while (false) | |
| 30 #else | |
| 31 #define TRACE(...) | |
| 32 #endif | |
| 33 | |
// Verifies that a prototype opcode may be used: reports an error for
// asm.js-origin modules, and reports an error and bails out of the opcode
// dispatch when the feature flag is off.
// NOTE: deliberately NOT wrapped in do { } while (false) -- the {break}
// below must target the opcode switch at the expansion site.
#define CHECK_PROTOTYPE_OPCODE(flag)                       \
  if (module_ && module_->module->origin == kAsmJsOrigin) { \
    error("Opcode not supported for asmjs modules");        \
  }                                                         \
  if (!FLAG_##flag) {                                       \
    error("Invalid opcode (enable with --" #flag ")");      \
    break;                                                  \
  }
// TODO(titzer): this is only for intermediate migration.
#define IMPLICIT_FUNCTION_END 1
| 44 | |
| 45 // An SsaEnv environment carries the current local variable renaming | |
| 46 // as well as the current effect and control dependency in the TF graph. | |
| 47 // It maintains a control state that tracks whether the environment | |
| 48 // is reachable, has reached a control end, or has been merged. | |
struct SsaEnv {
  // Enum order matters: go() treats everything >= kReached as reachable.
  enum State { kControlEnd, kUnreachable, kReached, kMerged };

  State state;      // reachability / merge state of this environment.
  TFNode* control;  // current control dependency.
  TFNode* effect;   // current effect dependency.
  TFNode** locals;  // SSA renaming of the function's locals.

  // True iff this environment is still reachable (kReached or kMerged).
  bool go() { return state >= kReached; }
  // Marks the environment dead (or {new_state}) and drops all node
  // references so they cannot be used accidentally afterwards.
  void Kill(State new_state = kControlEnd) {
    state = new_state;
    locals = nullptr;
    control = nullptr;
    effect = nullptr;
  }
  // Downgrades a merged environment back to plainly reached.
  void SetNotMerged() {
    if (state == kMerged) state = kReached;
  }
};
| 68 | |
| 69 // An entry on the value stack. | |
struct Value {
  const byte* pc;  // position of the instruction that produced this value.
  TFNode* node;    // TurboFan node for the value (null when not building).
  LocalType type;  // static type of the value.
};
| 75 | |
// Per-try bookkeeping, zone-allocated alongside the try's Control entry.
struct TryInfo : public ZoneObject {
  SsaEnv* catch_env;  // environment for entering the catch block; cleared
                      // (set to nullptr) once the catch has been seen.
  TFNode* exception;  // exception value node, if any.

  explicit TryInfo(SsaEnv* c) : catch_env(c), exception(nullptr) {}
};
| 82 | |
// Values merged at the end of a control construct. The union is
// discriminated by {arity}: exactly one value is stored inline,
// more than one is stored as a zone-allocated array.
struct MergeValues {
  uint32_t arity;   // number of merged values.
  union {
    Value* array;   // used when arity > 1.
    Value first;    // used when arity == 1.
  } vals;  // Either multiple values or a single value.

  // Returns the first merged value regardless of representation.
  Value& first() {
    DCHECK_GT(arity, 0);
    return arity == 1 ? vals.first : vals.array[0];
  }
};
| 95 | |
// Placeholder used to initialize the (empty) merge-value union.
static Value* NO_VALUE = nullptr;

enum ControlKind { kControlIf, kControlBlock, kControlLoop, kControlTry };
| 99 | |
| 100 // An entry on the control stack (i.e. if, block, loop). | |
// An entry on the control stack (i.e. if, block, loop).
struct Control {
  const byte* pc;     // position of the opening opcode.
  ControlKind kind;   // if / block / loop / try.
  int stack_depth;    // stack height at the beginning of the construct.
  SsaEnv* end_env;    // end environment for the construct.
  SsaEnv* false_env;  // false environment (only for if).
  TryInfo* try_info;  // Information used for compiling try statements.
  int32_t previous_catch;  // The previous Control (on the stack) with a catch.

  // Values merged into the end of this control construct.
  MergeValues merge;

  inline bool is_if() const { return kind == kControlIf; }
  inline bool is_block() const { return kind == kControlBlock; }
  inline bool is_loop() const { return kind == kControlLoop; }
  inline bool is_try() const { return kind == kControlTry; }

  // Named constructors. Aggregate-initialization order must match the
  // member declaration order above; merge starts empty ({0, {NO_VALUE}}).
  static Control Block(const byte* pc, int stack_depth, SsaEnv* end_env,
                       int32_t previous_catch) {
    return {pc, kControlBlock, stack_depth, end_env,
            nullptr, nullptr, previous_catch, {0, {NO_VALUE}}};
  }

  static Control If(const byte* pc, int stack_depth, SsaEnv* end_env,
                    SsaEnv* false_env, int32_t previous_catch) {
    return {pc, kControlIf, stack_depth, end_env,
            false_env, nullptr, previous_catch, {0, {NO_VALUE}}};
  }

  static Control Loop(const byte* pc, int stack_depth, SsaEnv* end_env,
                      int32_t previous_catch) {
    return {pc, kControlLoop, stack_depth, end_env,
            nullptr, nullptr, previous_catch, {0, {NO_VALUE}}};
  }

  // Try also allocates its TryInfo in {zone}; {catch_env} must be non-null.
  static Control Try(const byte* pc, int stack_depth, SsaEnv* end_env,
                     Zone* zone, SsaEnv* catch_env, int32_t previous_catch) {
    DCHECK_NOT_NULL(catch_env);
    TryInfo* try_info = new (zone) TryInfo(catch_env);
    return {pc, kControlTry, stack_depth, end_env,
            nullptr, try_info, previous_catch, {0, {NO_VALUE}}};
  }
};
| 145 | |
| 146 // Macros that build nodes only if there is a graph and the current SSA | |
| 147 // environment is reachable from start. This avoids problems with malformed | |
| 148 // TF graphs when decoding inputs that have unreachable code. | |
| 149 #define BUILD(func, ...) \ | |
| 150 (build() ? CheckForException(builder_->func(__VA_ARGS__)) : nullptr) | |
| 151 #define BUILD0(func) (build() ? CheckForException(builder_->func()) : nullptr) | |
| 152 | |
// Operand for SIMD lane accesses: reads the lane index immediate.
struct LaneOperand {
  uint8_t lane;     // lane index immediate.
  unsigned length;  // length of the immediate, in bytes.

  inline LaneOperand(Decoder* decoder, const byte* pc) {
    // The lane byte is read at pc + 2, i.e. after the SIMD prefix byte and
    // the SIMD opcode index byte. NOTE(review): assumes every opcode with a
    // lane operand is a 2-byte prefixed opcode -- confirm against the
    // opcode table.
    lane = decoder->checked_read_u8(pc, 2, "lane");
    length = 1;
  }
};
| 162 | |
| 163 // Generic Wasm bytecode decoder with utilities for decoding operands, | |
| 164 // lengths, etc. | |
| 165 class WasmDecoder : public Decoder { | |
| 166 public: | |
| 167 WasmDecoder(ModuleEnv* module, FunctionSig* sig, const byte* start, | |
| 168 const byte* end) | |
| 169 : Decoder(start, end), | |
| 170 module_(module), | |
| 171 sig_(sig), | |
| 172 total_locals_(0), | |
| 173 local_types_(nullptr) {} | |
| 174 ModuleEnv* module_; | |
| 175 FunctionSig* sig_; | |
| 176 size_t total_locals_; | |
| 177 ZoneVector<LocalType>* local_types_; | |
| 178 | |
| 179 inline bool Validate(const byte* pc, LocalIndexOperand& operand) { | |
| 180 if (operand.index < total_locals_) { | |
| 181 if (local_types_) { | |
| 182 operand.type = local_types_->at(operand.index); | |
| 183 } else { | |
| 184 operand.type = kAstStmt; | |
| 185 } | |
| 186 return true; | |
| 187 } | |
| 188 error(pc, pc + 1, "invalid local index: %u", operand.index); | |
| 189 return false; | |
| 190 } | |
| 191 | |
| 192 inline bool Validate(const byte* pc, GlobalIndexOperand& operand) { | |
| 193 ModuleEnv* m = module_; | |
| 194 if (m && m->module && operand.index < m->module->globals.size()) { | |
| 195 operand.global = &m->module->globals[operand.index]; | |
| 196 operand.type = operand.global->type; | |
| 197 return true; | |
| 198 } | |
| 199 error(pc, pc + 1, "invalid global index: %u", operand.index); | |
| 200 return false; | |
| 201 } | |
| 202 | |
| 203 inline bool Complete(const byte* pc, CallFunctionOperand& operand) { | |
| 204 ModuleEnv* m = module_; | |
| 205 if (m && m->module && operand.index < m->module->functions.size()) { | |
| 206 operand.sig = m->module->functions[operand.index].sig; | |
| 207 return true; | |
| 208 } | |
| 209 return false; | |
| 210 } | |
| 211 | |
| 212 inline bool Validate(const byte* pc, CallFunctionOperand& operand) { | |
| 213 if (Complete(pc, operand)) { | |
| 214 return true; | |
| 215 } | |
| 216 error(pc, pc + 1, "invalid function index: %u", operand.index); | |
| 217 return false; | |
| 218 } | |
| 219 | |
| 220 inline bool Complete(const byte* pc, CallIndirectOperand& operand) { | |
| 221 ModuleEnv* m = module_; | |
| 222 if (m && m->module && operand.index < m->module->signatures.size()) { | |
| 223 operand.sig = m->module->signatures[operand.index]; | |
| 224 return true; | |
| 225 } | |
| 226 return false; | |
| 227 } | |
| 228 | |
| 229 inline bool Validate(const byte* pc, CallIndirectOperand& operand) { | |
| 230 uint32_t table_index = 0; | |
| 231 if (!module_->IsValidTable(table_index)) { | |
| 232 error("function table has to exist to execute call_indirect"); | |
| 233 return false; | |
| 234 } | |
| 235 if (Complete(pc, operand)) { | |
| 236 return true; | |
| 237 } | |
| 238 error(pc, pc + 1, "invalid signature index: #%u", operand.index); | |
| 239 return false; | |
| 240 } | |
| 241 | |
| 242 inline bool Validate(const byte* pc, BreakDepthOperand& operand, | |
| 243 ZoneVector<Control>& control) { | |
| 244 if (operand.depth < control.size()) { | |
| 245 operand.target = &control[control.size() - operand.depth - 1]; | |
| 246 return true; | |
| 247 } | |
| 248 error(pc, pc + 1, "invalid break depth: %u", operand.depth); | |
| 249 return false; | |
| 250 } | |
| 251 | |
| 252 bool Validate(const byte* pc, BranchTableOperand& operand, | |
| 253 size_t block_depth) { | |
| 254 // TODO(titzer): add extra redundant validation for br_table here? | |
| 255 return true; | |
| 256 } | |
| 257 | |
| 258 inline bool Validate(const byte* pc, LaneOperand& operand) { | |
| 259 if (operand.lane < 0 || operand.lane > 3) { | |
| 260 error(pc_, pc_ + 2, "invalid extract lane value"); | |
| 261 return false; | |
| 262 } else { | |
| 263 return true; | |
| 264 } | |
| 265 } | |
| 266 | |
| 267 unsigned OpcodeLength(const byte* pc) { | |
| 268 switch (static_cast<byte>(*pc)) { | |
| 269 #define DECLARE_OPCODE_CASE(name, opcode, sig) case kExpr##name: | |
| 270 FOREACH_LOAD_MEM_OPCODE(DECLARE_OPCODE_CASE) | |
| 271 FOREACH_STORE_MEM_OPCODE(DECLARE_OPCODE_CASE) | |
| 272 #undef DECLARE_OPCODE_CASE | |
| 273 { | |
| 274 MemoryAccessOperand operand(this, pc, UINT32_MAX); | |
| 275 return 1 + operand.length; | |
| 276 } | |
| 277 case kExprBr: | |
| 278 case kExprBrIf: { | |
| 279 BreakDepthOperand operand(this, pc); | |
| 280 return 1 + operand.length; | |
| 281 } | |
| 282 case kExprSetGlobal: | |
| 283 case kExprGetGlobal: { | |
| 284 GlobalIndexOperand operand(this, pc); | |
| 285 return 1 + operand.length; | |
| 286 } | |
| 287 | |
| 288 case kExprCallFunction: { | |
| 289 CallFunctionOperand operand(this, pc); | |
| 290 return 1 + operand.length; | |
| 291 } | |
| 292 case kExprCallIndirect: { | |
| 293 CallIndirectOperand operand(this, pc); | |
| 294 return 1 + operand.length; | |
| 295 } | |
| 296 | |
| 297 case kExprTry: | |
| 298 case kExprIf: // fall thru | |
| 299 case kExprLoop: | |
| 300 case kExprBlock: { | |
| 301 BlockTypeOperand operand(this, pc); | |
| 302 return 1 + operand.length; | |
| 303 } | |
| 304 | |
| 305 case kExprSetLocal: | |
| 306 case kExprTeeLocal: | |
| 307 case kExprGetLocal: | |
| 308 case kExprCatch: { | |
| 309 LocalIndexOperand operand(this, pc); | |
| 310 return 1 + operand.length; | |
| 311 } | |
| 312 case kExprBrTable: { | |
| 313 BranchTableOperand operand(this, pc); | |
| 314 BranchTableIterator iterator(this, operand); | |
| 315 return 1 + iterator.length(); | |
| 316 } | |
| 317 case kExprI32Const: { | |
| 318 ImmI32Operand operand(this, pc); | |
| 319 return 1 + operand.length; | |
| 320 } | |
| 321 case kExprI64Const: { | |
| 322 ImmI64Operand operand(this, pc); | |
| 323 return 1 + operand.length; | |
| 324 } | |
| 325 case kExprGrowMemory: | |
| 326 case kExprMemorySize: { | |
| 327 MemoryIndexOperand operand(this, pc); | |
| 328 return 1 + operand.length; | |
| 329 } | |
| 330 case kExprI8Const: | |
| 331 return 2; | |
| 332 case kExprF32Const: | |
| 333 return 5; | |
| 334 case kExprF64Const: | |
| 335 return 9; | |
| 336 case kSimdPrefix: { | |
| 337 byte simd_index = checked_read_u8(pc, 1, "simd_index"); | |
| 338 WasmOpcode opcode = | |
| 339 static_cast<WasmOpcode>(kSimdPrefix << 8 | simd_index); | |
| 340 switch (opcode) { | |
| 341 #define DECLARE_OPCODE_CASE(name, opcode, sig) case kExpr##name: | |
| 342 FOREACH_SIMD_0_OPERAND_OPCODE(DECLARE_OPCODE_CASE) | |
| 343 #undef DECLARE_OPCODE_CASE | |
| 344 { | |
| 345 return 2; | |
| 346 } | |
| 347 #define DECLARE_OPCODE_CASE(name, opcode, sig) case kExpr##name: | |
| 348 FOREACH_SIMD_1_OPERAND_OPCODE(DECLARE_OPCODE_CASE) | |
| 349 #undef DECLARE_OPCODE_CASE | |
| 350 { | |
| 351 return 3; | |
| 352 } | |
| 353 default: | |
| 354 error("invalid SIMD opcode"); | |
| 355 return 2; | |
| 356 } | |
| 357 } | |
| 358 default: | |
| 359 return 1; | |
| 360 } | |
| 361 } | |
| 362 }; | |
| 363 | |
| 364 static const int32_t kNullCatch = -1; | |
| 365 | |
| 366 // The full WASM decoder for bytecode. Both verifies bytecode and generates | |
| 367 // a TurboFan IR graph. | |
| 368 class WasmFullDecoder : public WasmDecoder { | |
| 369 public: | |
  // Constructs a decoder over {body}. {builder} may be null, in which case
  // the bytecode is only verified and no TurboFan graph is produced.
  WasmFullDecoder(Zone* zone, TFBuilder* builder, const FunctionBody& body)
      : WasmDecoder(body.module, body.sig, body.start, body.end),
        zone_(zone),
        builder_(builder),
        base_(body.base),
        local_type_vec_(zone),
        stack_(zone),
        control_(zone),
        last_end_found_(false),
        current_catch_(kNullCatch) {
    // Expose the local types to the base class validation helpers.
    local_types_ = &local_type_vec_;
  }
| 382 | |
  // Decodes (and, if a builder is present, compiles) the function body.
  // Returns true on success; on failure the recorded error is traced via
  // TraceFailed() and false is returned.
  bool Decode() {
    if (FLAG_wasm_code_fuzzer_gen_test) {
      PrintAstForDebugging(start_, end_);
    }
    base::ElapsedTimer decode_timer;
    if (FLAG_trace_wasm_decode_time) {
      decode_timer.Start();
    }
    stack_.clear();
    control_.clear();

    if (end_ < pc_) {
      error("function body end < start");
      return false;
    }

    DecodeLocalDecls();
    InitSsaEnv();
    DecodeFunctionBody();

    if (failed()) return TraceFailed();

#if IMPLICIT_FUNCTION_END
    // With implicit end support (old style), the function block
    // remains on the stack. Other control blocks are an error.
    if (control_.size() > 1) {
      error(pc_, control_.back().pc, "unterminated control structure");
      return TraceFailed();
    }

    // Assume an implicit end to the function body block.
    if (control_.size() == 1) {
      Control* c = &control_.back();
      if (ssa_env_->go()) {
        FallThruTo(c);
      }

      if (c->end_env->go()) {
        // Push the end values onto the stack.
        stack_.resize(c->stack_depth);
        if (c->merge.arity == 1) {
          stack_.push_back(c->merge.vals.first);
        } else {
          for (unsigned i = 0; i < c->merge.arity; i++) {
            stack_.push_back(c->merge.vals.array[i]);
          }
        }

        TRACE(" @%-8d #xx:%-20s|", startrel(pc_), "ImplicitReturn");
        SetEnv("function:end", c->end_env);
        DoReturn();
        TRACE("\n");
      }
    }
#else
    // Strict mode: all control structures, including the implicit function
    // block, must have been closed by an explicit "end".
    if (!control_.empty()) {
      error(pc_, control_.back().pc, "unterminated control structure");
      return TraceFailed();
    }

    if (!last_end_found_) {
      error("function body must end with \"end\" opcode.");
      return false;
    }
#endif

    if (FLAG_trace_wasm_decode_time) {
      double ms = decode_timer.Elapsed().InMillisecondsF();
      PrintF("wasm-decode %s (%0.3f ms)\n\n", ok() ? "ok" : "failed", ms);
    } else {
      TRACE("wasm-decode %s\n\n", ok() ? "ok" : "failed");
    }

    return true;
  }
| 458 | |
  // Traces the recorded error with module- and function-relative positions
  // and returns false, so callers can write {return TraceFailed();}.
  bool TraceFailed() {
    TRACE("wasm-error module+%-6d func+%d: %s\n\n", baserel(error_pc_),
          startrel(error_pc_), error_msg_.get());
    return false;
  }
| 464 | |
| 465 bool DecodeLocalDecls(AstLocalDecls& decls) { | |
| 466 DecodeLocalDecls(); | |
| 467 if (failed()) return false; | |
| 468 decls.decls_encoded_size = pc_offset(); | |
| 469 decls.local_types.reserve(local_type_vec_.size()); | |
| 470 for (size_t pos = 0; pos < local_type_vec_.size();) { | |
| 471 uint32_t count = 0; | |
| 472 LocalType type = local_type_vec_[pos]; | |
| 473 while (pos < local_type_vec_.size() && local_type_vec_[pos] == type) { | |
| 474 pos++; | |
| 475 count++; | |
| 476 } | |
| 477 decls.local_types.push_back(std::pair<LocalType, uint32_t>(type, count)); | |
| 478 } | |
| 479 decls.total_local_count = static_cast<uint32_t>(local_type_vec_.size()); | |
| 480 return true; | |
| 481 } | |
| 482 | |
  // Test-only helper: pretends the function has {num_locals} locals (all
  // typed i32) and runs loop-assignment analysis at {pc}.
  BitVector* AnalyzeLoopAssignmentForTesting(const byte* pc,
                                             size_t num_locals) {
    total_locals_ = num_locals;
    local_type_vec_.reserve(num_locals);
    // Grow (never shrink) the type vector, defaulting new entries to i32.
    if (num_locals > local_type_vec_.size()) {
      local_type_vec_.insert(local_type_vec_.end(),
                             num_locals - local_type_vec_.size(), kAstI32);
    }
    return AnalyzeLoopAssignment(pc);
  }
| 493 | |
 private:
  static const size_t kErrorMsgSize = 128;

  Zone* zone_;          // zone for all decoder-lifetime allocations.
  TFBuilder* builder_;  // TurboFan graph builder; null when only verifying.
  const byte* base_;    // base offset for error/trace reporting.

  SsaEnv* ssa_env_;     // the current SSA environment.

  ZoneVector<LocalType> local_type_vec_;  // types of local variables.
  ZoneVector<Value> stack_;               // stack of values.
  ZoneVector<Control> control_;           // stack of blocks, loops, and ifs.
  bool last_end_found_;                   // whether an explicit end was seen.

  // Index into {control_} of the innermost try with a pending catch, or
  // kNullCatch when there is none.
  int32_t current_catch_;

  // Returns the try info of the innermost enclosing try; requires
  // {current_catch_} to be a valid index into {control_}.
  TryInfo* current_try_info() { return control_[current_catch_].try_info; }

  // True when building a graph and the current environment is reachable;
  // gates the BUILD/BUILD0 macros.
  inline bool build() { return builder_ && ssa_env_->go(); }
| 513 | |
  // Allocates and initializes the SSA environment for function entry:
  // parameters become Param nodes and the remaining locals are seeded with
  // type-appropriate zero defaults; also emits the entry stack check.
  void InitSsaEnv() {
    TFNode* start = nullptr;
    SsaEnv* ssa_env = reinterpret_cast<SsaEnv*>(zone_->New(sizeof(SsaEnv)));
    size_t size = sizeof(TFNode*) * EnvironmentCount();
    ssa_env->state = SsaEnv::kReached;
    ssa_env->locals =
        size > 0 ? reinterpret_cast<TFNode**>(zone_->New(size)) : nullptr;

    if (builder_) {
      // +1 accounts for the implicit context/closure input of Start.
      start = builder_->Start(static_cast<int>(sig_->parameter_count() + 1));
      // Initialize local variables.
      uint32_t index = 0;
      while (index < sig_->parameter_count()) {
        ssa_env->locals[index] = builder_->Param(index, local_type_vec_[index]);
        index++;
      }
      while (index < local_type_vec_.size()) {
        LocalType type = local_type_vec_[index];
        TFNode* node = DefaultValue(type);
        while (index < local_type_vec_.size() &&
               local_type_vec_[index] == type) {
          // Do a whole run of like-typed locals at a time.
          ssa_env->locals[index++] = node;
        }
      }
      builder_->set_module(module_);
    }
    // Without a builder, {start} stays null, as do control and effect.
    ssa_env->control = start;
    ssa_env->effect = start;
    SetEnv("initial", ssa_env);
    if (builder_) {
      builder_->StackCheck(position());
    }
  }
| 548 | |
  // Returns the constant node used to zero-initialize a local of {type}.
  // Must only be called when building a graph (dereferences builder_).
  TFNode* DefaultValue(LocalType type) {
    switch (type) {
      case kAstI32:
        return builder_->Int32Constant(0);
      case kAstI64:
        return builder_->Int64Constant(0);
      case kAstF32:
        return builder_->Float32Constant(0);
      case kAstF64:
        return builder_->Float64Constant(0);
      case kAstS128:
        return builder_->CreateS128Value(0);
      default:
        // Locals can only have the value types above (kAstStmt etc. are
        // never local types).
        UNREACHABLE();
        return nullptr;
    }
  }
| 566 | |
| 567 char* indentation() { | |
| 568 static const int kMaxIndent = 64; | |
| 569 static char bytes[kMaxIndent + 1]; | |
| 570 for (int i = 0; i < kMaxIndent; ++i) bytes[i] = ' '; | |
| 571 bytes[kMaxIndent] = 0; | |
| 572 if (stack_.size() < kMaxIndent / 2) { | |
| 573 bytes[stack_.size() * 2] = 0; | |
| 574 } | |
| 575 return bytes; | |
| 576 } | |
| 577 | |
| 578 // Decodes the locals declarations, if any, populating {local_type_vec_}. | |
  // Decodes the locals declarations, if any, populating {local_type_vec_}.
  void DecodeLocalDecls() {
    DCHECK_EQ(0, local_type_vec_.size());
    // Initialize {local_type_vec} from signature.
    if (sig_) {
      local_type_vec_.reserve(sig_->parameter_count());
      for (size_t i = 0; i < sig_->parameter_count(); ++i) {
        local_type_vec_.push_back(sig_->GetParam(i));
      }
    }
    // Decode local declarations, if any.
    uint32_t entries = consume_u32v("local decls count");
    TRACE("local decls count: %u\n", entries);
    while (entries-- > 0 && pc_ < limit_) {
      uint32_t count = consume_u32v("local count");
      // NOTE(review): on 32-bit targets {count + size()} could wrap before
      // the comparison; kMaxNumWasmLocals is assumed small enough that the
      // wrapped sum still fails the check -- confirm.
      if ((count + local_type_vec_.size()) > kMaxNumWasmLocals) {
        error(pc_ - 1, "local count too large");
        return;
      }
      byte code = consume_u8("local type");
      LocalType type;
      switch (code) {
        case kLocalI32:
          type = kAstI32;
          break;
        case kLocalI64:
          type = kAstI64;
          break;
        case kLocalF32:
          type = kAstF32;
          break;
        case kLocalF64:
          type = kAstF64;
          break;
        case kLocalS128:
          type = kAstS128;
          break;
        default:
          error(pc_ - 1, "invalid local type");
          return;
      }
      local_type_vec_.insert(local_type_vec_.end(), count, type);
    }
    total_locals_ = local_type_vec_.size();
  }
| 623 | |
| 624 // Decodes the body of a function. | |
| 625 void DecodeFunctionBody() { | |
| 626 TRACE("wasm-decode %p...%p (module+%d, %d bytes) %s\n", | |
| 627 reinterpret_cast<const void*>(start_), | |
| 628 reinterpret_cast<const void*>(limit_), baserel(pc_), | |
| 629 static_cast<int>(limit_ - start_), builder_ ? "graph building" : ""); | |
| 630 | |
| 631 { | |
| 632 // Set up initial function block. | |
| 633 SsaEnv* break_env = ssa_env_; | |
| 634 SetEnv("initial env", Steal(break_env)); | |
| 635 PushBlock(break_env); | |
| 636 Control* c = &control_.back(); | |
| 637 c->merge.arity = static_cast<uint32_t>(sig_->return_count()); | |
| 638 | |
| 639 if (c->merge.arity == 1) { | |
| 640 c->merge.vals.first = {pc_, nullptr, sig_->GetReturn(0)}; | |
| 641 } else if (c->merge.arity > 1) { | |
| 642 c->merge.vals.array = zone_->NewArray<Value>(c->merge.arity); | |
| 643 for (unsigned i = 0; i < c->merge.arity; i++) { | |
| 644 c->merge.vals.array[i] = {pc_, nullptr, sig_->GetReturn(i)}; | |
| 645 } | |
| 646 } | |
| 647 } | |
| 648 | |
| 649 if (pc_ >= limit_) return; // Nothing to do. | |
| 650 | |
| 651 while (true) { // decoding loop. | |
| 652 unsigned len = 1; | |
| 653 WasmOpcode opcode = static_cast<WasmOpcode>(*pc_); | |
| 654 if (!WasmOpcodes::IsPrefixOpcode(opcode)) { | |
| 655 TRACE(" @%-8d #%02x:%-20s|", startrel(pc_), opcode, | |
| 656 WasmOpcodes::ShortOpcodeName(opcode)); | |
| 657 } | |
| 658 | |
| 659 FunctionSig* sig = WasmOpcodes::Signature(opcode); | |
| 660 if (sig) { | |
| 661 BuildSimpleOperator(opcode, sig); | |
| 662 } else { | |
| 663 // Complex bytecode. | |
| 664 switch (opcode) { | |
| 665 case kExprNop: | |
| 666 break; | |
| 667 case kExprBlock: { | |
| 668 // The break environment is the outer environment. | |
| 669 BlockTypeOperand operand(this, pc_); | |
| 670 SsaEnv* break_env = ssa_env_; | |
| 671 PushBlock(break_env); | |
| 672 SetEnv("block:start", Steal(break_env)); | |
| 673 SetBlockType(&control_.back(), operand); | |
| 674 len = 1 + operand.length; | |
| 675 break; | |
| 676 } | |
| 677 case kExprThrow: { | |
| 678 CHECK_PROTOTYPE_OPCODE(wasm_eh_prototype); | |
| 679 Value value = Pop(0, kAstI32); | |
| 680 BUILD(Throw, value.node); | |
| 681 break; | |
| 682 } | |
| 683 case kExprTry: { | |
| 684 CHECK_PROTOTYPE_OPCODE(wasm_eh_prototype); | |
| 685 BlockTypeOperand operand(this, pc_); | |
| 686 SsaEnv* outer_env = ssa_env_; | |
| 687 SsaEnv* try_env = Steal(outer_env); | |
| 688 SsaEnv* catch_env = UnreachableEnv(); | |
| 689 PushTry(outer_env, catch_env); | |
| 690 SetEnv("try_catch:start", try_env); | |
| 691 SetBlockType(&control_.back(), operand); | |
| 692 len = 1 + operand.length; | |
| 693 break; | |
| 694 } | |
| 695 case kExprCatch: { | |
| 696 CHECK_PROTOTYPE_OPCODE(wasm_eh_prototype); | |
| 697 LocalIndexOperand operand(this, pc_); | |
| 698 len = 1 + operand.length; | |
| 699 | |
| 700 if (control_.empty()) { | |
| 701 error("catch does not match any try"); | |
| 702 break; | |
| 703 } | |
| 704 | |
| 705 Control* c = &control_.back(); | |
| 706 if (!c->is_try()) { | |
| 707 error("catch does not match any try"); | |
| 708 break; | |
| 709 } | |
| 710 | |
| 711 if (c->try_info->catch_env == nullptr) { | |
| 712 error(pc_, "catch already present for try with catch"); | |
| 713 break; | |
| 714 } | |
| 715 | |
| 716 if (ssa_env_->go()) { | |
| 717 MergeValuesInto(c); | |
| 718 } | |
| 719 stack_.resize(c->stack_depth); | |
| 720 | |
| 721 DCHECK_NOT_NULL(c->try_info); | |
| 722 SsaEnv* catch_env = c->try_info->catch_env; | |
| 723 c->try_info->catch_env = nullptr; | |
| 724 SetEnv("catch:begin", catch_env); | |
| 725 current_catch_ = c->previous_catch; | |
| 726 | |
| 727 if (Validate(pc_, operand)) { | |
| 728 if (ssa_env_->locals) { | |
| 729 TFNode* exception_as_i32 = | |
| 730 BUILD(Catch, c->try_info->exception, position()); | |
| 731 ssa_env_->locals[operand.index] = exception_as_i32; | |
| 732 } | |
| 733 } | |
| 734 | |
| 735 break; | |
| 736 } | |
| 737 case kExprLoop: { | |
| 738 BlockTypeOperand operand(this, pc_); | |
| 739 SsaEnv* finish_try_env = Steal(ssa_env_); | |
| 740 // The continue environment is the inner environment. | |
| 741 SsaEnv* loop_body_env = PrepareForLoop(pc_, finish_try_env); | |
| 742 SetEnv("loop:start", loop_body_env); | |
| 743 ssa_env_->SetNotMerged(); | |
| 744 PushLoop(finish_try_env); | |
| 745 SetBlockType(&control_.back(), operand); | |
| 746 len = 1 + operand.length; | |
| 747 break; | |
| 748 } | |
| 749 case kExprIf: { | |
| 750 // Condition on top of stack. Split environments for branches. | |
| 751 BlockTypeOperand operand(this, pc_); | |
| 752 Value cond = Pop(0, kAstI32); | |
| 753 TFNode* if_true = nullptr; | |
| 754 TFNode* if_false = nullptr; | |
| 755 BUILD(BranchNoHint, cond.node, &if_true, &if_false); | |
| 756 SsaEnv* end_env = ssa_env_; | |
| 757 SsaEnv* false_env = Split(ssa_env_); | |
| 758 false_env->control = if_false; | |
| 759 SsaEnv* true_env = Steal(ssa_env_); | |
| 760 true_env->control = if_true; | |
| 761 PushIf(end_env, false_env); | |
| 762 SetEnv("if:true", true_env); | |
| 763 SetBlockType(&control_.back(), operand); | |
| 764 len = 1 + operand.length; | |
| 765 break; | |
| 766 } | |
| 767 case kExprElse: { | |
| 768 if (control_.empty()) { | |
| 769 error("else does not match any if"); | |
| 770 break; | |
| 771 } | |
| 772 Control* c = &control_.back(); | |
| 773 if (!c->is_if()) { | |
| 774 error(pc_, c->pc, "else does not match an if"); | |
| 775 break; | |
| 776 } | |
| 777 if (c->false_env == nullptr) { | |
| 778 error(pc_, c->pc, "else already present for if"); | |
| 779 break; | |
| 780 } | |
| 781 FallThruTo(c); | |
| 782 // Switch to environment for false branch. | |
| 783 stack_.resize(c->stack_depth); | |
| 784 SetEnv("if_else:false", c->false_env); | |
| 785 c->false_env = nullptr; // record that an else is already seen | |
| 786 break; | |
| 787 } | |
| 788 case kExprEnd: { | |
| 789 if (control_.empty()) { | |
| 790 error("end does not match any if, try, or block"); | |
| 791 return; | |
| 792 } | |
| 793 const char* name = "block:end"; | |
| 794 Control* c = &control_.back(); | |
| 795 if (c->is_loop()) { | |
| 796 // A loop just leaves the values on the stack. | |
| 797 TypeCheckLoopFallThru(c); | |
| 798 PopControl(); | |
| 799 SetEnv("loop:end", ssa_env_); | |
| 800 break; | |
| 801 } | |
| 802 if (c->is_if()) { | |
| 803 if (c->false_env != nullptr) { | |
| 804 // End the true branch of a one-armed if. | |
| 805 Goto(c->false_env, c->end_env); | |
| 806 if (ssa_env_->go() && | |
| 807 static_cast<int>(stack_.size()) != c->stack_depth) { | |
| 808 error("end of if expected empty stack"); | |
| 809 stack_.resize(c->stack_depth); | |
| 810 } | |
| 811 if (c->merge.arity > 0) { | |
| 812 error("non-void one-armed if"); | |
| 813 } | |
| 814 name = "if:merge"; | |
| 815 } else { | |
| 816 // End the false branch of a two-armed if. | |
| 817 name = "if_else:merge"; | |
| 818 } | |
| 819 } else if (c->is_try()) { | |
| 820 name = "try:end"; | |
| 821 | |
| 822 // validate that catch was seen. | |
| 823 if (c->try_info->catch_env != nullptr) { | |
| 824 error(pc_, "missing catch in try"); | |
| 825 break; | |
| 826 } | |
| 827 } | |
| 828 FallThruTo(c); | |
| 829 SetEnv(name, c->end_env); | |
| 830 | |
| 831 // Push the end values onto the stack. | |
| 832 stack_.resize(c->stack_depth); | |
| 833 if (c->merge.arity == 1) { | |
| 834 stack_.push_back(c->merge.vals.first); | |
| 835 } else { | |
| 836 for (unsigned i = 0; i < c->merge.arity; i++) { | |
| 837 stack_.push_back(c->merge.vals.array[i]); | |
| 838 } | |
| 839 } | |
| 840 | |
| 841 PopControl(); | |
| 842 | |
| 843 if (control_.empty()) { | |
| 844 // If the last (implicit) control was popped, check we are at end. | |
| 845 if (pc_ + 1 != end_) { | |
| 846 error(pc_, pc_ + 1, "trailing code after function end"); | |
| 847 } | |
| 848 last_end_found_ = true; | |
| 849 if (ssa_env_->go()) { | |
| 850 // The result of the block is the return value. | |
| 851 TRACE(" @%-8d #xx:%-20s|", startrel(pc_), "ImplicitReturn"); | |
| 852 DoReturn(); | |
| 853 TRACE("\n"); | |
| 854 } | |
| 855 return; | |
| 856 } | |
| 857 break; | |
| 858 } | |
| 859 case kExprSelect: { | |
| 860 Value cond = Pop(2, kAstI32); | |
| 861 Value fval = Pop(); | |
| 862 Value tval = Pop(); | |
| 863 if (tval.type == kAstStmt || tval.type != fval.type) { | |
| 864 if (tval.type != kAstEnd && fval.type != kAstEnd) { | |
| 865 error("type mismatch in select"); | |
| 866 break; | |
| 867 } | |
| 868 } | |
| 869 if (build()) { | |
| 870 DCHECK(tval.type != kAstEnd); | |
| 871 DCHECK(fval.type != kAstEnd); | |
| 872 DCHECK(cond.type != kAstEnd); | |
| 873 TFNode* controls[2]; | |
| 874 builder_->BranchNoHint(cond.node, &controls[0], &controls[1]); | |
| 875 TFNode* merge = builder_->Merge(2, controls); | |
| 876 TFNode* vals[2] = {tval.node, fval.node}; | |
| 877 TFNode* phi = builder_->Phi(tval.type, 2, vals, merge); | |
| 878 Push(tval.type, phi); | |
| 879 ssa_env_->control = merge; | |
| 880 } else { | |
| 881 Push(tval.type, nullptr); | |
| 882 } | |
| 883 break; | |
| 884 } | |
| 885 case kExprBr: { | |
| 886 BreakDepthOperand operand(this, pc_); | |
| 887 if (Validate(pc_, operand, control_)) { | |
| 888 BreakTo(operand.depth); | |
| 889 } | |
| 890 len = 1 + operand.length; | |
| 891 EndControl(); | |
| 892 break; | |
| 893 } | |
| 894 case kExprBrIf: { | |
| 895 BreakDepthOperand operand(this, pc_); | |
| 896 Value cond = Pop(0, kAstI32); | |
| 897 if (ok() && Validate(pc_, operand, control_)) { | |
| 898 SsaEnv* fenv = ssa_env_; | |
| 899 SsaEnv* tenv = Split(fenv); | |
| 900 fenv->SetNotMerged(); | |
| 901 BUILD(BranchNoHint, cond.node, &tenv->control, &fenv->control); | |
| 902 ssa_env_ = tenv; | |
| 903 BreakTo(operand.depth); | |
| 904 ssa_env_ = fenv; | |
| 905 } | |
| 906 len = 1 + operand.length; | |
| 907 break; | |
| 908 } | |
| 909 case kExprBrTable: { | |
| 910 BranchTableOperand operand(this, pc_); | |
| 911 BranchTableIterator iterator(this, operand); | |
| 912 if (Validate(pc_, operand, control_.size())) { | |
| 913 Value key = Pop(0, kAstI32); | |
| 914 if (failed()) break; | |
| 915 | |
| 916 SsaEnv* break_env = ssa_env_; | |
| 917 if (operand.table_count > 0) { | |
| 918 // Build branches to the various blocks based on the table. | |
| 919 TFNode* sw = BUILD(Switch, operand.table_count + 1, key.node); | |
| 920 | |
| 921 SsaEnv* copy = Steal(break_env); | |
| 922 ssa_env_ = copy; | |
| 923 while (ok() && iterator.has_next()) { | |
| 924 uint32_t i = iterator.cur_index(); | |
| 925 const byte* pos = iterator.pc(); | |
| 926 uint32_t target = iterator.next(); | |
| 927 if (target >= control_.size()) { | |
| 928 error(pos, "improper branch in br_table"); | |
| 929 break; | |
| 930 } | |
| 931 ssa_env_ = Split(copy); | |
| 932 ssa_env_->control = (i == operand.table_count) | |
| 933 ? BUILD(IfDefault, sw) | |
| 934 : BUILD(IfValue, i, sw); | |
| 935 BreakTo(target); | |
| 936 } | |
| 937 if (failed()) break; | |
| 938 } else { | |
| 939 // Only a default target. Do the equivalent of br. | |
| 940 const byte* pos = iterator.pc(); | |
| 941 uint32_t target = iterator.next(); | |
| 942 if (target >= control_.size()) { | |
| 943 error(pos, "improper branch in br_table"); | |
| 944 break; | |
| 945 } | |
| 946 BreakTo(target); | |
| 947 } | |
| 948 // br_table ends the control flow like br. | |
| 949 ssa_env_ = break_env; | |
| 950 } | |
| 951 len = 1 + iterator.length(); | |
| 952 break; | |
| 953 } | |
| 954 case kExprReturn: { | |
| 955 DoReturn(); | |
| 956 break; | |
| 957 } | |
| 958 case kExprUnreachable: { | |
| 959 BUILD(Unreachable, position()); | |
| 960 EndControl(); | |
| 961 break; | |
| 962 } | |
| 963 case kExprI8Const: { | |
| 964 ImmI8Operand operand(this, pc_); | |
| 965 Push(kAstI32, BUILD(Int32Constant, operand.value)); | |
| 966 len = 1 + operand.length; | |
| 967 break; | |
| 968 } | |
| 969 case kExprI32Const: { | |
| 970 ImmI32Operand operand(this, pc_); | |
| 971 Push(kAstI32, BUILD(Int32Constant, operand.value)); | |
| 972 len = 1 + operand.length; | |
| 973 break; | |
| 974 } | |
| 975 case kExprI64Const: { | |
| 976 ImmI64Operand operand(this, pc_); | |
| 977 Push(kAstI64, BUILD(Int64Constant, operand.value)); | |
| 978 len = 1 + operand.length; | |
| 979 break; | |
| 980 } | |
| 981 case kExprF32Const: { | |
| 982 ImmF32Operand operand(this, pc_); | |
| 983 Push(kAstF32, BUILD(Float32Constant, operand.value)); | |
| 984 len = 1 + operand.length; | |
| 985 break; | |
| 986 } | |
| 987 case kExprF64Const: { | |
| 988 ImmF64Operand operand(this, pc_); | |
| 989 Push(kAstF64, BUILD(Float64Constant, operand.value)); | |
| 990 len = 1 + operand.length; | |
| 991 break; | |
| 992 } | |
| 993 case kExprGetLocal: { | |
| 994 LocalIndexOperand operand(this, pc_); | |
| 995 if (Validate(pc_, operand)) { | |
| 996 if (build()) { | |
| 997 Push(operand.type, ssa_env_->locals[operand.index]); | |
| 998 } else { | |
| 999 Push(operand.type, nullptr); | |
| 1000 } | |
| 1001 } | |
| 1002 len = 1 + operand.length; | |
| 1003 break; | |
| 1004 } | |
| 1005 case kExprSetLocal: { | |
| 1006 LocalIndexOperand operand(this, pc_); | |
| 1007 if (Validate(pc_, operand)) { | |
| 1008 Value val = Pop(0, local_type_vec_[operand.index]); | |
| 1009 if (ssa_env_->locals) ssa_env_->locals[operand.index] = val.node; | |
| 1010 } | |
| 1011 len = 1 + operand.length; | |
| 1012 break; | |
| 1013 } | |
| 1014 case kExprTeeLocal: { | |
| 1015 LocalIndexOperand operand(this, pc_); | |
| 1016 if (Validate(pc_, operand)) { | |
| 1017 Value val = Pop(0, local_type_vec_[operand.index]); | |
| 1018 if (ssa_env_->locals) ssa_env_->locals[operand.index] = val.node; | |
| 1019 Push(val.type, val.node); | |
| 1020 } | |
| 1021 len = 1 + operand.length; | |
| 1022 break; | |
| 1023 } | |
| 1024 case kExprDrop: { | |
| 1025 Pop(); | |
| 1026 break; | |
| 1027 } | |
| 1028 case kExprGetGlobal: { | |
| 1029 GlobalIndexOperand operand(this, pc_); | |
| 1030 if (Validate(pc_, operand)) { | |
| 1031 Push(operand.type, BUILD(GetGlobal, operand.index)); | |
| 1032 } | |
| 1033 len = 1 + operand.length; | |
| 1034 break; | |
| 1035 } | |
| 1036 case kExprSetGlobal: { | |
| 1037 GlobalIndexOperand operand(this, pc_); | |
| 1038 if (Validate(pc_, operand)) { | |
| 1039 if (operand.global->mutability) { | |
| 1040 Value val = Pop(0, operand.type); | |
| 1041 BUILD(SetGlobal, operand.index, val.node); | |
| 1042 } else { | |
| 1043 error(pc_, pc_ + 1, "immutable global #%u cannot be assigned", | |
| 1044 operand.index); | |
| 1045 } | |
| 1046 } | |
| 1047 len = 1 + operand.length; | |
| 1048 break; | |
| 1049 } | |
| 1050 case kExprI32LoadMem8S: | |
| 1051 len = DecodeLoadMem(kAstI32, MachineType::Int8()); | |
| 1052 break; | |
| 1053 case kExprI32LoadMem8U: | |
| 1054 len = DecodeLoadMem(kAstI32, MachineType::Uint8()); | |
| 1055 break; | |
| 1056 case kExprI32LoadMem16S: | |
| 1057 len = DecodeLoadMem(kAstI32, MachineType::Int16()); | |
| 1058 break; | |
| 1059 case kExprI32LoadMem16U: | |
| 1060 len = DecodeLoadMem(kAstI32, MachineType::Uint16()); | |
| 1061 break; | |
| 1062 case kExprI32LoadMem: | |
| 1063 len = DecodeLoadMem(kAstI32, MachineType::Int32()); | |
| 1064 break; | |
| 1065 case kExprI64LoadMem8S: | |
| 1066 len = DecodeLoadMem(kAstI64, MachineType::Int8()); | |
| 1067 break; | |
| 1068 case kExprI64LoadMem8U: | |
| 1069 len = DecodeLoadMem(kAstI64, MachineType::Uint8()); | |
| 1070 break; | |
| 1071 case kExprI64LoadMem16S: | |
| 1072 len = DecodeLoadMem(kAstI64, MachineType::Int16()); | |
| 1073 break; | |
| 1074 case kExprI64LoadMem16U: | |
| 1075 len = DecodeLoadMem(kAstI64, MachineType::Uint16()); | |
| 1076 break; | |
| 1077 case kExprI64LoadMem32S: | |
| 1078 len = DecodeLoadMem(kAstI64, MachineType::Int32()); | |
| 1079 break; | |
| 1080 case kExprI64LoadMem32U: | |
| 1081 len = DecodeLoadMem(kAstI64, MachineType::Uint32()); | |
| 1082 break; | |
| 1083 case kExprI64LoadMem: | |
| 1084 len = DecodeLoadMem(kAstI64, MachineType::Int64()); | |
| 1085 break; | |
| 1086 case kExprF32LoadMem: | |
| 1087 len = DecodeLoadMem(kAstF32, MachineType::Float32()); | |
| 1088 break; | |
| 1089 case kExprF64LoadMem: | |
| 1090 len = DecodeLoadMem(kAstF64, MachineType::Float64()); | |
| 1091 break; | |
| 1092 case kExprI32StoreMem8: | |
| 1093 len = DecodeStoreMem(kAstI32, MachineType::Int8()); | |
| 1094 break; | |
| 1095 case kExprI32StoreMem16: | |
| 1096 len = DecodeStoreMem(kAstI32, MachineType::Int16()); | |
| 1097 break; | |
| 1098 case kExprI32StoreMem: | |
| 1099 len = DecodeStoreMem(kAstI32, MachineType::Int32()); | |
| 1100 break; | |
| 1101 case kExprI64StoreMem8: | |
| 1102 len = DecodeStoreMem(kAstI64, MachineType::Int8()); | |
| 1103 break; | |
| 1104 case kExprI64StoreMem16: | |
| 1105 len = DecodeStoreMem(kAstI64, MachineType::Int16()); | |
| 1106 break; | |
| 1107 case kExprI64StoreMem32: | |
| 1108 len = DecodeStoreMem(kAstI64, MachineType::Int32()); | |
| 1109 break; | |
| 1110 case kExprI64StoreMem: | |
| 1111 len = DecodeStoreMem(kAstI64, MachineType::Int64()); | |
| 1112 break; | |
| 1113 case kExprF32StoreMem: | |
| 1114 len = DecodeStoreMem(kAstF32, MachineType::Float32()); | |
| 1115 break; | |
| 1116 case kExprF64StoreMem: | |
| 1117 len = DecodeStoreMem(kAstF64, MachineType::Float64()); | |
| 1118 break; | |
| 1119 case kExprGrowMemory: { | |
| 1120 MemoryIndexOperand operand(this, pc_); | |
| 1121 if (module_->module->origin != kAsmJsOrigin) { | |
| 1122 Value val = Pop(0, kAstI32); | |
| 1123 Push(kAstI32, BUILD(GrowMemory, val.node)); | |
| 1124 } else { | |
| 1125 error("grow_memory is not supported for asmjs modules"); | |
| 1126 } | |
| 1127 len = 1 + operand.length; | |
| 1128 break; | |
| 1129 } | |
| 1130 case kExprMemorySize: { | |
| 1131 MemoryIndexOperand operand(this, pc_); | |
| 1132 Push(kAstI32, BUILD(CurrentMemoryPages)); | |
| 1133 len = 1 + operand.length; | |
| 1134 break; | |
| 1135 } | |
| 1136 case kExprCallFunction: { | |
| 1137 CallFunctionOperand operand(this, pc_); | |
| 1138 if (Validate(pc_, operand)) { | |
| 1139 TFNode** buffer = PopArgs(operand.sig); | |
| 1140 TFNode** rets = nullptr; | |
| 1141 BUILD(CallDirect, operand.index, buffer, &rets, position()); | |
| 1142 PushReturns(operand.sig, rets); | |
| 1143 } | |
| 1144 len = 1 + operand.length; | |
| 1145 break; | |
| 1146 } | |
| 1147 case kExprCallIndirect: { | |
| 1148 CallIndirectOperand operand(this, pc_); | |
| 1149 if (Validate(pc_, operand)) { | |
| 1150 Value index = Pop(0, kAstI32); | |
| 1151 TFNode** buffer = PopArgs(operand.sig); | |
| 1152 if (buffer) buffer[0] = index.node; | |
| 1153 TFNode** rets = nullptr; | |
| 1154 BUILD(CallIndirect, operand.index, buffer, &rets, position()); | |
| 1155 PushReturns(operand.sig, rets); | |
| 1156 } | |
| 1157 len = 1 + operand.length; | |
| 1158 break; | |
| 1159 } | |
| 1160 case kSimdPrefix: { | |
| 1161 CHECK_PROTOTYPE_OPCODE(wasm_simd_prototype); | |
| 1162 len++; | |
| 1163 byte simd_index = checked_read_u8(pc_, 1, "simd index"); | |
| 1164 opcode = static_cast<WasmOpcode>(opcode << 8 | simd_index); | |
| 1165 TRACE(" @%-4d #%02x #%02x:%-20s|", startrel(pc_), kSimdPrefix, | |
| 1166 simd_index, WasmOpcodes::ShortOpcodeName(opcode)); | |
| 1167 len += DecodeSimdOpcode(opcode); | |
| 1168 break; | |
| 1169 } | |
| 1170 case kAtomicPrefix: { | |
| 1171 if (!module_ || module_->module->origin != kAsmJsOrigin) { | |
| 1172 error("Atomics are allowed only in AsmJs modules"); | |
| 1173 break; | |
| 1174 } | |
| 1175 if (!FLAG_wasm_atomics_prototype) { | |
| 1176 error("Invalid opcode (enable with --wasm_atomics_prototype)"); | |
| 1177 break; | |
| 1178 } | |
| 1179 len = 2; | |
| 1180 byte atomic_opcode = checked_read_u8(pc_, 1, "atomic index"); | |
| 1181 opcode = static_cast<WasmOpcode>(opcode << 8 | atomic_opcode); | |
| 1182 sig = WasmOpcodes::AtomicSignature(opcode); | |
| 1183 if (sig) { | |
| 1184 BuildAtomicOperator(opcode); | |
| 1185 } | |
| 1186 break; | |
| 1187 } | |
| 1188 default: { | |
| 1189 // Deal with special asmjs opcodes. | |
| 1190 if (module_ && module_->module->origin == kAsmJsOrigin) { | |
| 1191 sig = WasmOpcodes::AsmjsSignature(opcode); | |
| 1192 if (sig) { | |
| 1193 BuildSimpleOperator(opcode, sig); | |
| 1194 } | |
| 1195 } else { | |
| 1196 error("Invalid opcode"); | |
| 1197 return; | |
| 1198 } | |
| 1199 } | |
| 1200 } | |
| 1201 } | |
| 1202 | |
| 1203 #if DEBUG | |
| 1204 if (FLAG_trace_wasm_decoder) { | |
| 1205 for (size_t i = 0; i < stack_.size(); ++i) { | |
| 1206 Value& val = stack_[i]; | |
| 1207 WasmOpcode opcode = static_cast<WasmOpcode>(*val.pc); | |
| 1208 if (WasmOpcodes::IsPrefixOpcode(opcode)) { | |
| 1209 opcode = static_cast<WasmOpcode>(opcode << 8 | *(val.pc + 1)); | |
| 1210 } | |
| 1211 PrintF(" %c@%d:%s", WasmOpcodes::ShortNameOf(val.type), | |
| 1212 static_cast<int>(val.pc - start_), | |
| 1213 WasmOpcodes::ShortOpcodeName(opcode)); | |
| 1214 switch (opcode) { | |
| 1215 case kExprI32Const: { | |
| 1216 ImmI32Operand operand(this, val.pc); | |
| 1217 PrintF("[%d]", operand.value); | |
| 1218 break; | |
| 1219 } | |
| 1220 case kExprGetLocal: { | |
| 1221 LocalIndexOperand operand(this, val.pc); | |
| 1222 PrintF("[%u]", operand.index); | |
| 1223 break; | |
| 1224 } | |
| 1225 case kExprSetLocal: // fallthru | |
| 1226 case kExprTeeLocal: { | |
| 1227 LocalIndexOperand operand(this, val.pc); | |
| 1228 PrintF("[%u]", operand.index); | |
| 1229 break; | |
| 1230 } | |
| 1231 default: | |
| 1232 break; | |
| 1233 } | |
| 1234 } | |
| 1235 PrintF("\n"); | |
| 1236 } | |
| 1237 #endif | |
| 1238 pc_ += len; | |
| 1239 if (pc_ >= limit_) { | |
| 1240 // End of code reached or exceeded. | |
| 1241 if (pc_ > limit_ && ok()) error("Beyond end of code"); | |
| 1242 return; | |
| 1243 } | |
| 1244 } // end decode loop | |
| 1245 } | |
| 1246 | |
  // Marks the current SSA environment as having reached a control-flow end
  // (e.g. after br, return, or unreachable); subsequent code is dead.
  void EndControl() { ssa_env_->Kill(SsaEnv::kControlEnd); }
| 1248 | |
  // Initializes the merge arity and merge value slots of control block {c}
  // from the decoded block-type {operand}.
  void SetBlockType(Control* c, BlockTypeOperand& operand) {
    c->merge.arity = operand.arity;
    if (c->merge.arity == 1) {
      // Single result: stored inline, no zone allocation needed.
      c->merge.vals.first = {pc_, nullptr, operand.read_entry(0)};
    } else if (c->merge.arity > 1) {
      // Multiple results: allocate an array of value slots in the zone.
      c->merge.vals.array = zone_->NewArray<Value>(c->merge.arity);
      for (unsigned i = 0; i < c->merge.arity; i++) {
        c->merge.vals.array[i] = {pc_, nullptr, operand.read_entry(i)};
      }
    }
  }
| 1260 | |
| 1261 TFNode** PopArgs(FunctionSig* sig) { | |
| 1262 if (build()) { | |
| 1263 int count = static_cast<int>(sig->parameter_count()); | |
| 1264 TFNode** buffer = builder_->Buffer(count + 1); | |
| 1265 buffer[0] = nullptr; // reserved for code object or function index. | |
| 1266 for (int i = count - 1; i >= 0; i--) { | |
| 1267 buffer[i + 1] = Pop(i, sig->GetParam(i)).node; | |
| 1268 } | |
| 1269 return buffer; | |
| 1270 } else { | |
| 1271 int count = static_cast<int>(sig->parameter_count()); | |
| 1272 for (int i = count - 1; i >= 0; i--) { | |
| 1273 Pop(i, sig->GetParam(i)); | |
| 1274 } | |
| 1275 return nullptr; | |
| 1276 } | |
| 1277 } | |
| 1278 | |
  // Returns the single return type of {sig}, or kAstStmt ("no value") when
  // the signature has no returns.
  LocalType GetReturnType(FunctionSig* sig) {
    return sig->return_count() == 0 ? kAstStmt : sig->GetReturn();
  }
| 1282 | |
  // Pushes a new block entry onto the control stack, recording the current
  // value-stack depth so the block's results can be checked at its end.
  void PushBlock(SsaEnv* end_env) {
    const int stack_depth = static_cast<int>(stack_.size());
    control_.emplace_back(
        Control::Block(pc_, stack_depth, end_env, current_catch_));
  }
| 1288 | |
  // Pushes a new loop entry onto the control stack; branches to a loop
  // target its header rather than carrying values.
  void PushLoop(SsaEnv* end_env) {
    const int stack_depth = static_cast<int>(stack_.size());
    control_.emplace_back(
        Control::Loop(pc_, stack_depth, end_env, current_catch_));
  }
| 1294 | |
  // Pushes a new if entry onto the control stack; {false_env} is entered at
  // the matching else (or at end, when there is no else).
  void PushIf(SsaEnv* end_env, SsaEnv* false_env) {
    const int stack_depth = static_cast<int>(stack_.size());
    control_.emplace_back(
        Control::If(pc_, stack_depth, end_env, false_env, current_catch_));
  }
| 1300 | |
  // Pushes a new try entry onto the control stack and makes it the current
  // catch scope, so thrown exceptions inside route to {catch_env}.
  void PushTry(SsaEnv* end_env, SsaEnv* catch_env) {
    const int stack_depth = static_cast<int>(stack_.size());
    control_.emplace_back(Control::Try(pc_, stack_depth, end_env, zone_,
                                       catch_env, current_catch_));
    current_catch_ = static_cast<int32_t>(control_.size() - 1);
  }
| 1307 | |
  // Pops the innermost entry off the control stack (at the end of a block,
  // loop, if, or try).
  void PopControl() { control_.pop_back(); }
| 1309 | |
| 1310 int DecodeLoadMem(LocalType type, MachineType mem_type) { | |
| 1311 MemoryAccessOperand operand(this, pc_, | |
| 1312 ElementSizeLog2Of(mem_type.representation())); | |
| 1313 | |
| 1314 Value index = Pop(0, kAstI32); | |
| 1315 TFNode* node = BUILD(LoadMem, type, mem_type, index.node, operand.offset, | |
| 1316 operand.alignment, position()); | |
| 1317 Push(type, node); | |
| 1318 return 1 + operand.length; | |
| 1319 } | |
| 1320 | |
| 1321 int DecodeStoreMem(LocalType type, MachineType mem_type) { | |
| 1322 MemoryAccessOperand operand(this, pc_, | |
| 1323 ElementSizeLog2Of(mem_type.representation())); | |
| 1324 Value val = Pop(1, type); | |
| 1325 Value index = Pop(0, kAstI32); | |
| 1326 BUILD(StoreMem, mem_type, index.node, operand.offset, operand.alignment, | |
| 1327 val.node, position()); | |
| 1328 return 1 + operand.length; | |
| 1329 } | |
| 1330 | |
  // Decodes a SIMD extract-lane opcode: pops a s128 value and pushes the
  // extracted lane as a value of {type}. Returns the lane-operand length.
  unsigned ExtractLane(WasmOpcode opcode, LocalType type) {
    LaneOperand operand(this, pc_);
    if (Validate(pc_, operand)) {
      compiler::NodeVector inputs(1, zone_);
      inputs[0] = Pop(0, LocalType::kSimd128).node;
      TFNode* node = BUILD(SimdLaneOp, opcode, operand.lane, inputs);
      Push(type, node);
    }
    return operand.length;
  }
| 1341 | |
  // Decodes a SIMD replace-lane opcode: pops the replacement value of {type}
  // (top of stack) and the s128 operand, and pushes the updated s128 value.
  // Returns the lane-operand length.
  unsigned ReplaceLane(WasmOpcode opcode, LocalType type) {
    LaneOperand operand(this, pc_);
    if (Validate(pc_, operand)) {
      compiler::NodeVector inputs(2, zone_);
      // Pop in reverse order: replacement value first, then the s128 input.
      inputs[1] = Pop(1, type).node;
      inputs[0] = Pop(0, LocalType::kSimd128).node;
      TFNode* node = BUILD(SimdLaneOp, opcode, operand.lane, inputs);
      Push(LocalType::kSimd128, node);
    }
    return operand.length;
  }
| 1353 | |
  // Decodes a (prefixed) SIMD opcode. Lane opcodes are handled specially;
  // all other SIMD opcodes are driven by their signature. Returns the number
  // of extra bytes consumed beyond the two-byte prefixed opcode.
  unsigned DecodeSimdOpcode(WasmOpcode opcode) {
    unsigned len = 0;
    switch (opcode) {
      case kExprI32x4ExtractLane: {
        len = ExtractLane(opcode, LocalType::kWord32);
        break;
      }
      case kExprF32x4ExtractLane: {
        len = ExtractLane(opcode, LocalType::kFloat32);
        break;
      }
      case kExprI32x4ReplaceLane: {
        len = ReplaceLane(opcode, LocalType::kWord32);
        break;
      }
      case kExprF32x4ReplaceLane: {
        len = ReplaceLane(opcode, LocalType::kFloat32);
        break;
      }
      default: {
        // Generic case: pop operands per the opcode's signature and build a
        // single SIMD node.
        FunctionSig* sig = WasmOpcodes::Signature(opcode);
        if (sig != nullptr) {
          compiler::NodeVector inputs(sig->parameter_count(), zone_);
          // Pop right-to-left so type errors report the correct index.
          for (size_t i = sig->parameter_count(); i > 0; i--) {
            Value val = Pop(static_cast<int>(i - 1), sig->GetParam(i - 1));
            inputs[i - 1] = val.node;
          }
          TFNode* node = BUILD(SimdOp, opcode, inputs);
          Push(GetReturnType(sig), node);
        } else {
          error("invalid simd opcode");
        }
      }
    }
    return len;
  }
| 1390 | |
  // Placeholder for prefixed atomic opcodes; graph building for atomics is
  // not implemented yet (the decoder only validates the signature).
  void BuildAtomicOperator(WasmOpcode opcode) { UNIMPLEMENTED(); }
| 1392 | |
| 1393 void DoReturn() { | |
| 1394 int count = static_cast<int>(sig_->return_count()); | |
| 1395 TFNode** buffer = nullptr; | |
| 1396 if (build()) buffer = builder_->Buffer(count); | |
| 1397 | |
| 1398 // Pop return values off the stack in reverse order. | |
| 1399 for (int i = count - 1; i >= 0; i--) { | |
| 1400 Value val = Pop(i, sig_->GetReturn(i)); | |
| 1401 if (buffer) buffer[i] = val.node; | |
| 1402 } | |
| 1403 | |
| 1404 BUILD(Return, count, buffer); | |
| 1405 EndControl(); | |
| 1406 } | |
| 1407 | |
| 1408 void Push(LocalType type, TFNode* node) { | |
| 1409 if (type != kAstStmt && type != kAstEnd) { | |
| 1410 stack_.push_back({pc_, node, type}); | |
| 1411 } | |
| 1412 } | |
| 1413 | |
  // Pushes the return value(s) of a call onto the value stack.
  void PushReturns(FunctionSig* sig, TFNode** rets) {
    for (size_t i = 0; i < sig->return_count(); i++) {
      // When verifying only, then {rets} will be null, so push null.
      Push(sig->GetReturn(i), rets ? rets[i] : nullptr);
    }
  }
| 1420 | |
| 1421 const char* SafeOpcodeNameAt(const byte* pc) { | |
| 1422 if (pc >= end_) return "<end>"; | |
| 1423 return WasmOpcodes::ShortOpcodeName(static_cast<WasmOpcode>(*pc)); | |
| 1424 } | |
| 1425 | |
| 1426 Value Pop(int index, LocalType expected) { | |
| 1427 if (!ssa_env_->go()) { | |
| 1428 // Unreachable code is essentially not typechecked. | |
| 1429 return {pc_, nullptr, expected}; | |
| 1430 } | |
| 1431 Value val = Pop(); | |
| 1432 if (val.type != expected) { | |
| 1433 if (val.type != kAstEnd) { | |
| 1434 error(pc_, val.pc, "%s[%d] expected type %s, found %s of type %s", | |
| 1435 SafeOpcodeNameAt(pc_), index, WasmOpcodes::TypeName(expected), | |
| 1436 SafeOpcodeNameAt(val.pc), WasmOpcodes::TypeName(val.type)); | |
| 1437 } | |
| 1438 } | |
| 1439 return val; | |
| 1440 } | |
| 1441 | |
| 1442 Value Pop() { | |
| 1443 if (!ssa_env_->go()) { | |
| 1444 // Unreachable code is essentially not typechecked. | |
| 1445 return {pc_, nullptr, kAstEnd}; | |
| 1446 } | |
| 1447 size_t limit = control_.empty() ? 0 : control_.back().stack_depth; | |
| 1448 if (stack_.size() <= limit) { | |
| 1449 Value val = {pc_, nullptr, kAstStmt}; | |
| 1450 error(pc_, pc_, "%s found empty stack", SafeOpcodeNameAt(pc_)); | |
| 1451 return val; | |
| 1452 } | |
| 1453 Value val = stack_.back(); | |
| 1454 stack_.pop_back(); | |
| 1455 return val; | |
| 1456 } | |
| 1457 | |
| 1458 Value PopUpTo(int stack_depth) { | |
| 1459 if (!ssa_env_->go()) { | |
| 1460 // Unreachable code is essentially not typechecked. | |
| 1461 return {pc_, nullptr, kAstEnd}; | |
| 1462 } | |
| 1463 if (stack_depth == static_cast<int>(stack_.size())) { | |
| 1464 Value val = {pc_, nullptr, kAstStmt}; | |
| 1465 return val; | |
| 1466 } else { | |
| 1467 DCHECK_LE(stack_depth, stack_.size()); | |
| 1468 Value val = Pop(); | |
| 1469 stack_.resize(stack_depth); | |
| 1470 return val; | |
| 1471 } | |
| 1472 } | |
| 1473 | |
  // Returns the offset of {ptr} relative to the module base (0 when no base
  // is set), for trace/error output.
  int baserel(const byte* ptr) {
    return base_ ? static_cast<int>(ptr - base_) : 0;
  }
| 1477 | |
  // Returns the offset of {ptr} relative to the function start, for
  // trace/error output.
  int startrel(const byte* ptr) { return static_cast<int>(ptr - start_); }
| 1479 | |
  // Implements a branch to the control entry {depth} levels up: a branch to
  // a loop targets its header (no values), a branch to a block/if merges the
  // block's result values into its end environment.
  void BreakTo(unsigned depth) {
    if (!ssa_env_->go()) return;
    Control* c = &control_[control_.size() - depth - 1];
    if (c->is_loop()) {
      // This is the inner loop block, which does not have a value.
      Goto(ssa_env_, c->end_env);
    } else {
      // Merge the value(s) into the end of the block.
      if (c->stack_depth + c->merge.arity > stack_.size()) {
        error(
            pc_, pc_,
            "expected at least %d values on the stack for br to @%d, found %d",
            c->merge.arity, startrel(c->pc),
            static_cast<int>(stack_.size() - c->stack_depth));
        return;
      }
      MergeValuesInto(c);
    }
  }
| 1499 | |
  // Handles falling off the end of a block into {c}: the stack must hold
  // exactly the block's declared result arity, which is merged into the
  // block's end environment.
  void FallThruTo(Control* c) {
    if (!ssa_env_->go()) return;
    // Merge the value(s) into the end of the block.
    int arity = static_cast<int>(c->merge.arity);
    if (c->stack_depth + arity != static_cast<int>(stack_.size())) {
      error(pc_, pc_, "expected %d elements on the stack for fallthru to @%d",
            arity, startrel(c->pc));
      return;
    }
    MergeValuesInto(c);
  }
| 1511 | |
  // Returns the i-th merge value of {c}, counted from the top of the value
  // stack (the merge values are the topmost {c->merge.arity} entries).
  inline Value& GetMergeValueFromStack(Control* c, int i) {
    return stack_[stack_.size() - c->merge.arity + i];
  }
| 1515 | |
  // Typechecks falling off the end of a loop into {c}: arity must match
  // exactly and each stack value must match the loop's declared result
  // types. Unlike blocks, no SSA merge is performed here.
  void TypeCheckLoopFallThru(Control* c) {
    if (!ssa_env_->go()) return;
    // Fallthru must match arity exactly.
    int arity = static_cast<int>(c->merge.arity);
    if (c->stack_depth + arity != static_cast<int>(stack_.size())) {
      error(pc_, pc_, "expected %d elements on the stack for fallthru to @%d",
            arity, startrel(c->pc));
      return;
    }
    // Typecheck the values left on the stack.
    for (unsigned i = 0; i < c->merge.arity; i++) {
      Value& val = GetMergeValueFromStack(c, i);
      Value& old =
          c->merge.arity == 1 ? c->merge.vals.first : c->merge.vals.array[i];
      if (val.type != old.type) {
        error(pc_, pc_, "type error in merge[%d] (expected %s, got %s)", i,
              WasmOpcodes::TypeName(old.type), WasmOpcodes::TypeName(val.type));
        return;
      }
    }
  }
| 1537 | |
  // Merges the current environment and the topmost merge-arity stack values
  // into the end environment of {c}, typechecking each value and creating
  // (or extending) phis for values that differ across predecessors.
  void MergeValuesInto(Control* c) {
    SsaEnv* target = c->end_env;
    // If the target has not been reached before, values are copied rather
    // than merged through phis.
    bool first = target->state == SsaEnv::kUnreachable;
    Goto(ssa_env_, target);

    for (unsigned i = 0; i < c->merge.arity; i++) {
      Value& val = GetMergeValueFromStack(c, i);
      Value& old =
          c->merge.arity == 1 ? c->merge.vals.first : c->merge.vals.array[i];
      if (val.type != old.type) {
        error(pc_, pc_, "type error in merge[%d] (expected %s, got %s)", i,
              WasmOpcodes::TypeName(old.type), WasmOpcodes::TypeName(val.type));
        return;
      }
      if (builder_) {
        old.node =
            first ? val.node : CreateOrMergeIntoPhi(old.type, target->control,
                                                    old.node, val.node);
      } else {
        // Validation-only mode: no graph nodes to merge.
        old.node = nullptr;
      }
    }
  }
| 1561 | |
  // Switches the decoder to SSA environment {env} and (when building a
  // graph) points the builder's control/effect cursors at it. {reason} is
  // only used for trace output.
  void SetEnv(const char* reason, SsaEnv* env) {
#if DEBUG
    if (FLAG_trace_wasm_decoder) {
      // One-letter state code for the trace: R/U/M/E, or X for no env.
      char state = 'X';
      if (env) {
        switch (env->state) {
          case SsaEnv::kReached:
            state = 'R';
            break;
          case SsaEnv::kUnreachable:
            state = 'U';
            break;
          case SsaEnv::kMerged:
            state = 'M';
            break;
          case SsaEnv::kControlEnd:
            state = 'E';
            break;
        }
      }
      PrintF("  env = %p, state = %c, reason = %s", static_cast<void*>(env),
             state, reason);
      if (env && env->control) {
        PrintF(", control = ");
        compiler::WasmGraphBuilder::PrintDebugName(env->control);
      }
      PrintF("\n");
    }
#endif
    ssa_env_ = env;
    if (builder_) {
      builder_->set_control_ptr(&env->control);
      builder_->set_effect_ptr(&env->effect);
    }
  }
| 1597 | |
  // If {node} can throw and we are inside a try scope, splits control into a
  // success path (which becomes the current environment) and an exception
  // path that is merged into the enclosing try's catch environment. Returns
  // {node} unchanged.
  TFNode* CheckForException(TFNode* node) {
    if (node == nullptr) {
      return nullptr;
    }

    const bool inside_try_scope = current_catch_ != kNullCatch;

    if (!inside_try_scope) {
      return node;
    }

    TFNode* if_success = nullptr;
    TFNode* if_exception = nullptr;
    // Nodes that cannot throw need no control split.
    if (!builder_->ThrowsException(node, &if_success, &if_exception)) {
      return node;
    }

    SsaEnv* success_env = Steal(ssa_env_);
    success_env->control = if_success;

    SsaEnv* exception_env = Split(success_env);
    exception_env->control = if_exception;
    TryInfo* try_info = current_try_info();
    Goto(exception_env, try_info->catch_env);
    TFNode* exception = try_info->exception;
    if (exception == nullptr) {
      // First exceptional edge into this catch: record the exception value.
      DCHECK_EQ(SsaEnv::kReached, try_info->catch_env->state);
      try_info->exception = if_exception;
    } else {
      // Subsequent edges: merge the exception values through a phi.
      DCHECK_EQ(SsaEnv::kMerged, try_info->catch_env->state);
      try_info->exception =
          CreateOrMergeIntoPhi(kAstI32, try_info->catch_env->control,
                               try_info->exception, if_exception);
    }

    SetEnv("if_success", success_env);
    return node;
  }
| 1636 | |
| 1637 void Goto(SsaEnv* from, SsaEnv* to) { | |
| 1638 DCHECK_NOT_NULL(to); | |
| 1639 if (!from->go()) return; | |
| 1640 switch (to->state) { | |
| 1641 case SsaEnv::kUnreachable: { // Overwrite destination. | |
| 1642 to->state = SsaEnv::kReached; | |
| 1643 to->locals = from->locals; | |
| 1644 to->control = from->control; | |
| 1645 to->effect = from->effect; | |
| 1646 break; | |
| 1647 } | |
| 1648 case SsaEnv::kReached: { // Create a new merge. | |
| 1649 to->state = SsaEnv::kMerged; | |
| 1650 if (!builder_) break; | |
| 1651 // Merge control. | |
| 1652 TFNode* controls[] = {to->control, from->control}; | |
| 1653 TFNode* merge = builder_->Merge(2, controls); | |
| 1654 to->control = merge; | |
| 1655 // Merge effects. | |
| 1656 if (from->effect != to->effect) { | |
| 1657 TFNode* effects[] = {to->effect, from->effect, merge}; | |
| 1658 to->effect = builder_->EffectPhi(2, effects, merge); | |
| 1659 } | |
| 1660 // Merge SSA values. | |
| 1661 for (int i = EnvironmentCount() - 1; i >= 0; i--) { | |
| 1662 TFNode* a = to->locals[i]; | |
| 1663 TFNode* b = from->locals[i]; | |
| 1664 if (a != b) { | |
| 1665 TFNode* vals[] = {a, b}; | |
| 1666 to->locals[i] = builder_->Phi(local_type_vec_[i], 2, vals, merge); | |
| 1667 } | |
| 1668 } | |
| 1669 break; | |
| 1670 } | |
| 1671 case SsaEnv::kMerged: { | |
| 1672 if (!builder_) break; | |
| 1673 TFNode* merge = to->control; | |
| 1674 // Extend the existing merge. | |
| 1675 builder_->AppendToMerge(merge, from->control); | |
| 1676 // Merge effects. | |
| 1677 if (builder_->IsPhiWithMerge(to->effect, merge)) { | |
| 1678 builder_->AppendToPhi(to->effect, from->effect); | |
| 1679 } else if (to->effect != from->effect) { | |
| 1680 uint32_t count = builder_->InputCount(merge); | |
| 1681 TFNode** effects = builder_->Buffer(count); | |
| 1682 for (uint32_t j = 0; j < count - 1; j++) { | |
| 1683 effects[j] = to->effect; | |
| 1684 } | |
| 1685 effects[count - 1] = from->effect; | |
| 1686 to->effect = builder_->EffectPhi(count, effects, merge); | |
| 1687 } | |
| 1688 // Merge locals. | |
| 1689 for (int i = EnvironmentCount() - 1; i >= 0; i--) { | |
| 1690 TFNode* tnode = to->locals[i]; | |
| 1691 TFNode* fnode = from->locals[i]; | |
| 1692 if (builder_->IsPhiWithMerge(tnode, merge)) { | |
| 1693 builder_->AppendToPhi(tnode, fnode); | |
| 1694 } else if (tnode != fnode) { | |
| 1695 uint32_t count = builder_->InputCount(merge); | |
| 1696 TFNode** vals = builder_->Buffer(count); | |
| 1697 for (uint32_t j = 0; j < count - 1; j++) { | |
| 1698 vals[j] = tnode; | |
| 1699 } | |
| 1700 vals[count - 1] = fnode; | |
| 1701 to->locals[i] = | |
| 1702 builder_->Phi(local_type_vec_[i], count, vals, merge); | |
| 1703 } | |
| 1704 } | |
| 1705 break; | |
| 1706 } | |
| 1707 default: | |
| 1708 UNREACHABLE(); | |
| 1709 } | |
| 1710 return from->Kill(); | |
| 1711 } | |
| 1712 | |
  // Combines {tnode} and {fnode} at {merge}: appends {fnode} to {tnode} if
  // it is already a phi of this merge; otherwise, if the nodes differ,
  // creates a new phi whose earlier inputs all carry {tnode} and whose last
  // input is {fnode}. Returns the (possibly new) merged node.
  TFNode* CreateOrMergeIntoPhi(LocalType type, TFNode* merge, TFNode* tnode,
                               TFNode* fnode) {
    DCHECK_NOT_NULL(builder_);
    if (builder_->IsPhiWithMerge(tnode, merge)) {
      builder_->AppendToPhi(tnode, fnode);
    } else if (tnode != fnode) {
      uint32_t count = builder_->InputCount(merge);
      TFNode** vals = builder_->Buffer(count);
      for (uint32_t j = 0; j < count - 1; j++) vals[j] = tnode;
      vals[count - 1] = fnode;
      return builder_->Phi(type, count, vals, merge);
    }
    return tnode;
  }
| 1727 | |
| 1728 SsaEnv* PrepareForLoop(const byte* pc, SsaEnv* env) { | |
| 1729 if (!builder_) return Split(env); | |
| 1730 if (!env->go()) return Split(env); | |
| 1731 env->state = SsaEnv::kMerged; | |
| 1732 | |
| 1733 env->control = builder_->Loop(env->control); | |
| 1734 env->effect = builder_->EffectPhi(1, &env->effect, env->control); | |
| 1735 builder_->Terminate(env->effect, env->control); | |
| 1736 if (FLAG_wasm_loop_assignment_analysis) { | |
| 1737 BitVector* assigned = AnalyzeLoopAssignment(pc); | |
| 1738 if (failed()) return env; | |
| 1739 if (assigned != nullptr) { | |
| 1740 // Only introduce phis for variables assigned in this loop. | |
| 1741 for (int i = EnvironmentCount() - 1; i >= 0; i--) { | |
| 1742 if (!assigned->Contains(i)) continue; | |
| 1743 env->locals[i] = builder_->Phi(local_type_vec_[i], 1, &env->locals[i], | |
| 1744 env->control); | |
| 1745 } | |
| 1746 SsaEnv* loop_body_env = Split(env); | |
| 1747 builder_->StackCheck(position(), &(loop_body_env->effect), | |
| 1748 &(loop_body_env->control)); | |
| 1749 return loop_body_env; | |
| 1750 } | |
| 1751 } | |
| 1752 | |
| 1753 // Conservatively introduce phis for all local variables. | |
| 1754 for (int i = EnvironmentCount() - 1; i >= 0; i--) { | |
| 1755 env->locals[i] = | |
| 1756 builder_->Phi(local_type_vec_[i], 1, &env->locals[i], env->control); | |
| 1757 } | |
| 1758 | |
| 1759 SsaEnv* loop_body_env = Split(env); | |
| 1760 builder_->StackCheck(position(), &(loop_body_env->effect), | |
| 1761 &(loop_body_env->control)); | |
| 1762 return loop_body_env; | |
| 1763 } | |
| 1764 | |
| 1765 // Create a complete copy of the {from}. | |
| 1766 SsaEnv* Split(SsaEnv* from) { | |
| 1767 DCHECK_NOT_NULL(from); | |
| 1768 SsaEnv* result = reinterpret_cast<SsaEnv*>(zone_->New(sizeof(SsaEnv))); | |
| 1769 size_t size = sizeof(TFNode*) * EnvironmentCount(); | |
| 1770 result->control = from->control; | |
| 1771 result->effect = from->effect; | |
| 1772 | |
| 1773 if (from->go()) { | |
| 1774 result->state = SsaEnv::kReached; | |
| 1775 result->locals = | |
| 1776 size > 0 ? reinterpret_cast<TFNode**>(zone_->New(size)) : nullptr; | |
| 1777 memcpy(result->locals, from->locals, size); | |
| 1778 } else { | |
| 1779 result->state = SsaEnv::kUnreachable; | |
| 1780 result->locals = nullptr; | |
| 1781 } | |
| 1782 | |
| 1783 return result; | |
| 1784 } | |
| 1785 | |
| 1786 // Create a copy of {from} that steals its state and leaves {from} | |
| 1787 // unreachable. | |
| 1788 SsaEnv* Steal(SsaEnv* from) { | |
| 1789 DCHECK_NOT_NULL(from); | |
| 1790 if (!from->go()) return UnreachableEnv(); | |
| 1791 SsaEnv* result = reinterpret_cast<SsaEnv*>(zone_->New(sizeof(SsaEnv))); | |
| 1792 result->state = SsaEnv::kReached; | |
| 1793 result->locals = from->locals; | |
| 1794 result->control = from->control; | |
| 1795 result->effect = from->effect; | |
| 1796 from->Kill(SsaEnv::kUnreachable); | |
| 1797 return result; | |
| 1798 } | |
| 1799 | |
| 1800 // Create an unreachable environment. | |
| 1801 SsaEnv* UnreachableEnv() { | |
| 1802 SsaEnv* result = reinterpret_cast<SsaEnv*>(zone_->New(sizeof(SsaEnv))); | |
| 1803 result->state = SsaEnv::kUnreachable; | |
| 1804 result->control = nullptr; | |
| 1805 result->effect = nullptr; | |
| 1806 result->locals = nullptr; | |
| 1807 return result; | |
| 1808 } | |
| 1809 | |
| 1810 int EnvironmentCount() { | |
| 1811 if (builder_) return static_cast<int>(local_type_vec_.size()); | |
| 1812 return 0; // if we aren't building a graph, don't bother with SSA renaming. | |
| 1813 } | |
| 1814 | |
| 1815 virtual void onFirstError() { | |
| 1816 limit_ = start_; // Terminate decoding loop. | |
| 1817 builder_ = nullptr; // Don't build any more nodes. | |
| 1818 TRACE(" !%s\n", error_msg_.get()); | |
| 1819 } | |
  // Computes the set of local variables assigned anywhere inside the loop
  // whose kExprLoop opcode is at {pc}. Returns nullptr if {pc} is out of
  // bounds, does not point at a loop, or decoding failed. The caller uses
  // the result to introduce loop-header phis only for locals that can
  // actually change inside the loop.
  BitVector* AnalyzeLoopAssignment(const byte* pc) {
    if (pc >= limit_) return nullptr;
    if (*pc != kExprLoop) return nullptr;

    BitVector* assigned =
        new (zone_) BitVector(static_cast<int>(local_type_vec_.size()), zone_);
    int depth = 0;
    // Iteratively process all AST nodes nested inside the loop.
    while (pc < limit_ && ok()) {
      WasmOpcode opcode = static_cast<WasmOpcode>(*pc);
      unsigned length = 1;
      switch (opcode) {
        // Block-introducing opcodes raise the nesting depth; each matching
        // kExprEnd lowers it again. The initial kExprLoop itself bumps depth
        // to 1 on the first iteration.
        case kExprLoop:
        case kExprIf:
        case kExprBlock:
        case kExprTry:
          length = OpcodeLength(pc);
          depth++;
          break;
        case kExprSetLocal:  // fallthru
        case kExprTeeLocal: {
          LocalIndexOperand operand(this, pc);
          if (assigned->length() > 0 &&
              operand.index < static_cast<uint32_t>(assigned->length())) {
            // Unverified code might have an out-of-bounds index.
            assigned->Add(operand.index);
          }
          length = 1 + operand.length;
          break;
        }
        case kExprEnd:
          depth--;
          break;
        default:
          length = OpcodeLength(pc);
          break;
      }
      // depth falls to 0 once the kExprEnd matching the loop's opening has
      // been consumed; stop before advancing past it.
      if (depth <= 0) break;
      pc += length;
    }
    return ok() ? assigned : nullptr;
  }
| 1862 | |
| 1863 inline wasm::WasmCodePosition position() { | |
| 1864 int offset = static_cast<int>(pc_ - start_); | |
| 1865 DCHECK_EQ(pc_ - start_, offset); // overflows cannot happen | |
| 1866 return offset; | |
| 1867 } | |
| 1868 | |
| 1869 inline void BuildSimpleOperator(WasmOpcode opcode, FunctionSig* sig) { | |
| 1870 TFNode* node; | |
| 1871 switch (sig->parameter_count()) { | |
| 1872 case 1: { | |
| 1873 Value val = Pop(0, sig->GetParam(0)); | |
| 1874 node = BUILD(Unop, opcode, val.node, position()); | |
| 1875 break; | |
| 1876 } | |
| 1877 case 2: { | |
| 1878 Value rval = Pop(1, sig->GetParam(1)); | |
| 1879 Value lval = Pop(0, sig->GetParam(0)); | |
| 1880 node = BUILD(Binop, opcode, lval.node, rval.node, position()); | |
| 1881 break; | |
| 1882 } | |
| 1883 default: | |
| 1884 UNREACHABLE(); | |
| 1885 node = nullptr; | |
| 1886 break; | |
| 1887 } | |
| 1888 Push(GetReturnType(sig), node); | |
| 1889 } | |
| 1890 }; | |
| 1891 | |
| 1892 bool DecodeLocalDecls(AstLocalDecls& decls, const byte* start, | |
| 1893 const byte* end) { | |
| 1894 AccountingAllocator allocator; | |
| 1895 Zone tmp(&allocator, ZONE_NAME); | |
| 1896 FunctionBody body = {nullptr, nullptr, nullptr, start, end}; | |
| 1897 WasmFullDecoder decoder(&tmp, nullptr, body); | |
| 1898 return decoder.DecodeLocalDecls(decls); | |
| 1899 } | |
| 1900 | |
| 1901 BytecodeIterator::BytecodeIterator(const byte* start, const byte* end, | |
| 1902 AstLocalDecls* decls) | |
| 1903 : Decoder(start, end) { | |
| 1904 if (decls != nullptr) { | |
| 1905 if (DecodeLocalDecls(*decls, start, end)) { | |
| 1906 pc_ += decls->decls_encoded_size; | |
| 1907 if (pc_ > end_) pc_ = end_; | |
| 1908 } | |
| 1909 } | |
| 1910 } | |
| 1911 | |
| 1912 DecodeResult VerifyWasmCode(AccountingAllocator* allocator, | |
| 1913 FunctionBody& body) { | |
| 1914 Zone zone(allocator, ZONE_NAME); | |
| 1915 WasmFullDecoder decoder(&zone, nullptr, body); | |
| 1916 decoder.Decode(); | |
| 1917 return decoder.toResult<DecodeStruct*>(nullptr); | |
| 1918 } | |
| 1919 | |
| 1920 DecodeResult BuildTFGraph(AccountingAllocator* allocator, TFBuilder* builder, | |
| 1921 FunctionBody& body) { | |
| 1922 Zone zone(allocator, ZONE_NAME); | |
| 1923 WasmFullDecoder decoder(&zone, builder, body); | |
| 1924 decoder.Decode(); | |
| 1925 return decoder.toResult<DecodeStruct*>(nullptr); | |
| 1926 } | |
| 1927 | |
| 1928 unsigned OpcodeLength(const byte* pc, const byte* end) { | |
| 1929 WasmDecoder decoder(nullptr, nullptr, pc, end); | |
| 1930 return decoder.OpcodeLength(pc); | |
| 1931 } | |
| 1932 | |
| 1933 void PrintAstForDebugging(const byte* start, const byte* end) { | |
| 1934 AccountingAllocator allocator; | |
| 1935 OFStream os(stdout); | |
| 1936 PrintAst(&allocator, FunctionBodyForTesting(start, end), os, nullptr); | |
| 1937 } | |
| 1938 | |
| 1939 bool PrintAst(AccountingAllocator* allocator, const FunctionBody& body, | |
| 1940 std::ostream& os, | |
| 1941 std::vector<std::tuple<uint32_t, int, int>>* offset_table) { | |
| 1942 Zone zone(allocator, ZONE_NAME); | |
| 1943 WasmFullDecoder decoder(&zone, nullptr, body); | |
| 1944 int line_nr = 0; | |
| 1945 | |
| 1946 // Print the function signature. | |
| 1947 if (body.sig) { | |
| 1948 os << "// signature: " << *body.sig << std::endl; | |
| 1949 ++line_nr; | |
| 1950 } | |
| 1951 | |
| 1952 // Print the local declarations. | |
| 1953 AstLocalDecls decls(&zone); | |
| 1954 BytecodeIterator i(body.start, body.end, &decls); | |
| 1955 if (body.start != i.pc() && !FLAG_wasm_code_fuzzer_gen_test) { | |
| 1956 os << "// locals: "; | |
| 1957 for (auto p : decls.local_types) { | |
| 1958 LocalType type = p.first; | |
| 1959 uint32_t count = p.second; | |
| 1960 os << " " << count << " " << WasmOpcodes::TypeName(type); | |
| 1961 } | |
| 1962 os << std::endl; | |
| 1963 ++line_nr; | |
| 1964 | |
| 1965 for (const byte* locals = body.start; locals < i.pc(); locals++) { | |
| 1966 os << (locals == body.start ? "0x" : " 0x") << AsHex(*locals, 2) << ","; | |
| 1967 } | |
| 1968 os << std::endl; | |
| 1969 ++line_nr; | |
| 1970 } | |
| 1971 | |
| 1972 os << "// body: " << std::endl; | |
| 1973 ++line_nr; | |
| 1974 unsigned control_depth = 0; | |
| 1975 for (; i.has_next(); i.next()) { | |
| 1976 unsigned length = decoder.OpcodeLength(i.pc()); | |
| 1977 | |
| 1978 WasmOpcode opcode = i.current(); | |
| 1979 if (opcode == kExprElse) control_depth--; | |
| 1980 | |
| 1981 int num_whitespaces = control_depth < 32 ? 2 * control_depth : 64; | |
| 1982 if (offset_table) { | |
| 1983 offset_table->push_back( | |
| 1984 std::make_tuple(i.pc_offset(), line_nr, num_whitespaces)); | |
| 1985 } | |
| 1986 | |
| 1987 // 64 whitespaces | |
| 1988 const char* padding = | |
| 1989 " "; | |
| 1990 os.write(padding, num_whitespaces); | |
| 1991 os << "k" << WasmOpcodes::OpcodeName(opcode) << ","; | |
| 1992 | |
| 1993 for (size_t j = 1; j < length; ++j) { | |
| 1994 os << " 0x" << AsHex(i.pc()[j], 2) << ","; | |
| 1995 } | |
| 1996 | |
| 1997 switch (opcode) { | |
| 1998 case kExprElse: | |
| 1999 os << " // @" << i.pc_offset(); | |
| 2000 control_depth++; | |
| 2001 break; | |
| 2002 case kExprLoop: | |
| 2003 case kExprIf: | |
| 2004 case kExprBlock: | |
| 2005 case kExprTry: { | |
| 2006 BlockTypeOperand operand(&i, i.pc()); | |
| 2007 os << " // @" << i.pc_offset(); | |
| 2008 for (unsigned i = 0; i < operand.arity; i++) { | |
| 2009 os << " " << WasmOpcodes::TypeName(operand.read_entry(i)); | |
| 2010 } | |
| 2011 control_depth++; | |
| 2012 break; | |
| 2013 } | |
| 2014 case kExprEnd: | |
| 2015 os << " // @" << i.pc_offset(); | |
| 2016 control_depth--; | |
| 2017 break; | |
| 2018 case kExprBr: { | |
| 2019 BreakDepthOperand operand(&i, i.pc()); | |
| 2020 os << " // depth=" << operand.depth; | |
| 2021 break; | |
| 2022 } | |
| 2023 case kExprBrIf: { | |
| 2024 BreakDepthOperand operand(&i, i.pc()); | |
| 2025 os << " // depth=" << operand.depth; | |
| 2026 break; | |
| 2027 } | |
| 2028 case kExprBrTable: { | |
| 2029 BranchTableOperand operand(&i, i.pc()); | |
| 2030 os << " // entries=" << operand.table_count; | |
| 2031 break; | |
| 2032 } | |
| 2033 case kExprCallIndirect: { | |
| 2034 CallIndirectOperand operand(&i, i.pc()); | |
| 2035 os << " // sig #" << operand.index; | |
| 2036 if (decoder.Complete(i.pc(), operand)) { | |
| 2037 os << ": " << *operand.sig; | |
| 2038 } | |
| 2039 break; | |
| 2040 } | |
| 2041 case kExprCallFunction: { | |
| 2042 CallFunctionOperand operand(&i, i.pc()); | |
| 2043 os << " // function #" << operand.index; | |
| 2044 if (decoder.Complete(i.pc(), operand)) { | |
| 2045 os << ": " << *operand.sig; | |
| 2046 } | |
| 2047 break; | |
| 2048 } | |
| 2049 default: | |
| 2050 break; | |
| 2051 } | |
| 2052 os << std::endl; | |
| 2053 ++line_nr; | |
| 2054 } | |
| 2055 | |
| 2056 return decoder.ok(); | |
| 2057 } | |
| 2058 | |
| 2059 BitVector* AnalyzeLoopAssignmentForTesting(Zone* zone, size_t num_locals, | |
| 2060 const byte* start, const byte* end) { | |
| 2061 FunctionBody body = {nullptr, nullptr, nullptr, start, end}; | |
| 2062 WasmFullDecoder decoder(zone, nullptr, body); | |
| 2063 return decoder.AnalyzeLoopAssignmentForTesting(start, num_locals); | |
| 2064 } | |
| 2065 | |
| 2066 } // namespace wasm | |
| 2067 } // namespace internal | |
| 2068 } // namespace v8 | |
| OLD | NEW |