Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 // Copyright 2015 the V8 project authors. All rights reserved. | 1 // Copyright 2015 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/signature.h" | 5 #include "src/signature.h" |
| 6 | 6 |
| 7 #include "src/bit-vector.h" | 7 #include "src/bit-vector.h" |
| 8 #include "src/flags.h" | 8 #include "src/flags.h" |
| 9 #include "src/handles.h" | 9 #include "src/handles.h" |
| 10 #include "src/zone-containers.h" | 10 #include "src/zone-containers.h" |
| (...skipping 18 matching lines...) | |
| 29 } while (false) | 29 } while (false) |
| 30 #else | 30 #else |
| 31 #define TRACE(...) | 31 #define TRACE(...) |
| 32 #endif | 32 #endif |
| 33 | 33 |
| 34 #define CHECK_PROTOTYPE_OPCODE(flag) \ | 34 #define CHECK_PROTOTYPE_OPCODE(flag) \ |
| 35 if (!FLAG_##flag) { \ | 35 if (!FLAG_##flag) { \ |
| 36 error("Invalid opcode (enable with --" #flag ")"); \ | 36 error("Invalid opcode (enable with --" #flag ")"); \ |
| 37 break; \ | 37 break; \ |
| 38 } | 38 } |
| 39 // TODO(titzer): this is only for intermediate migration. | |
| 40 #define IMPLICIT_FUNCTION_END 1 | |
| 39 | 41 |
| 40 // An SsaEnv environment carries the current local variable renaming | 42 // An SsaEnv environment carries the current local variable renaming |
| 41 // as well as the current effect and control dependency in the TF graph. | 43 // as well as the current effect and control dependency in the TF graph. |
| 42 // It maintains a control state that tracks whether the environment | 44 // It maintains a control state that tracks whether the environment |
| 43 // is reachable, has reached a control end, or has been merged. | 45 // is reachable, has reached a control end, or has been merged. |
| 44 struct SsaEnv { | 46 struct SsaEnv { |
| 45 enum State { kControlEnd, kUnreachable, kReached, kMerged }; | 47 enum State { kControlEnd, kUnreachable, kReached, kMerged }; |
| 46 | 48 |
| 47 State state; | 49 State state; |
| 48 TFNode* control; | 50 TFNode* control; |
| (...skipping 14 matching lines...) | |
| 63 | 65 |
| 64 // An entry on the value stack. | 66 // An entry on the value stack. |
| 65 struct Value { | 67 struct Value { |
| 66 const byte* pc; | 68 const byte* pc; |
| 67 TFNode* node; | 69 TFNode* node; |
| 68 LocalType type; | 70 LocalType type; |
| 69 }; | 71 }; |
| 70 | 72 |
| 71 struct Control; | 73 struct Control; |
| 72 | 74 |
| 73 // An entry on the control stack (i.e. if, block, loop, try). | 75 struct MergeValues { |
| 76 uint32_t arity; | |
| 77 union { | |
| 78 Value* array; | |
| 79 Value first; | |
| 80 } vals; // Either multiple values or a single value. | |
| 81 | |
| 82 Value& first() { | |
| 83 DCHECK_GT(arity, 0u); | |
| 84 return arity == 1 ? vals.first : vals.array[0]; | |
| 85 } | |
| 86 }; | |
| 87 | |
| 88 // IncomingBranch is used by exception handling code for managing finally's. | |
| 89 struct IncomingBranch { | |
| 90 int32_t token_value; | |
| 91 Control* target; | |
| 92 MergeValues merge; | |
| 93 }; | |
| 94 | |
| 95 static Value* NO_VALUE = nullptr; | |
| 96 | |
| 97 enum ControlKind { kControlIf, kControlBlock, kControlLoop, kControlTry }; | |
| 98 | |
| 99 // An entry on the control stack (i.e. if, block, loop). | |
| 74 struct Control { | 100 struct Control { |
| 75 const byte* pc; | 101 const byte* pc; |
| 102 ControlKind kind; | |
| 76 int stack_depth; // stack height at the beginning of the construct. | 103 int stack_depth; // stack height at the beginning of the construct. |
| 77 SsaEnv* end_env; // end environment for the construct. | 104 SsaEnv* end_env; // end environment for the construct. |
| 78 SsaEnv* false_env; // false environment (only for if). | 105 SsaEnv* false_env; // false environment (only for if). |
| 79 SsaEnv* catch_env; // catch environment (only for try). | 106 SsaEnv* catch_env; // catch environment (only for try). |
| 80 TFNode* node; // result node for the construct. | |
| 81 LocalType type; // result type for the construct. | |
| 82 bool is_loop; // true if this is the inner label of a loop. | |
| 83 | 107 |
| 84 bool is_if() const { return *pc == kExprIf; } | 108 // Values merged into the end of this control construct. |
| 109 MergeValues merge; | |
| 85 | 110 |
| 86 bool is_try() const { return *pc == kExprTry; } | 111 inline bool is_if() { return kind == kControlIf; } |
| 112 inline bool is_block() { return kind == kControlBlock; } | |
| 113 inline bool is_loop() { return kind == kControlLoop; } | |
| 114 inline bool is_try() const { return kind == kControlTry; } | |
| 87 | 115 |
| 88 // Named constructors. | 116 // Named constructors. |
| 89 static Control Block(const byte* pc, int stack_depth, SsaEnv* end_env) { | 117 static Control Block(const byte* pc, int stack_depth, SsaEnv* end_env) { |
| 90 return {pc, stack_depth, end_env, nullptr, | 118 return {pc, kControlBlock, stack_depth, end_env, |
| 91 nullptr, nullptr, kAstEnd, false}; | 119 nullptr, nullptr, {0, {NO_VALUE}}}; |
| 92 } | 120 } |
| 93 | 121 |
| 94 static Control If(const byte* pc, int stack_depth, SsaEnv* end_env, | 122 static Control If(const byte* pc, int stack_depth, SsaEnv* end_env, |
| 95 SsaEnv* false_env) { | 123 SsaEnv* false_env) { |
| 96 return {pc, stack_depth, end_env, false_env, | 124 return {pc, kControlIf, stack_depth, end_env, |
| 97 nullptr, nullptr, kAstStmt, false}; | 125 false_env, nullptr, {0, {NO_VALUE}}}; |
| 98 } | 126 } |
| 99 | 127 |
| 100 static Control Loop(const byte* pc, int stack_depth, SsaEnv* end_env) { | 128 static Control Loop(const byte* pc, int stack_depth, SsaEnv* end_env) { |
| 101 return {pc, stack_depth, end_env, nullptr, nullptr, nullptr, kAstEnd, true}; | 129 return {pc, kControlLoop, stack_depth, end_env, |
| 130 nullptr, nullptr, {0, {NO_VALUE}}}; | |
| 102 } | 131 } |
| 103 | 132 |
| 104 static Control Try(const byte* pc, int stack_depth, SsaEnv* end_env, | 133 static Control Try(const byte* pc, int stack_depth, SsaEnv* end_env, |
| 105 SsaEnv* catch_env) { | 134 SsaEnv* catch_env) { |
| 106 return {pc, stack_depth, end_env, nullptr, | 135 return {pc, kControlTry, stack_depth, end_env, |
| 107 catch_env, nullptr, kAstEnd, false}; | 136 nullptr, catch_env, {0, {NO_VALUE}}}; |
| 108 } | 137 } |
| 109 }; | 138 }; |
| 110 | 139 |
| 111 // Macros that build nodes only if there is a graph and the current SSA | 140 // Macros that build nodes only if there is a graph and the current SSA |
| 112 // environment is reachable from start. This avoids problems with malformed | 141 // environment is reachable from start. This avoids problems with malformed |
| 113 // TF graphs when decoding inputs that have unreachable code. | 142 // TF graphs when decoding inputs that have unreachable code. |
| 114 #define BUILD(func, ...) (build() ? builder_->func(__VA_ARGS__) : nullptr) | 143 #define BUILD(func, ...) (build() ? builder_->func(__VA_ARGS__) : nullptr) |
| 115 #define BUILD0(func) (build() ? builder_->func() : nullptr) | 144 #define BUILD0(func) (build() ? builder_->func() : nullptr) |
| 116 | 145 |
| 117 // Generic Wasm bytecode decoder with utilities for decoding operands, | 146 // Generic Wasm bytecode decoder with utilities for decoding operands, |
| (...skipping 14 matching lines...) | |
| 132 | 161 |
| 133 inline bool Validate(const byte* pc, LocalIndexOperand& operand) { | 162 inline bool Validate(const byte* pc, LocalIndexOperand& operand) { |
| 134 if (operand.index < total_locals_) { | 163 if (operand.index < total_locals_) { |
| 135 if (local_types_) { | 164 if (local_types_) { |
| 136 operand.type = local_types_->at(operand.index); | 165 operand.type = local_types_->at(operand.index); |
| 137 } else { | 166 } else { |
| 138 operand.type = kAstStmt; | 167 operand.type = kAstStmt; |
| 139 } | 168 } |
| 140 return true; | 169 return true; |
| 141 } | 170 } |
| 142 error(pc, pc + 1, "invalid local index"); | 171 error(pc, pc + 1, "invalid local index: %u", operand.index); |
| 143 return false; | 172 return false; |
| 144 } | 173 } |
| 145 | 174 |
| 146 inline bool Validate(const byte* pc, GlobalIndexOperand& operand) { | 175 inline bool Validate(const byte* pc, GlobalIndexOperand& operand) { |
| 147 ModuleEnv* m = module_; | 176 ModuleEnv* m = module_; |
| 148 if (m && m->module && operand.index < m->module->globals.size()) { | 177 if (m && m->module && operand.index < m->module->globals.size()) { |
| 149 operand.type = m->module->globals[operand.index].type; | 178 operand.global = &m->module->globals[operand.index]; |
| 179 operand.type = operand.global->type; | |
| 150 return true; | 180 return true; |
| 151 } | 181 } |
| 152 error(pc, pc + 1, "invalid global index"); | 182 error(pc, pc + 1, "invalid global index: %u", operand.index); |
| 153 return false; | 183 return false; |
| 154 } | 184 } |
| 155 | 185 |
| 156 inline bool Complete(const byte* pc, CallFunctionOperand& operand) { | 186 inline bool Complete(const byte* pc, CallFunctionOperand& operand) { |
| 157 ModuleEnv* m = module_; | 187 ModuleEnv* m = module_; |
| 158 if (m && m->module && operand.index < m->module->functions.size()) { | 188 if (m && m->module && operand.index < m->module->functions.size()) { |
| 159 operand.sig = m->module->functions[operand.index].sig; | 189 operand.sig = m->module->functions[operand.index].sig; |
| 160 return true; | 190 return true; |
| 161 } | 191 } |
| 162 return false; | 192 return false; |
| 163 } | 193 } |
| 164 | 194 |
| 165 inline bool Validate(const byte* pc, CallFunctionOperand& operand) { | 195 inline bool Validate(const byte* pc, CallFunctionOperand& operand) { |
| 166 if (Complete(pc, operand)) { | 196 if (Complete(pc, operand)) { |
| 167 uint32_t expected = static_cast<uint32_t>(operand.sig->parameter_count()); | |
| 168 if (operand.arity != expected) { | |
| 169 error(pc, pc + 1, | |
| 170 "arity mismatch in direct function call (expected %u, got %u)", | |
| 171 expected, operand.arity); | |
| 172 return false; | |
| 173 } | |
| 174 return true; | 197 return true; |
| 175 } | 198 } |
| 176 error(pc, pc + 1, "invalid function index"); | 199 error(pc, pc + 1, "invalid function index: %u", operand.index); |
| 177 return false; | 200 return false; |
| 178 } | 201 } |
| 179 | 202 |
| 180 inline bool Complete(const byte* pc, CallIndirectOperand& operand) { | 203 inline bool Complete(const byte* pc, CallIndirectOperand& operand) { |
| 181 ModuleEnv* m = module_; | 204 ModuleEnv* m = module_; |
| 182 if (m && m->module && operand.index < m->module->signatures.size()) { | 205 if (m && m->module && operand.index < m->module->signatures.size()) { |
| 183 operand.sig = m->module->signatures[operand.index]; | 206 operand.sig = m->module->signatures[operand.index]; |
| 184 return true; | 207 return true; |
| 185 } | 208 } |
| 186 return false; | 209 return false; |
| 187 } | 210 } |
| 188 | 211 |
| 189 inline bool Validate(const byte* pc, CallIndirectOperand& operand) { | 212 inline bool Validate(const byte* pc, CallIndirectOperand& operand) { |
| 190 if (Complete(pc, operand)) { | 213 if (Complete(pc, operand)) { |
| 191 uint32_t expected = static_cast<uint32_t>(operand.sig->parameter_count()); | |
| 192 if (operand.arity != expected) { | |
| 193 error(pc, pc + 1, | |
| 194 "arity mismatch in indirect function call (expected %u, got %u)", | |
| 195 expected, operand.arity); | |
| 196 return false; | |
| 197 } | |
| 198 return true; | 214 return true; |
| 199 } | 215 } |
| 200 error(pc, pc + 1, "invalid signature index"); | 216 error(pc, pc + 1, "invalid signature index: #%u", operand.index); |
| 201 return false; | |
| 202 } | |
| 203 | |
| 204 inline bool Complete(const byte* pc, CallImportOperand& operand) { | |
| 205 ModuleEnv* m = module_; | |
| 206 if (m && m->module && operand.index < m->module->import_table.size()) { | |
| 207 operand.sig = m->module->import_table[operand.index].sig; | |
| 208 return true; | |
| 209 } | |
| 210 return false; | |
| 211 } | |
| 212 | |
| 213 inline bool Validate(const byte* pc, CallImportOperand& operand) { | |
| 214 if (Complete(pc, operand)) { | |
| 215 uint32_t expected = static_cast<uint32_t>(operand.sig->parameter_count()); | |
| 216 if (operand.arity != expected) { | |
| 217 error(pc, pc + 1, "arity mismatch in import call (expected %u, got %u)", | |
| 218 expected, operand.arity); | |
| 219 return false; | |
| 220 } | |
| 221 return true; | |
| 222 } | |
| 223 error(pc, pc + 1, "invalid signature index"); | |
| 224 return false; | 217 return false; |
| 225 } | 218 } |
| 226 | 219 |
| 227 inline bool Validate(const byte* pc, BreakDepthOperand& operand, | 220 inline bool Validate(const byte* pc, BreakDepthOperand& operand, |
| 228 ZoneVector<Control>& control) { | 221 ZoneVector<Control>& control) { |
| 229 if (operand.arity > 1) { | |
| 230 error(pc, pc + 1, "invalid arity for br or br_if"); | |
| 231 return false; | |
| 232 } | |
| 233 if (operand.depth < control.size()) { | 222 if (operand.depth < control.size()) { |
| 234 operand.target = &control[control.size() - operand.depth - 1]; | 223 operand.target = &control[control.size() - operand.depth - 1]; |
| 235 return true; | 224 return true; |
| 236 } | 225 } |
| 237 error(pc, pc + 1, "invalid break depth"); | 226 error(pc, pc + 1, "invalid break depth: %u", operand.depth); |
| 238 return false; | 227 return false; |
| 239 } | 228 } |
| 240 | 229 |
| 241 bool Validate(const byte* pc, BranchTableOperand& operand, | 230 bool Validate(const byte* pc, BranchTableOperand& operand, |
| 242 size_t block_depth) { | 231 size_t block_depth) { |
| 243 if (operand.arity > 1) { | |
| 244 error(pc, pc + 1, "invalid arity for break"); | |
| 245 return false; | |
| 246 } | |
| 247 // Verify table. | 232 // Verify table. |
| 248 for (uint32_t i = 0; i < operand.table_count + 1; ++i) { | 233 for (uint32_t i = 0; i < operand.table_count + 1; ++i) { |
| 249 uint32_t target = operand.read_entry(this, i); | 234 uint32_t target = operand.read_entry(this, i); |
| 250 if (target >= block_depth) { | 235 if (target >= block_depth) { |
| 251 error(operand.table + i * 2, "improper branch in br_table"); | 236 error(operand.table + i * 2, "improper branch in br_table"); |
| 252 return false; | 237 return false; |
| 253 } | 238 } |
| 254 } | 239 } |
| 255 return true; | 240 return true; |
| 256 } | 241 } |
| 257 | 242 |
| 258 unsigned OpcodeArity(const byte* pc) { | |
| 259 #define DECLARE_ARITY(name, ...) \ | |
| 260 static const LocalType kTypes_##name[] = {__VA_ARGS__}; \ | |
| 261 static const int kArity_##name = \ | |
| 262 static_cast<int>(arraysize(kTypes_##name) - 1); | |
| 263 | |
| 264 FOREACH_SIGNATURE(DECLARE_ARITY); | |
| 265 #undef DECLARE_ARITY | |
| 266 | |
| 267 switch (static_cast<WasmOpcode>(*pc)) { | |
| 268 case kExprI8Const: | |
| 269 case kExprI32Const: | |
| 270 case kExprI64Const: | |
| 271 case kExprF64Const: | |
| 272 case kExprF32Const: | |
| 273 case kExprGetLocal: | |
| 274 case kExprGetGlobal: | |
| 275 case kExprNop: | |
| 276 case kExprUnreachable: | |
| 277 case kExprEnd: | |
| 278 case kExprBlock: | |
| 279 case kExprThrow: | |
| 280 case kExprTry: | |
| 281 case kExprLoop: | |
| 282 return 0; | |
| 283 | |
| 284 case kExprSetGlobal: | |
| 285 case kExprSetLocal: | |
| 286 case kExprElse: | |
| 287 case kExprCatch: | |
| 288 return 1; | |
| 289 | |
| 290 case kExprBr: { | |
| 291 BreakDepthOperand operand(this, pc); | |
| 292 return operand.arity; | |
| 293 } | |
| 294 case kExprBrIf: { | |
| 295 BreakDepthOperand operand(this, pc); | |
| 296 return 1 + operand.arity; | |
| 297 } | |
| 298 case kExprBrTable: { | |
| 299 BranchTableOperand operand(this, pc); | |
| 300 return 1 + operand.arity; | |
| 301 } | |
| 302 | |
| 303 case kExprIf: | |
| 304 return 1; | |
| 305 case kExprSelect: | |
| 306 return 3; | |
| 307 | |
| 308 case kExprCallFunction: { | |
| 309 CallFunctionOperand operand(this, pc); | |
| 310 return operand.arity; | |
| 311 } | |
| 312 case kExprCallIndirect: { | |
| 313 CallIndirectOperand operand(this, pc); | |
| 314 return 1 + operand.arity; | |
| 315 } | |
| 316 case kExprCallImport: { | |
| 317 CallImportOperand operand(this, pc); | |
| 318 return operand.arity; | |
| 319 } | |
| 320 case kExprReturn: { | |
| 321 ReturnArityOperand operand(this, pc); | |
| 322 return operand.arity; | |
| 323 } | |
| 324 | |
| 325 #define DECLARE_OPCODE_CASE(name, opcode, sig) \ | |
| 326 case kExpr##name: \ | |
| 327 return kArity_##sig; | |
| 328 | |
| 329 FOREACH_LOAD_MEM_OPCODE(DECLARE_OPCODE_CASE) | |
| 330 FOREACH_STORE_MEM_OPCODE(DECLARE_OPCODE_CASE) | |
| 331 FOREACH_MISC_MEM_OPCODE(DECLARE_OPCODE_CASE) | |
| 332 FOREACH_SIMPLE_OPCODE(DECLARE_OPCODE_CASE) | |
| 333 FOREACH_SIMPLE_MEM_OPCODE(DECLARE_OPCODE_CASE) | |
| 334 FOREACH_ASMJS_COMPAT_OPCODE(DECLARE_OPCODE_CASE) | |
| 335 FOREACH_SIMD_0_OPERAND_OPCODE(DECLARE_OPCODE_CASE) | |
| 336 #undef DECLARE_OPCODE_CASE | |
| 337 #define DECLARE_OPCODE_CASE(name, opcode, sig) case kExpr##name: | |
| 338 FOREACH_SIMD_1_OPERAND_OPCODE(DECLARE_OPCODE_CASE) | |
| 339 #undef DECLARE_OPCODE_CASE | |
| 340 return 1; | |
| 341 default: | |
| 342 UNREACHABLE(); | |
| 343 return 0; | |
| 344 } | |
| 345 } | |
| 346 | |
| 347 unsigned OpcodeLength(const byte* pc) { | 243 unsigned OpcodeLength(const byte* pc) { |
| 348 switch (static_cast<WasmOpcode>(*pc)) { | 244 switch (static_cast<WasmOpcode>(*pc)) { |
| 349 #define DECLARE_OPCODE_CASE(name, opcode, sig) case kExpr##name: | 245 #define DECLARE_OPCODE_CASE(name, opcode, sig) case kExpr##name: |
| 350 FOREACH_LOAD_MEM_OPCODE(DECLARE_OPCODE_CASE) | 246 FOREACH_LOAD_MEM_OPCODE(DECLARE_OPCODE_CASE) |
| 351 FOREACH_STORE_MEM_OPCODE(DECLARE_OPCODE_CASE) | 247 FOREACH_STORE_MEM_OPCODE(DECLARE_OPCODE_CASE) |
| 352 #undef DECLARE_OPCODE_CASE | 248 #undef DECLARE_OPCODE_CASE |
| 353 { | 249 { |
| 354 MemoryAccessOperand operand(this, pc, UINT32_MAX); | 250 MemoryAccessOperand operand(this, pc, UINT32_MAX); |
| 355 return 1 + operand.length; | 251 return 1 + operand.length; |
| 356 } | 252 } |
| 357 case kExprBr: | 253 case kExprBr: |
| 358 case kExprBrIf: { | 254 case kExprBrIf: { |
| 359 BreakDepthOperand operand(this, pc); | 255 BreakDepthOperand operand(this, pc); |
| 360 return 1 + operand.length; | 256 return 1 + operand.length; |
| 361 } | 257 } |
| 362 case kExprSetGlobal: | 258 case kExprSetGlobal: |
| 363 case kExprGetGlobal: { | 259 case kExprGetGlobal: { |
| 364 GlobalIndexOperand operand(this, pc); | 260 GlobalIndexOperand operand(this, pc); |
| 365 return 1 + operand.length; | 261 return 1 + operand.length; |
| 366 } | 262 } |
| 367 | 263 |
| 368 case kExprCallFunction: { | 264 case kExprCallFunction: { |
| 369 CallFunctionOperand operand(this, pc); | 265 CallFunctionOperand operand(this, pc); |
| 370 return 1 + operand.length; | 266 return 1 + operand.length; |
| 371 } | 267 } |
| 372 case kExprCallIndirect: { | 268 case kExprCallIndirect: { |
| 373 CallIndirectOperand operand(this, pc); | 269 CallIndirectOperand operand(this, pc); |
| 374 return 1 + operand.length; | 270 return 1 + operand.length; |
| 375 } | 271 } |
| 376 case kExprCallImport: { | 272 |
| 377 CallImportOperand operand(this, pc); | 273 case kExprTry: |
| 274 case kExprIf: // fall thru | |
| 275 case kExprLoop: | |
| 276 case kExprBlock: { | |
| 277 BlockTypeOperand operand(this, pc); | |
| 378 return 1 + operand.length; | 278 return 1 + operand.length; |
| 379 } | 279 } |
| 380 | 280 |
| 381 case kExprSetLocal: | 281 case kExprSetLocal: |
| 282 case kExprTeeLocal: | |
| 382 case kExprGetLocal: | 283 case kExprGetLocal: |
| 383 case kExprCatch: { | 284 case kExprCatch: { |
| 384 LocalIndexOperand operand(this, pc); | 285 LocalIndexOperand operand(this, pc); |
| 385 return 1 + operand.length; | 286 return 1 + operand.length; |
| 386 } | 287 } |
| 387 case kExprBrTable: { | 288 case kExprBrTable: { |
| 388 BranchTableOperand operand(this, pc); | 289 BranchTableOperand operand(this, pc); |
| 389 return 1 + operand.length; | 290 return 1 + operand.length; |
| 390 } | 291 } |
| 391 case kExprI32Const: { | 292 case kExprI32Const: { |
| 392 ImmI32Operand operand(this, pc); | 293 ImmI32Operand operand(this, pc); |
| 393 return 1 + operand.length; | 294 return 1 + operand.length; |
| 394 } | 295 } |
| 395 case kExprI64Const: { | 296 case kExprI64Const: { |
| 396 ImmI64Operand operand(this, pc); | 297 ImmI64Operand operand(this, pc); |
| 397 return 1 + operand.length; | 298 return 1 + operand.length; |
| 398 } | 299 } |
| 399 case kExprI8Const: | 300 case kExprI8Const: |
| 400 return 2; | 301 return 2; |
| 401 case kExprF32Const: | 302 case kExprF32Const: |
| 402 return 5; | 303 return 5; |
| 403 case kExprF64Const: | 304 case kExprF64Const: |
| 404 return 9; | 305 return 9; |
| 405 case kExprReturn: { | |
| 406 ReturnArityOperand operand(this, pc); | |
| 407 return 1 + operand.length; | |
| 408 } | |
| 409 #define DECLARE_OPCODE_CASE(name, opcode, sig) case kExpr##name: | |
| 410 FOREACH_SIMD_0_OPERAND_OPCODE(DECLARE_OPCODE_CASE) { return 2; } | |
| 411 FOREACH_SIMD_1_OPERAND_OPCODE(DECLARE_OPCODE_CASE) { return 3; } | |
| 412 #undef DECLARE_OPCODE_CASE | |
| 413 default: | 306 default: |
| 414 return 1; | 307 return 1; |
| 415 } | 308 } |
| 416 } | 309 } |
| 417 }; | 310 }; |
| 418 | 311 |
| 419 // The full WASM decoder for bytecode. Both verifies bytecode and generates | 312 // The full WASM decoder for bytecode. Both verifies bytecode and generates |
| 420 // a TurboFan IR graph. | 313 // a TurboFan IR graph. |
| 421 class WasmFullDecoder : public WasmDecoder { | 314 class WasmFullDecoder : public WasmDecoder { |
| 422 public: | 315 public: |
| 423 WasmFullDecoder(Zone* zone, TFBuilder* builder, const FunctionBody& body) | 316 WasmFullDecoder(Zone* zone, TFBuilder* builder, const FunctionBody& body) |
| 424 : WasmDecoder(body.module, body.sig, body.start, body.end), | 317 : WasmDecoder(body.module, body.sig, body.start, body.end), |
| 425 zone_(zone), | 318 zone_(zone), |
| 426 builder_(builder), | 319 builder_(builder), |
| 427 base_(body.base), | 320 base_(body.base), |
| 428 local_type_vec_(zone), | 321 local_type_vec_(zone), |
| 429 stack_(zone), | 322 stack_(zone), |
| 430 control_(zone) { | 323 control_(zone), |
| 324 last_end_found_(false) { | |
| 431 local_types_ = &local_type_vec_; | 325 local_types_ = &local_type_vec_; |
| 432 } | 326 } |
| 433 | 327 |
| 434 bool Decode() { | 328 bool Decode() { |
| 435 base::ElapsedTimer decode_timer; | 329 base::ElapsedTimer decode_timer; |
| 436 if (FLAG_trace_wasm_decode_time) { | 330 if (FLAG_trace_wasm_decode_time) { |
| 437 decode_timer.Start(); | 331 decode_timer.Start(); |
| 438 } | 332 } |
| 439 stack_.clear(); | 333 stack_.clear(); |
| 440 control_.clear(); | 334 control_.clear(); |
| 441 | 335 |
| 442 if (end_ < pc_) { | 336 if (end_ < pc_) { |
| 443 error(pc_, "function body end < start"); | 337 error(pc_, "function body end < start"); |
| 444 return false; | 338 return false; |
| 445 } | 339 } |
| 446 | 340 |
| 447 DecodeLocalDecls(); | 341 DecodeLocalDecls(); |
| 448 InitSsaEnv(); | 342 InitSsaEnv(); |
| 449 DecodeFunctionBody(); | 343 DecodeFunctionBody(); |
| 450 | 344 |
| 451 if (failed()) return TraceFailed(); | 345 if (failed()) return TraceFailed(); |
| 452 | 346 |
| 347 #if IMPLICIT_FUNCTION_END | |
| 348 // With implicit end support (old style), the function block | |
| 349 // remains on the stack. Other control blocks are an error. | |
| 350 if (control_.size() > 1) { | |
| 351 error(pc_, control_.back().pc, "unterminated control structure"); | |
| 352 return TraceFailed(); | |
| 353 } | |
| 354 | |
| 355 // Assume an implicit end to the function body block. | |
| 356 if (control_.size() == 1) { | |
| 357 Control* c = &control_.back(); | |
| 358 if (ssa_env_->go()) { | |
| 359 FallThruTo(c); | |
| 360 } | |
| 361 | |
| 362 if (c->end_env->go()) { | |
| 363 // Push the end values onto the stack. | |
| 364 stack_.resize(c->stack_depth); | |
| 365 if (c->merge.arity == 1) { | |
| 366 stack_.push_back(c->merge.vals.first); | |
| 367 } else { | |
| 368 for (unsigned i = 0; i < c->merge.arity; i++) { | |
> ahaas 2016/09/16 11:21:30: is merge.arity really of type unsigned and not of […]
> titzer 2016/09/16 12:13:21: Good question. The field is of type uint32_t, but […]
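A minimal standalone sketch of the pattern under discussion, assuming simplified stand-in types (plain ints instead of TFNode*/LocalType; this is not the V8 code itself): a single merge value is stored inline in the union, multiple values go through a separately allocated array, and the arity field — uint32_t in the patch, per the reply above — selects the live member and bounds the copy-back loop.

```cpp
#include <cstdint>
#include <iostream>
#include <vector>

// Stand-in for the decoder's Value (which holds a pc, a TFNode*, and a LocalType).
struct Value {
  int node;
  int type;
};

struct MergeValues {
  uint32_t arity;
  union {
    Value* array;  // live when arity > 1
    Value first;   // live when arity == 1
  } vals;

  Value& first() { return arity == 1 ? vals.first : vals.array[0]; }
};

// Mirrors the "push the end values onto the stack" step at the end of a block.
void PushMergeValues(MergeValues& merge, std::vector<Value>* stack) {
  if (merge.arity == 1) {
    stack->push_back(merge.vals.first);
  } else {
    for (uint32_t i = 0; i < merge.arity; i++) {  // uint32_t loop index
      stack->push_back(merge.vals.array[i]);
    }
  }
}

int main() {
  Value results[] = {{1, 0}, {2, 0}};
  MergeValues merge;
  merge.arity = 2;
  merge.vals.array = results;

  std::vector<Value> stack;
  PushMergeValues(merge, &stack);
  std::cout << "pushed " << stack.size() << " merge values\n";  // pushed 2
}
```

Storing the one-value case inline presumably avoids a zone allocation for the common single-result block.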
| 369 stack_.push_back(c->merge.vals.array[i]); | |
| 370 } | |
| 371 } | |
| 372 | |
| 373 TRACE(" @%-8d #xx:%-20s|", startrel(pc_), "ImplicitReturn"); | |
| 374 SetEnv("function:end", c->end_env); | |
| 375 DoReturn(); | |
| 376 TRACE("\n"); | |
| 377 } | |
| 378 } | |
| 379 #else | |
| 453 if (!control_.empty()) { | 380 if (!control_.empty()) { |
| 454 error(pc_, control_.back().pc, "unterminated control structure"); | 381 error(pc_, control_.back().pc, "unterminated control structure"); |
| 455 return TraceFailed(); | 382 return TraceFailed(); |
| 456 } | 383 } |
| 457 | 384 |
| 458 if (ssa_env_->go()) { | 385 if (!last_end_found_) { |
| 459 TRACE(" @%-8d #xx:%-20s|", startrel(pc_), "ImplicitReturn"); | 386 error(pc_, pc_, "function body must end with \"end\" opcode."); |
| 460 DoReturn(); | 387 return false; |
| 461 if (failed()) return TraceFailed(); | |
| 462 TRACE("\n"); | |
| 463 } | 388 } |
| 389 #endif | |
| 464 | 390 |
| 465 if (FLAG_trace_wasm_decode_time) { | 391 if (FLAG_trace_wasm_decode_time) { |
| 466 double ms = decode_timer.Elapsed().InMillisecondsF(); | 392 double ms = decode_timer.Elapsed().InMillisecondsF(); |
| 467 PrintF("wasm-decode ok (%0.3f ms)\n\n", ms); | 393 PrintF("wasm-decode %s (%0.3f ms)\n\n", ok() ? "ok" : "failed", ms); |
| 468 } else { | 394 } else { |
| 469 TRACE("wasm-decode ok\n\n"); | 395 TRACE("wasm-decode %s\n\n", ok() ? "ok" : "failed"); |
| 470 } | 396 } |
| 471 | 397 |
| 472 return true; | 398 return true; |
| 473 } | 399 } |
| 474 | 400 |
| 475 bool TraceFailed() { | 401 bool TraceFailed() { |
| 476 TRACE("wasm-error module+%-6d func+%d: %s\n\n", baserel(error_pc_), | 402 TRACE("wasm-error module+%-6d func+%d: %s\n\n", baserel(error_pc_), |
| 477 startrel(error_pc_), error_msg_.get()); | 403 startrel(error_pc_), error_msg_.get()); |
| 478 return false; | 404 return false; |
| 479 } | 405 } |
| (...skipping 32 matching lines...) | |
| 512 | 438 |
| 513 Zone* zone_; | 439 Zone* zone_; |
| 514 TFBuilder* builder_; | 440 TFBuilder* builder_; |
| 515 const byte* base_; | 441 const byte* base_; |
| 516 | 442 |
| 517 SsaEnv* ssa_env_; | 443 SsaEnv* ssa_env_; |
| 518 | 444 |
| 519 ZoneVector<LocalType> local_type_vec_; // types of local variables. | 445 ZoneVector<LocalType> local_type_vec_; // types of local variables. |
| 520 ZoneVector<Value> stack_; // stack of values. | 446 ZoneVector<Value> stack_; // stack of values. |
| 521 ZoneVector<Control> control_; // stack of blocks, loops, and ifs. | 447 ZoneVector<Control> control_; // stack of blocks, loops, and ifs. |
| 448 bool last_end_found_; | |
| 522 | 449 |
| 523 inline bool build() { return builder_ && ssa_env_->go(); } | 450 inline bool build() { return builder_ && ssa_env_->go(); } |
| 524 | 451 |
| 525 void InitSsaEnv() { | 452 void InitSsaEnv() { |
| 526 TFNode* start = nullptr; | 453 TFNode* start = nullptr; |
| 527 SsaEnv* ssa_env = reinterpret_cast<SsaEnv*>(zone_->New(sizeof(SsaEnv))); | 454 SsaEnv* ssa_env = reinterpret_cast<SsaEnv*>(zone_->New(sizeof(SsaEnv))); |
| 528 size_t size = sizeof(TFNode*) * EnvironmentCount(); | 455 size_t size = sizeof(TFNode*) * EnvironmentCount(); |
| 529 ssa_env->state = SsaEnv::kReached; | 456 ssa_env->state = SsaEnv::kReached; |
| 530 ssa_env->locals = | 457 ssa_env->locals = |
| 531 size > 0 ? reinterpret_cast<TFNode**>(zone_->New(size)) : nullptr; | 458 size > 0 ? reinterpret_cast<TFNode**>(zone_->New(size)) : nullptr; |
| (...skipping 100 matching lines...) | |
| 632 total_locals_ = local_type_vec_.size(); | 559 total_locals_ = local_type_vec_.size(); |
| 633 } | 560 } |
| 634 | 561 |
| 635 // Decodes the body of a function. | 562 // Decodes the body of a function. |
| 636 void DecodeFunctionBody() { | 563 void DecodeFunctionBody() { |
| 637 TRACE("wasm-decode %p...%p (module+%d, %d bytes) %s\n", | 564 TRACE("wasm-decode %p...%p (module+%d, %d bytes) %s\n", |
| 638 reinterpret_cast<const void*>(start_), | 565 reinterpret_cast<const void*>(start_), |
| 639 reinterpret_cast<const void*>(limit_), baserel(pc_), | 566 reinterpret_cast<const void*>(limit_), baserel(pc_), |
| 640 static_cast<int>(limit_ - start_), builder_ ? "graph building" : ""); | 567 static_cast<int>(limit_ - start_), builder_ ? "graph building" : ""); |
| 641 | 568 |
| 569 { | |
| 570 // Set up initial function block. | |
| 571 SsaEnv* break_env = ssa_env_; | |
| 572 SetEnv("initial env", Steal(break_env)); | |
| 573 PushBlock(break_env); | |
| 574 Control* c = &control_.back(); | |
| 575 c->merge.arity = static_cast<uint32_t>(sig_->return_count()); | |
| 576 | |
| 577 if (c->merge.arity == 1) { | |
| 578 c->merge.vals.first = {pc_, nullptr, sig_->GetReturn(0)}; | |
| 579 } else if (c->merge.arity > 1) { | |
| 580 c->merge.vals.array = zone_->NewArray<Value>(c->merge.arity); | |
| 581 for (unsigned i = 0; i < c->merge.arity; i++) { | |
> ahaas 2016/09/16 11:21:30: same here.
> titzer 2016/09/16 12:13:21: Acknowledged.
| 582 c->merge.vals.array[i] = {pc_, nullptr, sig_->GetReturn(i)}; | |
| 583 } | |
| 584 } | |
| 585 } | |
| 586 | |
| 642 if (pc_ >= limit_) return; // Nothing to do. | 587 if (pc_ >= limit_) return; // Nothing to do. |
| 643 | 588 |
| 644 while (true) { // decoding loop. | 589 while (true) { // decoding loop. |
| 645 unsigned len = 1; | 590 unsigned len = 1; |
| 646 WasmOpcode opcode = static_cast<WasmOpcode>(*pc_); | 591 WasmOpcode opcode = static_cast<WasmOpcode>(*pc_); |
| 647 if (!WasmOpcodes::IsPrefixOpcode(opcode)) { | 592 if (!WasmOpcodes::IsPrefixOpcode(opcode)) { |
| 648 TRACE(" @%-8d #%02x:%-20s|", startrel(pc_), opcode, | 593 TRACE(" @%-8d #%02x:%-20s|", startrel(pc_), opcode, |
| 649 WasmOpcodes::ShortOpcodeName(opcode)); | 594 WasmOpcodes::ShortOpcodeName(opcode)); |
| 650 } | 595 } |
| 651 | 596 |
| 652 FunctionSig* sig = WasmOpcodes::Signature(opcode); | 597 FunctionSig* sig = WasmOpcodes::Signature(opcode); |
| 653 if (sig) { | 598 if (sig) { |
| 654 BuildSimpleOperator(opcode, sig); | 599 BuildSimpleOperator(opcode, sig); |
| 655 } else { | 600 } else { |
| 656 // Complex bytecode. | 601 // Complex bytecode. |
| 657 switch (opcode) { | 602 switch (opcode) { |
| 658 case kExprNop: | 603 case kExprNop: |
| 659 Push(kAstStmt, nullptr); | |
| 660 break; | 604 break; |
| 661 case kExprBlock: { | 605 case kExprBlock: { |
| 662 // The break environment is the outer environment. | 606 // The break environment is the outer environment. |
| 607 BlockTypeOperand operand(this, pc_); | |
| 663 SsaEnv* break_env = ssa_env_; | 608 SsaEnv* break_env = ssa_env_; |
| 664 PushBlock(break_env); | 609 PushBlock(break_env); |
| 665 SetEnv("block:start", Steal(break_env)); | 610 SetEnv("block:start", Steal(break_env)); |
| 611 SetBlockType(&control_.back(), operand); | |
| 612 len = 1 + operand.length; | |
| 666 break; | 613 break; |
| 667 } | 614 } |
| 668 case kExprThrow: { | 615 case kExprThrow: { |
| 669 CHECK_PROTOTYPE_OPCODE(wasm_eh_prototype); | 616 CHECK_PROTOTYPE_OPCODE(wasm_eh_prototype); |
| 670 Value value = Pop(0, kAstI32); | 617 Value value = Pop(0, kAstI32); |
| 671 BUILD(Throw, value.node); | 618 BUILD(Throw, value.node); |
| 672 break; | 619 break; |
| 673 } | 620 } |
| 674 case kExprTry: { | 621 case kExprTry: { |
| 675 CHECK_PROTOTYPE_OPCODE(wasm_eh_prototype); | 622 CHECK_PROTOTYPE_OPCODE(wasm_eh_prototype); |
| 623 BlockTypeOperand operand(this, pc_); | |
| 676 SsaEnv* outer_env = ssa_env_; | 624 SsaEnv* outer_env = ssa_env_; |
| 677 SsaEnv* try_env = Steal(outer_env); | 625 SsaEnv* try_env = Steal(outer_env); |
| 678 SsaEnv* catch_env = Split(try_env); | 626 SsaEnv* catch_env = Split(try_env); |
| 679 PushTry(outer_env, catch_env); | 627 PushTry(outer_env, catch_env); |
| 680 SetEnv("try:start", try_env); | 628 SetEnv("try_catch:start", try_env); |
| 629 SetBlockType(&control_.back(), operand); | |
| 630 len = 1 + operand.length; | |
| 681 break; | 631 break; |
| 682 } | 632 } |
| 683 case kExprCatch: { | 633 case kExprCatch: { |
| 684 CHECK_PROTOTYPE_OPCODE(wasm_eh_prototype); | 634 CHECK_PROTOTYPE_OPCODE(wasm_eh_prototype); |
| 685 LocalIndexOperand operand(this, pc_); | 635 LocalIndexOperand operand(this, pc_); |
| 686 len = 1 + operand.length; | 636 len = 1 + operand.length; |
| 687 | 637 |
| 688 if (control_.empty()) { | 638 if (control_.empty()) { |
| 689 error(pc_, "catch does not match a any try"); | 639 error(pc_, "catch does not match any try"); |
| 690 break; | 640 break; |
| 691 } | 641 } |
| 692 | 642 |
| 693 Control* c = &control_.back(); | 643 Control* c = &control_.back(); |
| 694 if (!c->is_try()) { | 644 if (!c->is_try()) { |
| 695 error(pc_, "catch does not match a try"); | 645 error(pc_, "catch does not match any try"); |
| 696 break; | 646 break; |
| 697 } | 647 } |
| 698 | 648 |
| 699 if (c->catch_env == nullptr) { | 649 if (c->catch_env == nullptr) { |
| 700 error(pc_, "catch already present for try with catch"); | 650 error(pc_, "catch already present for try with catch"); |
| 701 break; | 651 break; |
| 702 } | 652 } |
| 703 | 653 |
| 704 Goto(ssa_env_, c->end_env); | 654 Goto(ssa_env_, c->end_env); |
| 705 | 655 |
| 706 SsaEnv* catch_env = c->catch_env; | 656 SsaEnv* catch_env = c->catch_env; |
| 707 c->catch_env = nullptr; | 657 c->catch_env = nullptr; |
| 708 SetEnv("catch:begin", catch_env); | 658 SetEnv("catch:begin", catch_env); |
| 709 | 659 |
| 710 if (Validate(pc_, operand)) { | 660 if (Validate(pc_, operand)) { |
| 711 // TODO(jpp): figure out how thrown value is propagated. It is | 661 // TODO(jpp): figure out how thrown value is propagated. It is |
| 712 // unlikely to be a value on the stack. | 662 // unlikely to be a value on the stack. |
| 713 if (ssa_env_->locals) { | 663 if (ssa_env_->locals) { |
| 714 ssa_env_->locals[operand.index] = nullptr; | 664 ssa_env_->locals[operand.index] = nullptr; |
| 715 } | 665 } |
| 716 } | 666 } |
| 717 | 667 |
| 718 PopUpTo(c->stack_depth); | 668 PopUpTo(c->stack_depth); |
| 719 | |
| 720 break; | 669 break; |
| 721 } | 670 } |
| 722 case kExprLoop: { | 671 case kExprLoop: { |
| 723 // The break environment is the outer environment. | 672 BlockTypeOperand operand(this, pc_); |
| 724 SsaEnv* break_env = ssa_env_; | 673 SsaEnv* finish_try_env = Steal(ssa_env_); |
| 725 PushBlock(break_env); | |
| 726 SsaEnv* finish_try_env = Steal(break_env); | |
| 727 // The continue environment is the inner environment. | 674 // The continue environment is the inner environment. |
| 728 PrepareForLoop(pc_, finish_try_env); | 675 PrepareForLoop(pc_, finish_try_env); |
| 729 SetEnv("loop:start", Split(finish_try_env)); | 676 SetEnv("loop:start", Split(finish_try_env)); |
| 730 ssa_env_->SetNotMerged(); | 677 ssa_env_->SetNotMerged(); |
| 731 PushLoop(finish_try_env); | 678 PushLoop(finish_try_env); |
| 679 SetBlockType(&control_.back(), operand); | |
| 680 len = 1 + operand.length; | |
| 732 break; | 681 break; |
| 733 } | 682 } |
| 734 case kExprIf: { | 683 case kExprIf: { |
| 735 // Condition on top of stack. Split environments for branches. | 684 // Condition on top of stack. Split environments for branches. |
| 685 BlockTypeOperand operand(this, pc_); | |
| 736 Value cond = Pop(0, kAstI32); | 686 Value cond = Pop(0, kAstI32); |
| 737 TFNode* if_true = nullptr; | 687 TFNode* if_true = nullptr; |
| 738 TFNode* if_false = nullptr; | 688 TFNode* if_false = nullptr; |
| 739 BUILD(Branch, cond.node, &if_true, &if_false); | 689 BUILD(Branch, cond.node, &if_true, &if_false); |
| 740 SsaEnv* end_env = ssa_env_; | 690 SsaEnv* end_env = ssa_env_; |
| 741 SsaEnv* false_env = Split(ssa_env_); | 691 SsaEnv* false_env = Split(ssa_env_); |
| 742 false_env->control = if_false; | 692 false_env->control = if_false; |
| 743 SsaEnv* true_env = Steal(ssa_env_); | 693 SsaEnv* true_env = Steal(ssa_env_); |
| 744 true_env->control = if_true; | 694 true_env->control = if_true; |
| 745 PushIf(end_env, false_env); | 695 PushIf(end_env, false_env); |
| 746 SetEnv("if:true", true_env); | 696 SetEnv("if:true", true_env); |
| 697 SetBlockType(&control_.back(), operand); | |
| 698 len = 1 + operand.length; | |
| 747 break; | 699 break; |
| 748 } | 700 } |
| 749 case kExprElse: { | 701 case kExprElse: { |
| 750 if (control_.empty()) { | 702 if (control_.empty()) { |
| 751 error(pc_, "else does not match any if"); | 703 error(pc_, "else does not match any if"); |
| 752 break; | 704 break; |
| 753 } | 705 } |
| 754 Control* c = &control_.back(); | 706 Control* c = &control_.back(); |
| 755 if (!c->is_if()) { | 707 if (!c->is_if()) { |
| 756 error(pc_, c->pc, "else does not match an if"); | 708 error(pc_, c->pc, "else does not match an if"); |
| 757 break; | 709 break; |
| 758 } | 710 } |
| 759 if (c->false_env == nullptr) { | 711 if (c->false_env == nullptr) { |
| 760 error(pc_, c->pc, "else already present for if"); | 712 error(pc_, c->pc, "else already present for if"); |
| 761 break; | 713 break; |
| 762 } | 714 } |
| 763 Value val = PopUpTo(c->stack_depth); | 715 FallThruTo(c); |
| 764 MergeInto(c->end_env, &c->node, &c->type, val); | |
| 765 // Switch to environment for false branch. | 716 // Switch to environment for false branch. |
| 717 stack_.resize(c->stack_depth); | |
| 766 SetEnv("if_else:false", c->false_env); | 718 SetEnv("if_else:false", c->false_env); |
| 767 c->false_env = nullptr; // record that an else is already seen | 719 c->false_env = nullptr; // record that an else is already seen |
| 768 break; | 720 break; |
| 769 } | 721 } |
| 770 case kExprEnd: { | 722 case kExprEnd: { |
| 771 if (control_.empty()) { | 723 if (control_.empty()) { |
| 772 error(pc_, "end does not match any if, try, or block"); | 724 error(pc_, "end does not match any if, try, or block"); |
| 773 break; | 725 return; |
| 774 } | 726 } |
| 775 const char* name = "block:end"; | 727 const char* name = "block:end"; |
| 776 Control* c = &control_.back(); | 728 Control* c = &control_.back(); |
| 777 Value val = PopUpTo(c->stack_depth); | 729 if (c->is_loop()) { |
| 778 if (c->is_loop) { | 730 // A loop just leaves the values on the stack. |
| 779 // Loops always push control in pairs. | 731 TypeCheckLoopFallThru(c); |
| 780 PopControl(); | 732 PopControl(); |
| 781 c = &control_.back(); | 733 SetEnv("loop:end", ssa_env_); |
| 782 name = "loop:end"; | 734 break; |
| 783 } else if (c->is_if()) { | 735 } |
| 736 if (c->is_if()) { | |
| 784 if (c->false_env != nullptr) { | 737 if (c->false_env != nullptr) { |
| 785 // End the true branch of a one-armed if. | 738 // End the true branch of a one-armed if. |
| 786 Goto(c->false_env, c->end_env); | 739 Goto(c->false_env, c->end_env); |
| 787 val = {val.pc, nullptr, kAstStmt}; | 740 if (ssa_env_->go() && stack_.size() != c->stack_depth) { |
| 741 error(pc_, pc_, "end of if expected empty stack"); | |
| 742 stack_.resize(c->stack_depth); | |
| 743 } | |
| 744 if (c->merge.arity > 0) { | |
| 745 error(pc_, pc_, "non-void one-armed if"); | |
| 746 } | |
| 788 name = "if:merge"; | 747 name = "if:merge"; |
| 789 } else { | 748 } else { |
| 790 // End the false branch of a two-armed if. | 749 // End the false branch of a two-armed if. |
| 791 name = "if_else:merge"; | 750 name = "if_else:merge"; |
| 792 } | 751 } |
| 793 } else if (c->is_try()) { | 752 } else if (c->is_try()) { |
| 794 name = "try:end"; | 753 name = "try:end"; |
| 795 | 754 |
| 796 // validate that catch was seen. | 755 // validate that catch was seen. |
| 797 if (c->catch_env != nullptr) { | 756 if (c->catch_env != nullptr) { |
| 798 error(pc_, "missing catch in try"); | 757 error(pc_, "missing catch in try"); |
| 799 break; | 758 break; |
| 800 } | 759 } |
| 801 } | 760 } |
| 761 FallThruTo(c); | |
| 762 SetEnv(name, c->end_env); | |
| 802 | 763 |
| 803 if (ssa_env_->go()) { | 764 // Push the end values onto the stack. |
| 804 // Adds a fallthrough edge to the next control block. | 765 stack_.resize(c->stack_depth); |
| 805 MergeInto(c->end_env, &c->node, &c->type, val); | 766 if (c->merge.arity == 1) { |
| 767 stack_.push_back(c->merge.vals.first); | |
| 768 } else { | |
| 769 for (unsigned i = 0; i < c->merge.arity; i++) { | |
| 770 stack_.push_back(c->merge.vals.array[i]); | |
| 771 } | |
| 806 } | 772 } |
| 807 SetEnv(name, c->end_env); | 773 |
| 808 stack_.resize(c->stack_depth); | |
| 809 Push(c->type, c->node); | |
| 810 PopControl(); | 774 PopControl(); |
| 775 | |
| 776 if (control_.empty()) { | |
| 777 // If the last (implicit) control was popped, check we are at end. | |
| 778 if (pc_ + 1 != end_) { | |
| 779 error(pc_, pc_ + 1, "trailing code after function end"); | |
| 780 } | |
| 781 last_end_found_ = true; | |
| 782 if (ssa_env_->go()) { | |
| 783 // The result of the block is the return value. | |
| 784 TRACE(" @%-8d #xx:%-20s|", startrel(pc_), "ImplicitReturn"); | |
| 785 DoReturn(); | |
| 786 TRACE("\n"); | |
| 787 } | |
| 788 return; | |
| 789 } | |
| 811 break; | 790 break; |
| 812 } | 791 } |
| 813 case kExprSelect: { | 792 case kExprSelect: { |
| 814 Value cond = Pop(2, kAstI32); | 793 Value cond = Pop(2, kAstI32); |
| 815 Value fval = Pop(); | 794 Value fval = Pop(); |
| 816 Value tval = Pop(); | 795 Value tval = Pop(); |
| 817 if (tval.type == kAstStmt || tval.type != fval.type) { | 796 if (tval.type == kAstStmt || tval.type != fval.type) { |
| 818 if (tval.type != kAstEnd && fval.type != kAstEnd) { | 797 if (tval.type != kAstEnd && fval.type != kAstEnd) { |
| 819 error(pc_, "type mismatch in select"); | 798 error(pc_, "type mismatch in select"); |
| 820 break; | 799 break; |
| (...skipping 10 matching lines...) | |
| 831 TFNode* phi = builder_->Phi(tval.type, 2, vals, merge); | 810 TFNode* phi = builder_->Phi(tval.type, 2, vals, merge); |
| 832 Push(tval.type, phi); | 811 Push(tval.type, phi); |
| 833 ssa_env_->control = merge; | 812 ssa_env_->control = merge; |
| 834 } else { | 813 } else { |
| 835 Push(tval.type, nullptr); | 814 Push(tval.type, nullptr); |
| 836 } | 815 } |
| 837 break; | 816 break; |
| 838 } | 817 } |
| 839 case kExprBr: { | 818 case kExprBr: { |
| 840 BreakDepthOperand operand(this, pc_); | 819 BreakDepthOperand operand(this, pc_); |
| 841 Value val = {pc_, nullptr, kAstStmt}; | |
| 842 if (operand.arity) val = Pop(); | |
| 843 if (Validate(pc_, operand, control_)) { | 820 if (Validate(pc_, operand, control_)) { |
| 844 BreakTo(operand.target, val); | 821 BreakTo(&control_[control_.size() - operand.depth - 1]); |
> ahaas 2016/09/16 11:21:30: Could you extract a BreakTo which takes operand.de […]
> titzer 2016/09/16 12:13:21: Good idea. In fact, all uses were of this form, so […]
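A hedged sketch of what the suggested refactor might look like — a BreakTo that takes the relative depth (the truncated comment presumably refers to operand.depth) and resolves the target the same way the patch does inline. Decoder, Control, and control_ below are simplified stand-ins, not the decoder's real types, and this is not the actual follow-up change.

```cpp
#include <cstdint>
#include <iostream>
#include <utility>
#include <vector>

struct Control {
  const char* label;
};

class Decoder {
 public:
  explicit Decoder(std::vector<Control> control) : control_(std::move(control)) {}

  // Relative depth 0 is the innermost (most recently pushed) control entry,
  // resolved the same way the patch does inline:
  //   &control_[control_.size() - operand.depth - 1]
  Control* BreakTo(uint32_t depth) {
    return &control_[control_.size() - depth - 1];
  }

 private:
  std::vector<Control> control_;
};

int main() {
  Decoder decoder({{"function"}, {"block"}, {"loop"}});
  std::cout << decoder.BreakTo(0)->label << "\n";  // loop (innermost)
  std::cout << decoder.BreakTo(2)->label << "\n";  // function (outermost)
}
```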
| 845 } | 822 } |
| 846 len = 1 + operand.length; | 823 len = 1 + operand.length; |
| 847 Push(kAstEnd, nullptr); | 824 EndControl(); |
| 848 break; | 825 break; |
| 849 } | 826 } |
| 850 case kExprBrIf: { | 827 case kExprBrIf: { |
| 851 BreakDepthOperand operand(this, pc_); | 828 BreakDepthOperand operand(this, pc_); |
| 852 Value cond = Pop(operand.arity, kAstI32); | 829 Value cond = Pop(0, kAstI32); |
| 853 Value val = {pc_, nullptr, kAstStmt}; | |
| 854 if (operand.arity == 1) val = Pop(); | |
| 855 if (ok() && Validate(pc_, operand, control_)) { | 830 if (ok() && Validate(pc_, operand, control_)) { |
| 856 SsaEnv* fenv = ssa_env_; | 831 SsaEnv* fenv = ssa_env_; |
| 857 SsaEnv* tenv = Split(fenv); | 832 SsaEnv* tenv = Split(fenv); |
| 858 fenv->SetNotMerged(); | 833 fenv->SetNotMerged(); |
| 859 BUILD(Branch, cond.node, &tenv->control, &fenv->control); | 834 BUILD(Branch, cond.node, &tenv->control, &fenv->control); |
| 860 ssa_env_ = tenv; | 835 ssa_env_ = tenv; |
| 861 BreakTo(operand.target, val); | 836 BreakTo(&control_[control_.size() - operand.depth - 1]); |
| 862 ssa_env_ = fenv; | 837 ssa_env_ = fenv; |
| 863 } | 838 } |
| 864 len = 1 + operand.length; | 839 len = 1 + operand.length; |
| 865 Push(kAstStmt, nullptr); | |
| 866 break; | 840 break; |
| 867 } | 841 } |
| 868 case kExprBrTable: { | 842 case kExprBrTable: { |
| 869 BranchTableOperand operand(this, pc_); | 843 BranchTableOperand operand(this, pc_); |
| 870 if (Validate(pc_, operand, control_.size())) { | 844 if (Validate(pc_, operand, control_.size())) { |
| 871 Value key = Pop(operand.arity, kAstI32); | 845 Value key = Pop(0, kAstI32); |
| 872 Value val = {pc_, nullptr, kAstStmt}; | |
| 873 if (operand.arity == 1) val = Pop(); | |
| 874 if (failed()) break; | 846 if (failed()) break; |
| 875 | 847 |
| 876 SsaEnv* break_env = ssa_env_; | 848 SsaEnv* break_env = ssa_env_; |
| 877 if (operand.table_count > 0) { | 849 if (operand.table_count > 0) { |
| 878 // Build branches to the various blocks based on the table. | 850 // Build branches to the various blocks based on the table. |
| 879 TFNode* sw = BUILD(Switch, operand.table_count + 1, key.node); | 851 TFNode* sw = BUILD(Switch, operand.table_count + 1, key.node); |
| 880 | 852 |
| 881 SsaEnv* copy = Steal(break_env); | 853 SsaEnv* copy = Steal(break_env); |
| 882 ssa_env_ = copy; | 854 ssa_env_ = copy; |
| 883 for (uint32_t i = 0; i < operand.table_count + 1; ++i) { | 855 for (uint32_t i = 0; i < operand.table_count + 1; ++i) { |
| 884 uint16_t target = operand.read_entry(this, i); | 856 uint16_t target = operand.read_entry(this, i); |
| 885 ssa_env_ = Split(copy); | 857 ssa_env_ = Split(copy); |
| 886 ssa_env_->control = (i == operand.table_count) | 858 ssa_env_->control = (i == operand.table_count) |
| 887 ? BUILD(IfDefault, sw) | 859 ? BUILD(IfDefault, sw) |
| 888 : BUILD(IfValue, i, sw); | 860 : BUILD(IfValue, i, sw); |
| 889 int depth = target; | 861 BreakTo(&control_[control_.size() - target - 1]); |
| 890 Control* c = &control_[control_.size() - depth - 1]; | |
| 891 MergeInto(c->end_env, &c->node, &c->type, val); | |
| 892 } | 862 } |
| 893 } else { | 863 } else { |
| 894 // Only a default target. Do the equivalent of br. | 864 // Only a default target. Do the equivalent of br. |
| 895 uint16_t target = operand.read_entry(this, 0); | 865 uint16_t target = operand.read_entry(this, 0); |
| 896 int depth = target; | 866 BreakTo(&control_[control_.size() - target - 1]); |
| 897 Control* c = &control_[control_.size() - depth - 1]; | |
| 898 MergeInto(c->end_env, &c->node, &c->type, val); | |
| 899 } | 867 } |
| 900 // br_table ends the control flow like br. | 868 // br_table ends the control flow like br. |
| 901 ssa_env_ = break_env; | 869 ssa_env_ = break_env; |
| 902 Push(kAstStmt, nullptr); | |
| 903 } | 870 } |
| 904 len = 1 + operand.length; | 871 len = 1 + operand.length; |
| 905 break; | 872 break; |
| 906 } | 873 } |
| 907 case kExprReturn: { | 874 case kExprReturn: { |
| 908 ReturnArityOperand operand(this, pc_); | |
| 909 if (operand.arity != sig_->return_count()) { | |
| 910 error(pc_, pc_ + 1, "arity mismatch in return"); | |
| 911 } | |
| 912 DoReturn(); | 875 DoReturn(); |
| 913 len = 1 + operand.length; | |
| 914 break; | 876 break; |
| 915 } | 877 } |
| 916 case kExprUnreachable: { | 878 case kExprUnreachable: { |
| 917 Push(kAstEnd, BUILD(Unreachable, position())); | 879 BUILD(Unreachable, position()); |
| 918 ssa_env_->Kill(SsaEnv::kControlEnd); | 880 EndControl(); |
| 919 break; | 881 break; |
| 920 } | 882 } |
| 921 case kExprI8Const: { | 883 case kExprI8Const: { |
| 922 ImmI8Operand operand(this, pc_); | 884 ImmI8Operand operand(this, pc_); |
| 923 Push(kAstI32, BUILD(Int32Constant, operand.value)); | 885 Push(kAstI32, BUILD(Int32Constant, operand.value)); |
| 924 len = 1 + operand.length; | 886 len = 1 + operand.length; |
| 925 break; | 887 break; |
| 926 } | 888 } |
| 927 case kExprI32Const: { | 889 case kExprI32Const: { |
| 928 ImmI32Operand operand(this, pc_); | 890 ImmI32Operand operand(this, pc_); |
| (...skipping 29 matching lines...) | |
| 958 } | 920 } |
| 959 } | 921 } |
| 960 len = 1 + operand.length; | 922 len = 1 + operand.length; |
| 961 break; | 923 break; |
| 962 } | 924 } |
| 963 case kExprSetLocal: { | 925 case kExprSetLocal: { |
| 964 LocalIndexOperand operand(this, pc_); | 926 LocalIndexOperand operand(this, pc_); |
| 965 if (Validate(pc_, operand)) { | 927 if (Validate(pc_, operand)) { |
| 966 Value val = Pop(0, local_type_vec_[operand.index]); | 928 Value val = Pop(0, local_type_vec_[operand.index]); |
| 967 if (ssa_env_->locals) ssa_env_->locals[operand.index] = val.node; | 929 if (ssa_env_->locals) ssa_env_->locals[operand.index] = val.node; |
| 930 } | |
| 931 len = 1 + operand.length; | |
| 932 break; | |
| 933 } | |
| 934 case kExprTeeLocal: { | |
| 935 LocalIndexOperand operand(this, pc_); | |
| 936 if (Validate(pc_, operand)) { | |
| 937 Value val = Pop(0, local_type_vec_[operand.index]); | |
| 938 if (ssa_env_->locals) ssa_env_->locals[operand.index] = val.node; | |
| 968 Push(val.type, val.node); | 939 Push(val.type, val.node); |
| 969 } | 940 } |
| 970 len = 1 + operand.length; | 941 len = 1 + operand.length; |
| 971 break; | 942 break; |
| 972 } | 943 } |
| 944 case kExprDrop: { | |
| 945 Pop(); | |
| 946 break; | |
| 947 } | |
| 973 case kExprGetGlobal: { | 948 case kExprGetGlobal: { |
| 974 GlobalIndexOperand operand(this, pc_); | 949 GlobalIndexOperand operand(this, pc_); |
| 975 if (Validate(pc_, operand)) { | 950 if (Validate(pc_, operand)) { |
| 976 Push(operand.type, BUILD(GetGlobal, operand.index)); | 951 Push(operand.type, BUILD(GetGlobal, operand.index)); |
| 977 } | 952 } |
| 978 len = 1 + operand.length; | 953 len = 1 + operand.length; |
| 979 break; | 954 break; |
| 980 } | 955 } |
| 981 case kExprSetGlobal: { | 956 case kExprSetGlobal: { |
| 982 GlobalIndexOperand operand(this, pc_); | 957 GlobalIndexOperand operand(this, pc_); |
| 983 if (Validate(pc_, operand)) { | 958 if (Validate(pc_, operand)) { |
| 984 Value val = Pop(0, operand.type); | 959 if (operand.global->mutability) { |
| 985 BUILD(SetGlobal, operand.index, val.node); | 960 Value val = Pop(0, operand.type); |
| 986 Push(val.type, val.node); | 961 BUILD(SetGlobal, operand.index, val.node); |
| 962 } else { | |
| 963 error(pc_, pc_ + 1, "immutable global #%u cannot be assigned", | |
| 964 operand.index); | |
| 965 } | |
| 987 } | 966 } |
| 988 len = 1 + operand.length; | 967 len = 1 + operand.length; |
| 989 break; | 968 break; |
| 990 } | 969 } |
| 991 case kExprI32LoadMem8S: | 970 case kExprI32LoadMem8S: |
| 992 len = DecodeLoadMem(kAstI32, MachineType::Int8()); | 971 len = DecodeLoadMem(kAstI32, MachineType::Int8()); |
| 993 break; | 972 break; |
| 994 case kExprI32LoadMem8U: | 973 case kExprI32LoadMem8U: |
| 995 len = DecodeLoadMem(kAstI32, MachineType::Uint8()); | 974 len = DecodeLoadMem(kAstI32, MachineType::Uint8()); |
| 996 break; | 975 break; |
| 997 case kExprI32LoadMem16S: | 976 case kExprI32LoadMem16S: |
| 998 len = DecodeLoadMem(kAstI32, MachineType::Int16()); | 977 len = DecodeLoadMem(kAstI32, MachineType::Int16()); |
| 999 break; | 978 break; |
| 1000 case kExprI32LoadMem16U: | 979 case kExprI32LoadMem16U: |
| 1001 len = DecodeLoadMem(kAstI32, MachineType::Uint16()); | 980 len = DecodeLoadMem(kAstI32, MachineType::Uint16()); |
| 1002 break; | 981 break; |
| 1003 case kExprI32LoadMem: | 982 case kExprI32LoadMem: |
| 1004 len = DecodeLoadMem(kAstI32, MachineType::Int32()); | 983 len = DecodeLoadMem(kAstI32, MachineType::Int32()); |
| 1005 break; | 984 break; |
| 1006 | |
| 1007 case kExprI64LoadMem8S: | 985 case kExprI64LoadMem8S: |
| 1008 len = DecodeLoadMem(kAstI64, MachineType::Int8()); | 986 len = DecodeLoadMem(kAstI64, MachineType::Int8()); |
| 1009 break; | 987 break; |
| 1010 case kExprI64LoadMem8U: | 988 case kExprI64LoadMem8U: |
| 1011 len = DecodeLoadMem(kAstI64, MachineType::Uint8()); | 989 len = DecodeLoadMem(kAstI64, MachineType::Uint8()); |
| 1012 break; | 990 break; |
| 1013 case kExprI64LoadMem16S: | 991 case kExprI64LoadMem16S: |
| 1014 len = DecodeLoadMem(kAstI64, MachineType::Int16()); | 992 len = DecodeLoadMem(kAstI64, MachineType::Int16()); |
| 1015 break; | 993 break; |
| 1016 case kExprI64LoadMem16U: | 994 case kExprI64LoadMem16U: |
| (...skipping 49 matching lines...) | |
| 1066 error("grow_memory is not supported for asmjs modules"); | 1044 error("grow_memory is not supported for asmjs modules"); |
| 1067 } | 1045 } |
| 1068 break; | 1046 break; |
| 1069 case kExprMemorySize: | 1047 case kExprMemorySize: |
| 1070 Push(kAstI32, BUILD(MemSize, 0)); | 1048 Push(kAstI32, BUILD(MemSize, 0)); |
| 1071 break; | 1049 break; |
| 1072 case kExprCallFunction: { | 1050 case kExprCallFunction: { |
| 1073 CallFunctionOperand operand(this, pc_); | 1051 CallFunctionOperand operand(this, pc_); |
| 1074 if (Validate(pc_, operand)) { | 1052 if (Validate(pc_, operand)) { |
| 1075 TFNode** buffer = PopArgs(operand.sig); | 1053 TFNode** buffer = PopArgs(operand.sig); |
| 1076 TFNode* call = | 1054 TFNode** rets = |
| 1077 BUILD(CallDirect, operand.index, buffer, position()); | 1055 BUILD(CallDirect, operand.index, buffer, position()); |
| 1078 Push(GetReturnType(operand.sig), call); | 1056 PushReturns(operand.sig, rets); |
| 1079 } | 1057 } |
| 1080 len = 1 + operand.length; | 1058 len = 1 + operand.length; |
| 1081 break; | 1059 break; |
| 1082 } | 1060 } |
| 1083 case kExprCallIndirect: { | 1061 case kExprCallIndirect: { |
| 1084 CallIndirectOperand operand(this, pc_); | 1062 CallIndirectOperand operand(this, pc_); |
| 1085 if (Validate(pc_, operand)) { | 1063 if (Validate(pc_, operand)) { |
| 1064 Value index = Pop(0, kAstI32); | |
| 1086 TFNode** buffer = PopArgs(operand.sig); | 1065 TFNode** buffer = PopArgs(operand.sig); |
| 1087 Value index = Pop(0, kAstI32); | |
| 1088 if (buffer) buffer[0] = index.node; | 1066 if (buffer) buffer[0] = index.node; |
| 1089 TFNode* call = | 1067 TFNode** rets = |
| 1090 BUILD(CallIndirect, operand.index, buffer, position()); | 1068 BUILD(CallIndirect, operand.index, buffer, position()); |
| 1091 Push(GetReturnType(operand.sig), call); | 1069 PushReturns(operand.sig, rets); |
| 1092 } | 1070 } |
| 1093 len = 1 + operand.length; | 1071 len = 1 + operand.length; |
| 1094 break; | 1072 break; |
| 1095 } | |
| 1096 case kExprCallImport: { | |
| 1097 CallImportOperand operand(this, pc_); | |
| 1098 if (Validate(pc_, operand)) { | |
| 1099 TFNode** buffer = PopArgs(operand.sig); | |
| 1100 TFNode* call = | |
| 1101 BUILD(CallImport, operand.index, buffer, position()); | |
| 1102 Push(GetReturnType(operand.sig), call); | |
| 1103 } | |
| 1104 len = 1 + operand.length; | |
| 1105 break; | |
| 1106 } | 1073 } |
| 1107 case kSimdPrefix: { | 1074 case kSimdPrefix: { |
| 1108 CHECK_PROTOTYPE_OPCODE(wasm_simd_prototype); | 1075 CHECK_PROTOTYPE_OPCODE(wasm_simd_prototype); |
| 1109 len++; | 1076 len++; |
| 1110 byte simd_index = *(pc_ + 1); | 1077 byte simd_index = *(pc_ + 1); |
| 1111 opcode = static_cast<WasmOpcode>(opcode << 8 | simd_index); | 1078 opcode = static_cast<WasmOpcode>(opcode << 8 | simd_index); |
| 1112 TRACE(" @%-4d #%02x #%02x:%-20s|", startrel(pc_), kSimdPrefix, | 1079 TRACE(" @%-4d #%02x #%02x:%-20s|", startrel(pc_), kSimdPrefix, |
| 1113 simd_index, WasmOpcodes::ShortOpcodeName(opcode)); | 1080 simd_index, WasmOpcodes::ShortOpcodeName(opcode)); |
| 1114 len += DecodeSimdOpcode(opcode); | 1081 len += DecodeSimdOpcode(opcode); |
| 1115 break; | 1082 break; |
| 1116 } | 1083 } |
| 1117 default: | 1084 default: { |
| 1118 // Deal with special asmjs opcodes. | 1085 // Deal with special asmjs opcodes. |
| 1119 if (module_->origin == kAsmJsOrigin) { | 1086 if (module_ && module_->origin == kAsmJsOrigin) { |
| 1120 sig = WasmOpcodes::AsmjsSignature(opcode); | 1087 sig = WasmOpcodes::AsmjsSignature(opcode); |
| 1121 if (sig) { | 1088 if (sig) { |
| 1122 BuildSimpleOperator(opcode, sig); | 1089 BuildSimpleOperator(opcode, sig); |
| 1123 } | 1090 } |
| 1124 } else { | 1091 } else { |
| 1125 error("Invalid opcode"); | 1092 error("Invalid opcode"); |
| 1126 return; | 1093 return; |
| 1127 } | 1094 } |
| 1095 } | |
| 1128 } | 1096 } |
| 1129 } // end complex bytecode | 1097 } |
| 1130 | 1098 |
| 1131 #if DEBUG | 1099 #if DEBUG |
| 1132 if (FLAG_trace_wasm_decoder) { | 1100 if (FLAG_trace_wasm_decoder) { |
| 1133 for (size_t i = 0; i < stack_.size(); ++i) { | 1101 for (size_t i = 0; i < stack_.size(); ++i) { |
| 1134 Value& val = stack_[i]; | 1102 Value& val = stack_[i]; |
| 1135 WasmOpcode opcode = static_cast<WasmOpcode>(*val.pc); | 1103 WasmOpcode opcode = static_cast<WasmOpcode>(*val.pc); |
| 1136 if (WasmOpcodes::IsPrefixOpcode(opcode)) { | 1104 if (WasmOpcodes::IsPrefixOpcode(opcode)) { |
| 1137 opcode = static_cast<WasmOpcode>(opcode << 8 | *(val.pc + 1)); | 1105 opcode = static_cast<WasmOpcode>(opcode << 8 | *(val.pc + 1)); |
| 1138 } | 1106 } |
| 1139 PrintF(" %c@%d:%s", WasmOpcodes::ShortNameOf(val.type), | 1107 PrintF(" %c@%d:%s", WasmOpcodes::ShortNameOf(val.type), |
| 1140 static_cast<int>(val.pc - start_), | 1108 static_cast<int>(val.pc - start_), |
| 1141 WasmOpcodes::ShortOpcodeName(opcode)); | 1109 WasmOpcodes::ShortOpcodeName(opcode)); |
| 1142 switch (opcode) { | 1110 switch (opcode) { |
| 1143 case kExprI32Const: { | 1111 case kExprI32Const: { |
| 1144 ImmI32Operand operand(this, val.pc); | 1112 ImmI32Operand operand(this, val.pc); |
| 1145 PrintF("[%d]", operand.value); | 1113 PrintF("[%d]", operand.value); |
| 1146 break; | 1114 break; |
| 1147 } | 1115 } |
| 1148 case kExprGetLocal: { | 1116 case kExprGetLocal: { |
| 1149 LocalIndexOperand operand(this, val.pc); | 1117 LocalIndexOperand operand(this, val.pc); |
| 1150 PrintF("[%u]", operand.index); | 1118 PrintF("[%u]", operand.index); |
| 1151 break; | 1119 break; |
| 1152 } | 1120 } |
| 1153 case kExprSetLocal: { | 1121 case kExprSetLocal: // fallthru |
| 1122 case kExprTeeLocal: { | |
| 1154 LocalIndexOperand operand(this, val.pc); | 1123 LocalIndexOperand operand(this, val.pc); |
| 1155 PrintF("[%u]", operand.index); | 1124 PrintF("[%u]", operand.index); |
| 1156 break; | 1125 break; |
| 1157 } | 1126 } |
| 1158 default: | 1127 default: |
| 1159 break; | 1128 break; |
| 1160 } | 1129 } |
| 1161 } | 1130 } |
| 1162 PrintF("\n"); | 1131 PrintF("\n"); |
| 1163 } | 1132 } |
| 1164 #endif | 1133 #endif |
| 1165 pc_ += len; | 1134 pc_ += len; |
| 1166 if (pc_ >= limit_) { | 1135 if (pc_ >= limit_) { |
| 1167 // End of code reached or exceeded. | 1136 // End of code reached or exceeded. |
| 1168 if (pc_ > limit_ && ok()) error("Beyond end of code"); | 1137 if (pc_ > limit_ && ok()) error("Beyond end of code"); |
| 1169 return; | 1138 return; |
| 1170 } | 1139 } |
| 1171 } // end decode loop | 1140 } // end decode loop |
| 1172 } // end DecodeFunctionBody() | 1141 } |
| 1142 | |
| 1143 void EndControl() { ssa_env_->Kill(SsaEnv::kControlEnd); } | |
| 1144 | |
| 1145 void SetBlockType(Control* c, BlockTypeOperand& operand) { | |
| 1146 c->merge.arity = operand.arity; | |
| 1147 if (c->merge.arity == 1) { | |
| 1148 c->merge.vals.first = {pc_, nullptr, operand.read_entry(0)}; | |
| 1149 } else if (c->merge.arity > 1) { | |
| 1150 c->merge.vals.array = zone_->NewArray<Value>(c->merge.arity); | |
| 1151 for (unsigned i = 0; i < c->merge.arity; i++) { | |
| 1152 c->merge.vals.array[i] = {pc_, nullptr, operand.read_entry(i)}; | |
| 1153 } | |
| 1154 } | |
| 1155 } | |
| 1173 | 1156 |
| 1174 TFNode** PopArgs(FunctionSig* sig) { | 1157 TFNode** PopArgs(FunctionSig* sig) { |
| 1175 if (build()) { | 1158 if (build()) { |
| 1176 int count = static_cast<int>(sig->parameter_count()); | 1159 int count = static_cast<int>(sig->parameter_count()); |
| 1177 TFNode** buffer = builder_->Buffer(count + 1); | 1160 TFNode** buffer = builder_->Buffer(count + 1); |
| 1178 buffer[0] = nullptr; // reserved for code object or function index. | 1161 buffer[0] = nullptr; // reserved for code object or function index. |
| 1179 for (int i = count - 1; i >= 0; i--) { | 1162 for (int i = count - 1; i >= 0; i--) { |
| 1180 buffer[i + 1] = Pop(i, sig->GetParam(i)).node; | 1163 buffer[i + 1] = Pop(i, sig->GetParam(i)).node; |
| 1181 } | 1164 } |
| 1182 return buffer; | 1165 return buffer; |
| (...skipping 43 matching lines...) | |
| 1226 return 1 + operand.length; | 1209 return 1 + operand.length; |
| 1227 } | 1210 } |
| 1228 | 1211 |
| 1229 int DecodeStoreMem(LocalType type, MachineType mem_type) { | 1212 int DecodeStoreMem(LocalType type, MachineType mem_type) { |
| 1230 MemoryAccessOperand operand(this, pc_, | 1213 MemoryAccessOperand operand(this, pc_, |
| 1231 ElementSizeLog2Of(mem_type.representation())); | 1214 ElementSizeLog2Of(mem_type.representation())); |
| 1232 Value val = Pop(1, type); | 1215 Value val = Pop(1, type); |
| 1233 Value index = Pop(0, kAstI32); | 1216 Value index = Pop(0, kAstI32); |
| 1234 BUILD(StoreMem, mem_type, index.node, operand.offset, operand.alignment, | 1217 BUILD(StoreMem, mem_type, index.node, operand.offset, operand.alignment, |
| 1235 val.node, position()); | 1218 val.node, position()); |
| 1236 Push(type, val.node); | |
| 1237 return 1 + operand.length; | 1219 return 1 + operand.length; |
| 1238 } | 1220 } |
| 1239 | 1221 |
| 1240 unsigned DecodeSimdOpcode(WasmOpcode opcode) { | 1222 unsigned DecodeSimdOpcode(WasmOpcode opcode) { |
| 1241 unsigned len = 0; | 1223 unsigned len = 0; |
| 1242 switch (opcode) { | 1224 switch (opcode) { |
| 1243 case kExprI32x4ExtractLane: { | 1225 case kExprI32x4ExtractLane: { |
| 1244 uint8_t lane = this->checked_read_u8(pc_, 2, "lane number"); | 1226 uint8_t lane = this->checked_read_u8(pc_, 2, "lane number"); |
| 1245 if (lane < 0 || lane > 3) { | 1227 if (lane < 0 || lane > 3) { |
| 1246 error(pc_, pc_ + 2, "invalid extract lane value"); | 1228 error(pc_, pc_ + 2, "invalid extract lane value"); |
| (...skipping 26 matching lines...) | |
| 1273 int count = static_cast<int>(sig_->return_count()); | 1255 int count = static_cast<int>(sig_->return_count()); |
| 1274 TFNode** buffer = nullptr; | 1256 TFNode** buffer = nullptr; |
| 1275 if (build()) buffer = builder_->Buffer(count); | 1257 if (build()) buffer = builder_->Buffer(count); |
| 1276 | 1258 |
| 1277 // Pop return values off the stack in reverse order. | 1259 // Pop return values off the stack in reverse order. |
| 1278 for (int i = count - 1; i >= 0; i--) { | 1260 for (int i = count - 1; i >= 0; i--) { |
| 1279 Value val = Pop(i, sig_->GetReturn(i)); | 1261 Value val = Pop(i, sig_->GetReturn(i)); |
| 1280 if (buffer) buffer[i] = val.node; | 1262 if (buffer) buffer[i] = val.node; |
| 1281 } | 1263 } |
| 1282 | 1264 |
| 1283 Push(kAstEnd, BUILD(Return, count, buffer)); | 1265 BUILD(Return, count, buffer); |
| 1284 ssa_env_->Kill(SsaEnv::kControlEnd); | 1266 EndControl(); |
| 1285 } | 1267 } |
| 1286 | 1268 |
| 1287 void Push(LocalType type, TFNode* node) { | 1269 void Push(LocalType type, TFNode* node) { |
| 1288 stack_.push_back({pc_, node, type}); | 1270 if (type != kAstStmt && type != kAstEnd) { |
| 1271 stack_.push_back({pc_, node, type}); | |
| 1272 } | |
| 1273 } | |
| 1274 | |
| 1275 void PushReturns(FunctionSig* sig, TFNode** rets) { | |
| 1276 for (size_t i = 0; i < sig->return_count(); i++) { | |
| 1277 Push(sig->GetReturn(i), rets ? rets[i] : nullptr); | |
ahaas (2016/09/16 11:21:30): Why do you push nullptr here? Please add a comment
titzer (2016/09/16 12:13:21): I added a comment. It's because the decoder can be
| 1278 } | |
| 1289 } | 1279 } |
| 1290 | 1280 |
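A plausible reading of the review question above: the surrounding code (PopArgs and DoReturn only allocate node buffers when build() is true) suggests the decoder can also run in a validation-only mode without a graph builder, in which case rets is nullptr and only the types matter. A minimal sketch of PushReturns with the comment the review asks for, under that assumption (not the committed wording):

    // Sketch only, assuming a validation-only mode without a graph builder:
    void PushReturns(FunctionSig* sig, TFNode** rets) {
      for (size_t i = 0; i < sig->return_count(); i++) {
        // With no graph builder there are no return nodes; push a null node
        // but the declared return type so that type checking still works.
        Push(sig->GetReturn(i), rets ? rets[i] : nullptr);
      }
    }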
| 1291 const char* SafeOpcodeNameAt(const byte* pc) { | 1281 const char* SafeOpcodeNameAt(const byte* pc) { |
| 1292 if (pc >= end_) return "<end>"; | 1282 if (pc >= end_) return "<end>"; |
| 1293 return WasmOpcodes::ShortOpcodeName(static_cast<WasmOpcode>(*pc)); | 1283 return WasmOpcodes::ShortOpcodeName(static_cast<WasmOpcode>(*pc)); |
| 1294 } | 1284 } |
| 1295 | 1285 |
| 1296 Value Pop(int index, LocalType expected) { | 1286 Value Pop(int index, LocalType expected) { |
| 1287 if (!ssa_env_->go()) { | |
| 1288 // Unreachable code is essentially not typechecked. | |
| 1289 return {pc_, nullptr, expected}; | |
| 1290 } | |
| 1297 Value val = Pop(); | 1291 Value val = Pop(); |
| 1298 if (val.type != expected) { | 1292 if (val.type != expected) { |
| 1299 if (val.type != kAstEnd) { | 1293 if (val.type != kAstEnd) { |
| 1300 error(pc_, val.pc, "%s[%d] expected type %s, found %s of type %s", | 1294 error(pc_, val.pc, "%s[%d] expected type %s, found %s of type %s", |
| 1301 SafeOpcodeNameAt(pc_), index, WasmOpcodes::TypeName(expected), | 1295 SafeOpcodeNameAt(pc_), index, WasmOpcodes::TypeName(expected), |
| 1302 SafeOpcodeNameAt(val.pc), WasmOpcodes::TypeName(val.type)); | 1296 SafeOpcodeNameAt(val.pc), WasmOpcodes::TypeName(val.type)); |
| 1303 } | 1297 } |
| 1304 } | 1298 } |
| 1305 return val; | 1299 return val; |
| 1306 } | 1300 } |
| 1307 | 1301 |
| 1308 Value Pop() { | 1302 Value Pop() { |
| 1303 if (!ssa_env_->go()) { | |
| 1304 // Unreachable code is essentially not typechecked. | |
| 1305 return {pc_, nullptr, kAstEnd}; | |
| 1306 } | |
| 1309 size_t limit = control_.empty() ? 0 : control_.back().stack_depth; | 1307 size_t limit = control_.empty() ? 0 : control_.back().stack_depth; |
| 1310 if (stack_.size() <= limit) { | 1308 if (stack_.size() <= limit) { |
| 1311 Value val = {pc_, nullptr, kAstStmt}; | 1309 Value val = {pc_, nullptr, kAstStmt}; |
| 1312 error(pc_, pc_, "%s found empty stack", SafeOpcodeNameAt(pc_)); | 1310 error(pc_, pc_, "%s found empty stack", SafeOpcodeNameAt(pc_)); |
| 1313 return val; | 1311 return val; |
| 1314 } | 1312 } |
| 1315 Value val = stack_.back(); | 1313 Value val = stack_.back(); |
| 1316 stack_.pop_back(); | 1314 stack_.pop_back(); |
| 1317 return val; | 1315 return val; |
| 1318 } | 1316 } |
| 1319 | 1317 |
| 1320 Value PopUpTo(int stack_depth) { | 1318 Value PopUpTo(int stack_depth) { |
| 1319 if (!ssa_env_->go()) { | |
| 1320 // Unreachable code is essentially not typechecked. | |
| 1321 return {pc_, nullptr, kAstEnd}; | |
| 1322 } | |
| 1321 if (stack_depth == stack_.size()) { | 1323 if (stack_depth == stack_.size()) { |
| 1322 Value val = {pc_, nullptr, kAstStmt}; | 1324 Value val = {pc_, nullptr, kAstStmt}; |
| 1323 return val; | 1325 return val; |
| 1324 } else { | 1326 } else { |
| 1325 DCHECK_LE(stack_depth, static_cast<int>(stack_.size())); | 1327 DCHECK_LE(stack_depth, static_cast<int>(stack_.size())); |
| 1326 Value val = Pop(); | 1328 Value val = Pop(); |
| 1327 stack_.resize(stack_depth); | 1329 stack_.resize(stack_depth); |
| 1328 return val; | 1330 return val; |
| 1329 } | 1331 } |
| 1330 } | 1332 } |
| 1331 | 1333 |
| 1332 int baserel(const byte* ptr) { | 1334 int baserel(const byte* ptr) { |
| 1333 return base_ ? static_cast<int>(ptr - base_) : 0; | 1335 return base_ ? static_cast<int>(ptr - base_) : 0; |
| 1334 } | 1336 } |
| 1335 | 1337 |
| 1336 int startrel(const byte* ptr) { return static_cast<int>(ptr - start_); } | 1338 int startrel(const byte* ptr) { return static_cast<int>(ptr - start_); } |
| 1337 | 1339 |
| 1338 void BreakTo(Control* block, const Value& val) { | 1340 void BreakTo(Control* c) { |
| 1339 if (block->is_loop) { | 1341 if (!ssa_env_->go()) return; |
| 1342 if (c->is_loop()) { | |
| 1340 // This is the inner loop block, which does not have a value. | 1343 // This is the inner loop block, which does not have a value. |
| 1341 Goto(ssa_env_, block->end_env); | 1344 Goto(ssa_env_, c->end_env); |
| 1342 } else { | 1345 } else { |
| 1343 // Merge the value into the production for the block. | 1346 // Merge the value(s) into the end of the block. |
| 1344 MergeInto(block->end_env, &block->node, &block->type, val); | 1347 if (static_cast<size_t>(c->stack_depth + c->merge.arity) > |
| 1348 stack_.size()) { | |
| 1349 error( | |
| 1350 pc_, pc_, | |
| 1351 "expected at least %d values on the stack for br to @%d, found %d", | |
| 1352 c->merge.arity, startrel(c->pc), | |
| 1353 static_cast<int>(stack_.size() - c->stack_depth)); | |
| 1354 return; | |
| 1355 } | |
| 1356 MergeValuesInto(c); | |
| 1345 } | 1357 } |
| 1346 } | 1358 } |
| 1347 | 1359 |
| 1348 void MergeInto(SsaEnv* target, TFNode** node, LocalType* type, | 1360 void FallThruTo(Control* c) { |
| 1349 const Value& val) { | |
| 1350 if (!ssa_env_->go()) return; | 1361 if (!ssa_env_->go()) return; |
| 1351 DCHECK_NE(kAstEnd, val.type); | 1362 // Merge the value(s) into the end of the block. |
| 1363 int arity = static_cast<int>(c->merge.arity); | |
ahaas (2016/09/16 11:21:30): is stack_.size really of type int? I think arity s
titzer (2016/09/16 12:13:21): Yeah, we should probably just make that size_t, bu
| 1364 if (c->stack_depth + arity != stack_.size()) { | |
| 1365 error(pc_, pc_, "expected %d elements on the stack for fallthru to @%d", | |
| 1366 arity, startrel(c->pc)); | |
| 1367 return; | |
| 1368 } | |
| 1369 MergeValuesInto(c); | |
| 1370 } | |
| 1352 | 1371 |
| 1372 void TypeCheckLoopFallThru(Control* c) { | |
| 1373 if (!ssa_env_->go()) return; | |
| 1374 // Fallthru must match arity exactly. | |
| 1375 int arity = static_cast<int>(c->merge.arity); | |
| 1376 if (c->stack_depth + arity != stack_.size()) { | |
| 1377 error(pc_, pc_, "expected %d elements on the stack for fallthru to @%d", | |
| 1378 arity, startrel(c->pc)); | |
| 1379 return; | |
| 1380 } | |
| 1381 // Typecheck the values left on the stack. | |
| 1382 for (unsigned i = 0; i < c->merge.arity; i++) { | |
| 1383 Value& val = stack_[stack_.size() - c->merge.arity + i]; | |
ahaas (2016/09/16 11:21:30): I think it would be good to wrap this stack access
titzer (2016/09/16 12:13:21): Not sure what you mean by that. This loop basicall
ahaas (2016/09/19 11:36:02): I meant, could you create an inline function which
titzer (2016/09/21 08:58:43): Ah, ok. Done.
| 1384 Value& old = | |
| 1385 c->merge.arity == 1 ? c->merge.vals.first : c->merge.vals.array[i]; | |
| 1386 if (val.type != old.type) { | |
| 1387 error(pc_, pc_, "type error in merge[%d] (expected %s, got %s)", i, | |
| 1388 WasmOpcodes::TypeName(old.type), WasmOpcodes::TypeName(val.type)); | |
| 1389 return; | |
| 1390 } | |
| 1391 } | |
| 1392 } | |
| 1393 | |
| 1394 void MergeValuesInto(Control* c) { | |
| 1395 SsaEnv* target = c->end_env; | |
| 1353 bool first = target->state == SsaEnv::kUnreachable; | 1396 bool first = target->state == SsaEnv::kUnreachable; |
| 1354 Goto(ssa_env_, target); | 1397 Goto(ssa_env_, target); |
| 1355 | 1398 |
| 1356 if (first) { | 1399 for (unsigned i = 0; i < c->merge.arity; i++) { |
| 1357 // first merge to this environment; set the type and the node. | 1400 Value& val = stack_[stack_.size() - c->merge.arity + i]; |
ahaas (2016/09/16 11:21:31): same here.
titzer (2016/09/16 12:13:21): Acknowledged.
| 1358 *type = val.type; | 1401 Value& old = |
| 1359 *node = val.node; | 1402 c->merge.arity == 1 ? c->merge.vals.first : c->merge.vals.array[i]; |
| 1360 } else if (val.type == *type && val.type != kAstStmt) { | 1403 if (val.type != old.type) { |
| 1361 // merge with the existing value for this block. | 1404 error(pc_, pc_, "type error in merge[%d] (expected %s, got %s)", i, |
| 1362 *node = CreateOrMergeIntoPhi(*type, target->control, *node, val.node); | 1405 WasmOpcodes::TypeName(old.type), WasmOpcodes::TypeName(val.type)); |
| 1363 } else { | 1406 return; |
| 1364 // types don't match, or block is already a stmt. | 1407 } |
| 1365 *type = kAstStmt; | 1408 old.node = |
| 1366 *node = nullptr; | 1409 first ? val.node : CreateOrMergeIntoPhi(old.type, target->control, |
| 1410 old.node, val.node); | |
| 1367 } | 1411 } |
| 1368 } | 1412 } |
| 1369 | 1413 |
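The exchange above about wrapping the repeated stack access ends with the refactoring being done in a later revision: the expression stack_[stack_.size() - c->merge.arity + i] is duplicated in TypeCheckLoopFallThru and MergeValuesInto, so a small accessor is the natural fix. A hypothetical sketch of such a helper (the name and the DCHECKs are illustrative, not taken from this patch set):

    // Hypothetical helper, illustrative only: return the i-th value that a
    // merge of arity c->merge.arity expects on top of the value stack.
    Value& GetMergeValueFromStack(Control* c, unsigned i) {
      DCHECK_GT(c->merge.arity, i);
      DCHECK_GE(stack_.size(), c->stack_depth + c->merge.arity);
      return stack_[stack_.size() - c->merge.arity + i];
    }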
| 1370 void SetEnv(const char* reason, SsaEnv* env) { | 1414 void SetEnv(const char* reason, SsaEnv* env) { |
| 1371 #if DEBUG | 1415 #if DEBUG |
| 1372 if (FLAG_trace_wasm_decoder) { | 1416 if (FLAG_trace_wasm_decoder) { |
| 1373 char state = 'X'; | 1417 char state = 'X'; |
| 1374 if (env) { | 1418 if (env) { |
| 1375 switch (env->state) { | 1419 switch (env->state) { |
| 1376 case SsaEnv::kReached: | 1420 case SsaEnv::kReached: |
| (...skipping 208 matching lines...) | |
| 1585 int depth = 0; | 1629 int depth = 0; |
| 1586 // Iteratively process all AST nodes nested inside the loop. | 1630 // Iteratively process all AST nodes nested inside the loop. |
| 1587 while (pc < limit_ && ok()) { | 1631 while (pc < limit_ && ok()) { |
| 1588 WasmOpcode opcode = static_cast<WasmOpcode>(*pc); | 1632 WasmOpcode opcode = static_cast<WasmOpcode>(*pc); |
| 1589 unsigned length = 1; | 1633 unsigned length = 1; |
| 1590 switch (opcode) { | 1634 switch (opcode) { |
| 1591 case kExprLoop: | 1635 case kExprLoop: |
| 1592 case kExprIf: | 1636 case kExprIf: |
| 1593 case kExprBlock: | 1637 case kExprBlock: |
| 1594 case kExprTry: | 1638 case kExprTry: |
| 1639 length = OpcodeLength(pc); | |
| 1595 depth++; | 1640 depth++; |
| 1596 DCHECK_EQ(1, OpcodeLength(pc)); | |
| 1597 break; | 1641 break; |
| 1598 case kExprSetLocal: { | 1642 case kExprSetLocal: // fallthru |
| 1643 case kExprTeeLocal: { | |
| 1599 LocalIndexOperand operand(this, pc); | 1644 LocalIndexOperand operand(this, pc); |
| 1600 if (assigned->length() > 0 && | 1645 if (assigned->length() > 0 && |
| 1601 operand.index < static_cast<uint32_t>(assigned->length())) { | 1646 operand.index < static_cast<uint32_t>(assigned->length())) { |
| 1602 // Unverified code might have an out-of-bounds index. | 1647 // Unverified code might have an out-of-bounds index. |
| 1603 assigned->Add(operand.index); | 1648 assigned->Add(operand.index); |
| 1604 } | 1649 } |
| 1605 length = 1 + operand.length; | 1650 length = 1 + operand.length; |
| 1606 break; | 1651 break; |
| 1607 } | 1652 } |
| 1608 case kExprEnd: | 1653 case kExprEnd: |
| (...skipping 72 matching lines...) | |
| 1681 WasmFullDecoder decoder(&zone, builder, body); | 1726 WasmFullDecoder decoder(&zone, builder, body); |
| 1682 decoder.Decode(); | 1727 decoder.Decode(); |
| 1683 return decoder.toResult<DecodeStruct*>(nullptr); | 1728 return decoder.toResult<DecodeStruct*>(nullptr); |
| 1684 } | 1729 } |
| 1685 | 1730 |
| 1686 unsigned OpcodeLength(const byte* pc, const byte* end) { | 1731 unsigned OpcodeLength(const byte* pc, const byte* end) { |
| 1687 WasmDecoder decoder(nullptr, nullptr, pc, end); | 1732 WasmDecoder decoder(nullptr, nullptr, pc, end); |
| 1688 return decoder.OpcodeLength(pc); | 1733 return decoder.OpcodeLength(pc); |
| 1689 } | 1734 } |
| 1690 | 1735 |
| 1691 unsigned OpcodeArity(const byte* pc, const byte* end) { | |
| 1692 WasmDecoder decoder(nullptr, nullptr, pc, end); | |
| 1693 return decoder.OpcodeArity(pc); | |
| 1694 } | |
| 1695 | |
| 1696 void PrintAstForDebugging(const byte* start, const byte* end) { | 1736 void PrintAstForDebugging(const byte* start, const byte* end) { |
| 1697 base::AccountingAllocator allocator; | 1737 base::AccountingAllocator allocator; |
| 1698 OFStream os(stdout); | 1738 OFStream os(stdout); |
| 1699 PrintAst(&allocator, FunctionBodyForTesting(start, end), os, nullptr); | 1739 PrintAst(&allocator, FunctionBodyForTesting(start, end), os, nullptr); |
| 1700 } | 1740 } |
| 1701 | 1741 |
| 1702 bool PrintAst(base::AccountingAllocator* allocator, const FunctionBody& body, | 1742 bool PrintAst(base::AccountingAllocator* allocator, const FunctionBody& body, |
| 1703 std::ostream& os, | 1743 std::ostream& os, |
| 1704 std::vector<std::tuple<uint32_t, int, int>>* offset_table) { | 1744 std::vector<std::tuple<uint32_t, int, int>>* offset_table) { |
| 1705 Zone zone(allocator); | 1745 Zone zone(allocator); |
| (...skipping 45 matching lines...) | |
| 1751 const char* padding = | 1791 const char* padding = |
| 1752 " "; | 1792 " "; |
| 1753 os.write(padding, num_whitespaces); | 1793 os.write(padding, num_whitespaces); |
| 1754 os << "k" << WasmOpcodes::OpcodeName(opcode) << ","; | 1794 os << "k" << WasmOpcodes::OpcodeName(opcode) << ","; |
| 1755 | 1795 |
| 1756 for (size_t j = 1; j < length; ++j) { | 1796 for (size_t j = 1; j < length; ++j) { |
| 1757 os << " " << AsHex(i.pc()[j], 2) << ","; | 1797 os << " " << AsHex(i.pc()[j], 2) << ","; |
| 1758 } | 1798 } |
| 1759 | 1799 |
| 1760 switch (opcode) { | 1800 switch (opcode) { |
| 1761 case kExprIf: | |
| 1762 case kExprElse: | 1801 case kExprElse: |
| 1763 case kExprLoop: | |
| 1764 case kExprBlock: | |
| 1765 case kExprTry: | |
| 1766 os << " // @" << i.pc_offset(); | 1802 os << " // @" << i.pc_offset(); |
| 1767 control_depth++; | 1803 control_depth++; |
| 1768 break; | 1804 break; |
| 1805 case kExprLoop: | |
| 1806 case kExprIf: | |
| 1807 case kExprBlock: | |
| 1808 case kExprTry: { | |
| 1809 BlockTypeOperand operand(&i, i.pc()); | |
| 1810 os << " // @" << i.pc_offset(); | |
| 1811 for (unsigned i = 0; i < operand.arity; i++) { | |
| 1812 os << " " << WasmOpcodes::TypeName(operand.read_entry(i)); | |
| 1813 } | |
| 1814 control_depth++; | |
| 1815 break; | |
| 1816 } | |
| 1769 case kExprEnd: | 1817 case kExprEnd: |
| 1770 os << " // @" << i.pc_offset(); | 1818 os << " // @" << i.pc_offset(); |
| 1771 control_depth--; | 1819 control_depth--; |
| 1772 break; | 1820 break; |
| 1773 case kExprBr: { | 1821 case kExprBr: { |
| 1774 BreakDepthOperand operand(&i, i.pc()); | 1822 BreakDepthOperand operand(&i, i.pc()); |
| 1775 os << " // arity=" << operand.arity << " depth=" << operand.depth; | 1823 os << " // depth=" << operand.depth; |
| 1776 break; | 1824 break; |
| 1777 } | 1825 } |
| 1778 case kExprBrIf: { | 1826 case kExprBrIf: { |
| 1779 BreakDepthOperand operand(&i, i.pc()); | 1827 BreakDepthOperand operand(&i, i.pc()); |
| 1780 os << " // arity=" << operand.arity << " depth" << operand.depth; | 1828 os << " // depth=" << operand.depth; |
| 1781 break; | 1829 break; |
| 1782 } | 1830 } |
| 1783 case kExprBrTable: { | 1831 case kExprBrTable: { |
| 1784 BranchTableOperand operand(&i, i.pc()); | 1832 BranchTableOperand operand(&i, i.pc()); |
| 1785 os << " // arity=" << operand.arity | 1833 os << " // entries=" << operand.table_count; |
| 1786 << " entries=" << operand.table_count; | |
| 1787 break; | 1834 break; |
| 1788 } | 1835 } |
| 1789 case kExprCallIndirect: { | 1836 case kExprCallIndirect: { |
| 1790 CallIndirectOperand operand(&i, i.pc()); | 1837 CallIndirectOperand operand(&i, i.pc()); |
| 1838 os << " // sig #" << operand.index; | |
| 1791 if (decoder.Complete(i.pc(), operand)) { | 1839 if (decoder.Complete(i.pc(), operand)) { |
| 1792 os << " // sig #" << operand.index << ": " << *operand.sig; | 1840 os << ": " << *operand.sig; |
| 1793 } else { | |
| 1794 os << " // arity=" << operand.arity << " sig #" << operand.index; | |
| 1795 } | |
| 1796 break; | |
| 1797 } | |
| 1798 case kExprCallImport: { | |
| 1799 CallImportOperand operand(&i, i.pc()); | |
| 1800 if (decoder.Complete(i.pc(), operand)) { | |
| 1801 os << " // import #" << operand.index << ": " << *operand.sig; | |
| 1802 } else { | |
| 1803 os << " // arity=" << operand.arity << " import #" << operand.index; | |
| 1804 } | 1841 } |
| 1805 break; | 1842 break; |
| 1806 } | 1843 } |
| 1807 case kExprCallFunction: { | 1844 case kExprCallFunction: { |
| 1808 CallFunctionOperand operand(&i, i.pc()); | 1845 CallFunctionOperand operand(&i, i.pc()); |
| 1846 os << " // function #" << operand.index; | |
| 1809 if (decoder.Complete(i.pc(), operand)) { | 1847 if (decoder.Complete(i.pc(), operand)) { |
| 1810 os << " // function #" << operand.index << ": " << *operand.sig; | 1848 os << ": " << *operand.sig; |
| 1811 } else { | |
| 1812 os << " // arity=" << operand.arity << " function #" << operand.index; | |
| 1813 } | 1849 } |
| 1814 break; | 1850 break; |
| 1815 } | 1851 } |
| 1816 case kExprReturn: { | |
| 1817 ReturnArityOperand operand(&i, i.pc()); | |
| 1818 os << " // arity=" << operand.arity; | |
| 1819 break; | |
| 1820 } | |
| 1821 default: | 1852 default: |
| 1822 break; | 1853 break; |
| 1823 } | 1854 } |
| 1824 os << std::endl; | 1855 os << std::endl; |
| 1825 ++line_nr; | 1856 ++line_nr; |
| 1826 } | 1857 } |
| 1827 | 1858 |
| 1828 return decoder.ok(); | 1859 return decoder.ok(); |
| 1829 } | 1860 } |
| 1830 | 1861 |
| 1831 BitVector* AnalyzeLoopAssignmentForTesting(Zone* zone, size_t num_locals, | 1862 BitVector* AnalyzeLoopAssignmentForTesting(Zone* zone, size_t num_locals, |
| 1832 const byte* start, const byte* end) { | 1863 const byte* start, const byte* end) { |
| 1833 FunctionBody body = {nullptr, nullptr, nullptr, start, end}; | 1864 FunctionBody body = {nullptr, nullptr, nullptr, start, end}; |
| 1834 WasmFullDecoder decoder(zone, nullptr, body); | 1865 WasmFullDecoder decoder(zone, nullptr, body); |
| 1835 return decoder.AnalyzeLoopAssignmentForTesting(start, num_locals); | 1866 return decoder.AnalyzeLoopAssignmentForTesting(start, num_locals); |
| 1836 } | 1867 } |
| 1837 | 1868 |
| 1838 } // namespace wasm | 1869 } // namespace wasm |
| 1839 } // namespace internal | 1870 } // namespace internal |
| 1840 } // namespace v8 | 1871 } // namespace v8 |