| OLD | NEW |
| 1 // Copyright 2015 the V8 project authors. All rights reserved. | 1 // Copyright 2015 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/base/platform/elapsed-timer.h" | 5 #include "src/base/platform/elapsed-timer.h" |
| 6 #include "src/signature.h" | 6 #include "src/signature.h" |
| 7 | 7 |
| 8 #include "src/bit-vector.h" | 8 #include "src/bit-vector.h" |
| 9 #include "src/flags.h" | 9 #include "src/flags.h" |
| 10 #include "src/handles.h" | 10 #include "src/handles.h" |
| (...skipping 94 matching lines...)
| 105 WasmDecoder() : Decoder(nullptr, nullptr), function_env_(nullptr) {} | 105 WasmDecoder() : Decoder(nullptr, nullptr), function_env_(nullptr) {} |
| 106 | 106 |
| 107 protected: | 107 protected: |
| 108 FunctionEnv* function_env_; | 108 FunctionEnv* function_env_; |
| 109 | 109 |
| 110 void Reset(FunctionEnv* function_env, const byte* start, const byte* end) { | 110 void Reset(FunctionEnv* function_env, const byte* start, const byte* end) { |
| 111 Decoder::Reset(start, end); | 111 Decoder::Reset(start, end); |
| 112 function_env_ = function_env; | 112 function_env_ = function_env; |
| 113 } | 113 } |
| 114 | 114 |
| 115 // Load an operand at [pc + 1]. | 115 byte ByteOperand(const byte* pc, const char* msg = "missing 1-byte operand") { |
| 116 template <typename V> | 116 if ((pc + sizeof(byte)) >= limit_) { |
| 117 V Operand(const byte* pc) { | |
| 118 if ((limit_ - pc) < static_cast<int>(1 + sizeof(V))) { | |
| 119 const char* msg = "Expected operand following opcode"; | |
| 120 switch (sizeof(V)) { | |
| 121 case 1: | |
| 122 msg = "Expected 1-byte operand following opcode"; | |
| 123 break; | |
| 124 case 2: | |
| 125 msg = "Expected 2-byte operand following opcode"; | |
| 126 break; | |
| 127 case 4: | |
| 128 msg = "Expected 4-byte operand following opcode"; | |
| 129 break; | |
| 130 default: | |
| 131 break; | |
| 132 } | |
| 133 error(pc, msg); | 117 error(pc, msg); |
| 134 return -1; | 118 return 0; |
| 135 } | 119 } |
| 136 return *reinterpret_cast<const V*>(pc + 1); | 120 return pc[1]; |
| | 121 }
| | 122
| | 123 uint32_t Uint32Operand(const byte* pc) {
| | 124 if ((pc + sizeof(uint32_t)) >= limit_) {
| | 125 error(pc, "missing 4-byte operand");
| | 126 return 0;
| | 127 }
| | 128 return read_u32(pc + 1);
| | 129 }
| | 130
| | 131 uint64_t Uint64Operand(const byte* pc) {
| | 132 if ((pc + sizeof(uint64_t)) >= limit_) {
| | 133 error(pc, "missing 8-byte operand");
| | 134 return 0;
| | 135 }
| | 136 return read_u64(pc + 1);
| 137 } | 137 } |
| 138 | 138 |
| 139 LocalType LocalOperand(const byte* pc, uint32_t* index, int* length) { | 139 LocalType LocalOperand(const byte* pc, uint32_t* index, int* length) { |
| 140 *index = UnsignedLEB128Operand(pc, length); | 140 *index = UnsignedLEB128Operand(pc, length); |
| 141 if (function_env_->IsValidLocal(*index)) { | 141 if (function_env_->IsValidLocal(*index)) { |
| 142 return function_env_->GetLocalType(*index); | 142 return function_env_->GetLocalType(*index); |
| 143 } | 143 } |
| 144 error(pc, "invalid local variable index"); | 144 error(pc, "invalid local variable index"); |
| 145 return kAstStmt; | 145 return kAstStmt; |
| 146 } | 146 } |
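
Aside (illustration only, not part of this CL): the new typed operand readers all follow the same bounds-check-then-read shape. Below is a minimal standalone sketch of that pattern; the reader struct, its field names, and the ok flag are hypothetical stand-ins for the real Decoder state.

    // Sketch of the bounds-check-then-read pattern behind ByteOperand /
    // Uint32Operand / Uint64Operand (hypothetical names, not the V8 Decoder).
    #include <cstdint>
    #include <cstring>

    struct OperandReader {
      const uint8_t* limit;  // one past the last valid byte

      // Reads the 4-byte operand stored at pc + 1; reports failure via *ok.
      uint32_t ReadU32Operand(const uint8_t* pc, bool* ok) {
        if (pc + sizeof(uint32_t) >= limit) {  // operand would run off the end
          *ok = false;
          return 0;
        }
        uint32_t value;
        std::memcpy(&value, pc + 1, sizeof(value));  // unaligned-safe read
        *ok = true;
        return value;
      }
    };

The check mirrors the one in the CL: for an operand of size N starting at pc + 1, the last operand byte sits at pc + N, so the read is allowed only when that byte lies strictly below limit_.
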
| (...skipping 31 matching lines...)
| 178 uint32_t result = 0; | 178 uint32_t result = 0; |
| 179 ReadUnsignedLEB128ErrorCode error_code = | 179 ReadUnsignedLEB128ErrorCode error_code = |
| 180 ReadUnsignedLEB128Operand(pc + 1, limit_, length, &result); | 180 ReadUnsignedLEB128Operand(pc + 1, limit_, length, &result); |
| 181 if (error_code == kInvalidLEB128) error(pc, "invalid LEB128 varint"); | 181 if (error_code == kInvalidLEB128) error(pc, "invalid LEB128 varint"); |
| 182 if (error_code == kMissingLEB128) error(pc, "expected LEB128 varint"); | 182 if (error_code == kMissingLEB128) error(pc, "expected LEB128 varint"); |
| 183 (*length)++; | 183 (*length)++; |
| 184 return result; | 184 return result; |
| 185 } | 185 } |
| 186 | 186 |
| 187 void MemoryAccessOperand(const byte* pc, int* length, uint32_t* offset) { | 187 void MemoryAccessOperand(const byte* pc, int* length, uint32_t* offset) { |
| 188 byte bitfield = Operand<uint8_t>(pc); | 188 byte bitfield = ByteOperand(pc, "missing memory access operand"); |
| 189 if (MemoryAccess::OffsetField::decode(bitfield)) { | 189 if (MemoryAccess::OffsetField::decode(bitfield)) { |
| 190 *offset = UnsignedLEB128Operand(pc + 1, length); | 190 *offset = UnsignedLEB128Operand(pc + 1, length); |
| 191 (*length)++; // to account for the memory access byte | 191 (*length)++; // to account for the memory access byte |
| 192 } else { | 192 } else { |
| 193 *offset = 0; | 193 *offset = 0; |
| 194 *length = 2; | 194 *length = 2; |
| 195 } | 195 } |
| 196 } | 196 } |
| 197 }; | 197 }; |
| 198 | 198 |
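
Aside (illustration only, not part of this CL): UnsignedLEB128Operand and MemoryAccessOperand above consume unsigned LEB128 ("varint") immediates. The sketch below shows that decoding in a self-contained form; it is not the ReadUnsignedLEB128Operand helper the CL calls, just the same wire format.

    // Unsigned LEB128 decode: 7 payload bits per byte, low bits first; the
    // high bit of each byte says whether another byte follows.
    #include <cstdint>

    bool DecodeUnsignedLEB128(const uint8_t* pc, const uint8_t* limit,
                              uint32_t* result, int* length) {
      uint32_t value = 0;
      int shift = 0;
      for (const uint8_t* p = pc; p < limit && shift < 35; ++p, shift += 7) {
        value |= static_cast<uint32_t>(*p & 0x7F) << shift;
        if ((*p & 0x80) == 0) {  // continuation bit clear: last byte
          *result = value;
          *length = static_cast<int>(p - pc) + 1;
          return true;
        }
      }
      return false;  // ran off the end, or more than 5 bytes for a u32
    }

The (*length)++ adjustments in the CL appear to account for the byte that precedes the varint: the opcode in UnsignedLEB128Operand, and the memory-access byte in MemoryAccessOperand.
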
| (...skipping 225 matching lines...)
| 424 return; | 424 return; |
| 425 } | 425 } |
| 426 continue; // back to decoding loop. | 426 continue; // back to decoding loop. |
| 427 } | 427 } |
| 428 | 428 |
| 429 switch (opcode) { | 429 switch (opcode) { |
| 430 case kExprNop: | 430 case kExprNop: |
| 431 Leaf(kAstStmt); | 431 Leaf(kAstStmt); |
| 432 break; | 432 break; |
| 433 case kExprBlock: { | 433 case kExprBlock: { |
| 434 int length = Operand<uint8_t>(pc_); | 434 int length = ByteOperand(pc_); |
| 435 if (length < 1) { | 435 if (length < 1) { |
| 436 Leaf(kAstStmt); | 436 Leaf(kAstStmt); |
| 437 } else { | 437 } else { |
| 438 Shift(kAstEnd, length); | 438 Shift(kAstEnd, length); |
| 439 // The break environment is the outer environment. | 439 // The break environment is the outer environment. |
| 440 SsaEnv* break_env = ssa_env_; | 440 SsaEnv* break_env = ssa_env_; |
| 441 PushBlock(break_env); | 441 PushBlock(break_env); |
| 442 SetEnv("block:start", Steal(break_env)); | 442 SetEnv("block:start", Steal(break_env)); |
| 443 } | 443 } |
| 444 len = 2; | 444 len = 2; |
| 445 break; | 445 break; |
| 446 } | 446 } |
| 447 case kExprLoop: { | 447 case kExprLoop: { |
| 448 int length = Operand<uint8_t>(pc_); | 448 int length = ByteOperand(pc_); |
| 449 if (length < 1) { | 449 if (length < 1) { |
| 450 Leaf(kAstStmt); | 450 Leaf(kAstStmt); |
| 451 } else { | 451 } else { |
| 452 Shift(kAstEnd, length); | 452 Shift(kAstEnd, length); |
| 453 // The break environment is the outer environment. | 453 // The break environment is the outer environment. |
| 454 SsaEnv* break_env = ssa_env_; | 454 SsaEnv* break_env = ssa_env_; |
| 455 PushBlock(break_env); | 455 PushBlock(break_env); |
| 456 SsaEnv* cont_env = Steal(break_env); | 456 SsaEnv* cont_env = Steal(break_env); |
| 457 // The continue environment is the inner environment. | 457 // The continue environment is the inner environment. |
| 458 PrepareForLoop(cont_env); | 458 PrepareForLoop(cont_env); |
| 459 SetEnv("loop:start", Split(cont_env)); | 459 SetEnv("loop:start", Split(cont_env)); |
| 460 if (ssa_env_->go()) ssa_env_->state = SsaEnv::kReached; | 460 if (ssa_env_->go()) ssa_env_->state = SsaEnv::kReached; |
| 461 PushBlock(cont_env); | 461 PushBlock(cont_env); |
| 462 blocks_.back().stack_depth = -1; // no production for inner block. | 462 blocks_.back().stack_depth = -1; // no production for inner block. |
| 463 } | 463 } |
| 464 len = 2; | 464 len = 2; |
| 465 break; | 465 break; |
| 466 } | 466 } |
| 467 case kExprIf: | 467 case kExprIf: |
| 468 Shift(kAstStmt, 2); | 468 Shift(kAstStmt, 2); |
| 469 break; | 469 break; |
| 470 case kExprIfElse: | 470 case kExprIfElse: |
| 471 Shift(kAstEnd, 3); // Result type is typeof(x) in {c ? x : y}. | 471 Shift(kAstEnd, 3); // Result type is typeof(x) in {c ? x : y}. |
| 472 break; | 472 break; |
| 473 case kExprSelect: | 473 case kExprSelect: |
| 474 Shift(kAstStmt, 3); // Result type is typeof(x) in {c ? x : y}. | 474 Shift(kAstStmt, 3); // Result type is typeof(x) in {c ? x : y}. |
| 475 break; | 475 break; |
| 476 case kExprBr: { | 476 case kExprBr: { |
| 477 uint32_t depth = Operand<uint8_t>(pc_); | 477 uint32_t depth = ByteOperand(pc_); |
| 478 Shift(kAstEnd, 1); | 478 Shift(kAstEnd, 1); |
| 479 if (depth >= blocks_.size()) { | 479 if (depth >= blocks_.size()) { |
| 480 error("improperly nested branch"); | 480 error("improperly nested branch"); |
| 481 } | 481 } |
| 482 len = 2; | 482 len = 2; |
| 483 break; | 483 break; |
| 484 } | 484 } |
| 485 case kExprBrIf: { | 485 case kExprBrIf: { |
| 486 uint32_t depth = Operand<uint8_t>(pc_); | 486 uint32_t depth = ByteOperand(pc_); |
| 487 Shift(kAstStmt, 2); | 487 Shift(kAstStmt, 2); |
| 488 if (depth >= blocks_.size()) { | 488 if (depth >= blocks_.size()) { |
| 489 error("improperly nested conditional branch"); | 489 error("improperly nested conditional branch"); |
| 490 } | 490 } |
| 491 len = 2; | 491 len = 2; |
| 492 break; | 492 break; |
| 493 } | 493 } |
| 494 case kExprTableSwitch: { | 494 case kExprTableSwitch: { |
| 495 if (!checkAvailable(5)) { | 495 if (!checkAvailable(5)) { |
| 496 error("expected #tableswitch <cases> <table>, fell off end"); | 496 error("expected #tableswitch <cases> <table>, fell off end"); |
| 497 break; | 497 break; |
| 498 } | 498 } |
| 499 uint16_t case_count = *reinterpret_cast<const uint16_t*>(pc_ + 1); | 499 uint16_t case_count = read_u16(pc_ + 1); |
| 500 uint16_t table_count = *reinterpret_cast<const uint16_t*>(pc_ + 3); | 500 uint16_t table_count = read_u16(pc_ + 3); |
| 501 len = 5 + table_count * 2; | 501 len = 5 + table_count * 2; |
| 502 | 502 |
| 503 if (table_count == 0) { | 503 if (table_count == 0) { |
| 504 error("tableswitch with 0 entries"); | 504 error("tableswitch with 0 entries"); |
| 505 break; | 505 break; |
| 506 } | 506 } |
| 507 | 507 |
| 508 if (!checkAvailable(len)) { | 508 if (!checkAvailable(len)) { |
| 509 error("expected #tableswitch <cases> <table>, fell off end"); | 509 error("expected #tableswitch <cases> <table>, fell off end"); |
| 510 break; | 510 break; |
| 511 } | 511 } |
| 512 | 512 |
| 513 Shift(kAstEnd, 1 + case_count); | 513 Shift(kAstEnd, 1 + case_count); |
| 514 | 514 |
| 515 // Verify table. | 515 // Verify table. |
| 516 for (int i = 0; i < table_count; i++) { | 516 for (int i = 0; i < table_count; i++) { |
| 517 uint16_t target = | 517 uint16_t target = read_u16(pc_ + 5 + i * 2); |
| 518 *reinterpret_cast<const uint16_t*>(pc_ + 5 + i * 2); | |
| 519 if (target >= 0x8000) { | 518 if (target >= 0x8000) { |
| 520 size_t depth = target - 0x8000; | 519 size_t depth = target - 0x8000; |
| 521 if (depth > blocks_.size()) { | 520 if (depth > blocks_.size()) { |
| 522 error(pc_ + 5 + i * 2, "improper branch in tableswitch"); | 521 error(pc_ + 5 + i * 2, "improper branch in tableswitch"); |
| 523 } | 522 } |
| 524 } else { | 523 } else { |
| 525 if (target >= case_count) { | 524 if (target >= case_count) { |
| 526 error(pc_ + 5 + i * 2, "invalid case target in tableswitch"); | 525 error(pc_ + 5 + i * 2, "invalid case target in tableswitch"); |
| 527 } | 526 } |
| 528 } | 527 } |
| (...skipping 11 matching lines...)
| 540 } | 539 } |
| 541 break; | 540 break; |
| 542 } | 541 } |
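
Aside (illustration only, not part of this CL): as the checks above encode it, the tableswitch immediate is a 2-byte case count, a 2-byte table count, then table_count 2-byte targets; a target at or above 0x8000 names an enclosing block by depth, a smaller target names a case. A standalone sketch of a validator over that layout follows; ReadU16 is a hypothetical little-endian helper standing in for read_u16.

    #include <cstddef>
    #include <cstdint>
    #include <cstring>

    // Hypothetical unaligned u16 read (assumes a little-endian host).
    static uint16_t ReadU16(const uint8_t* p) {
      uint16_t v;
      std::memcpy(&v, p, sizeof(v));
      return v;
    }

    // pc points at the tableswitch opcode byte. Mirrors the range checks in
    // the decode loop above; returns false where the decoder would error.
    bool ValidateTableSwitch(const uint8_t* pc, const uint8_t* limit,
                             size_t block_depth) {
      if (pc + 5 > limit) return false;                      // need both counts
      uint16_t case_count = ReadU16(pc + 1);
      uint16_t table_count = ReadU16(pc + 3);
      if (table_count == 0) return false;                    // no entries
      if (pc + 5 + table_count * 2 > limit) return false;    // need whole table
      for (int i = 0; i < table_count; i++) {
        uint16_t target = ReadU16(pc + 5 + i * 2);
        if (target >= 0x8000) {                              // enclosing block
          if (static_cast<size_t>(target - 0x8000) > block_depth) return false;
        } else if (target >= case_count) {                   // case index
          return false;
        }
      }
      return true;
    }
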
| 543 case kExprUnreachable: { | 542 case kExprUnreachable: { |
| 544 BUILD0(Unreachable); | 543 BUILD0(Unreachable); |
| 545 ssa_env_->Kill(SsaEnv::kControlEnd); | 544 ssa_env_->Kill(SsaEnv::kControlEnd); |
| 546 Leaf(kAstEnd, nullptr); | 545 Leaf(kAstEnd, nullptr); |
| 547 break; | 546 break; |
| 548 } | 547 } |
| 549 case kExprI8Const: { | 548 case kExprI8Const: { |
| 550 int32_t value = Operand<int8_t>(pc_); | 549 int32_t value = bit_cast<int8_t>(ByteOperand(pc_)); |
| 551 Leaf(kAstI32, BUILD(Int32Constant, value)); | 550 Leaf(kAstI32, BUILD(Int32Constant, value)); |
| 552 len = 2; | 551 len = 2; |
| 553 break; | 552 break; |
| 554 } | 553 } |
| 555 case kExprI32Const: { | 554 case kExprI32Const: { |
| 556 int32_t value = Operand<int32_t>(pc_); | 555 uint32_t value = Uint32Operand(pc_); |
| 557 Leaf(kAstI32, BUILD(Int32Constant, value)); | 556 Leaf(kAstI32, BUILD(Int32Constant, value)); |
| 558 len = 5; | 557 len = 5; |
| 559 break; | 558 break; |
| 560 } | 559 } |
| 561 case kExprI64Const: { | 560 case kExprI64Const: { |
| 562 int64_t value = Operand<int64_t>(pc_); | 561 uint64_t value = Uint64Operand(pc_); |
| 563 Leaf(kAstI64, BUILD(Int64Constant, value)); | 562 Leaf(kAstI64, BUILD(Int64Constant, value)); |
| 564 len = 9; | 563 len = 9; |
| 565 break; | 564 break; |
| 566 } | 565 } |
| 567 case kExprF32Const: { | 566 case kExprF32Const: { |
| 568 float value = Operand<float>(pc_); | 567 float value = bit_cast<float>(Uint32Operand(pc_)); |
| 569 Leaf(kAstF32, BUILD(Float32Constant, value)); | 568 Leaf(kAstF32, BUILD(Float32Constant, value)); |
| 570 len = 5; | 569 len = 5; |
| 571 break; | 570 break; |
| 572 } | 571 } |
| 573 case kExprF64Const: { | 572 case kExprF64Const: { |
| 574 double value = Operand<double>(pc_); | 573 double value = bit_cast<double>(Uint64Operand(pc_)); |
| 575 Leaf(kAstF64, BUILD(Float64Constant, value)); | 574 Leaf(kAstF64, BUILD(Float64Constant, value)); |
| 576 len = 9; | 575 len = 9; |
| 577 break; | 576 break; |
| 578 } | 577 } |
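
Aside (illustration only, not part of this CL): the constant opcodes now read their immediates through the typed helpers and reinterpret the bits, so kExprF32Const and kExprF64Const keep the exact encoded bit pattern (including NaN payloads) instead of performing a numeric conversion. Below is a memcpy-based stand-in for a same-size bit_cast; V8 has its own bit_cast, this is only a sketch of the idea.

    #include <cstdint>
    #include <cstring>

    // Same-size bit reinterpretation, in the spirit of the bit_cast calls in
    // the constant cases above.
    template <typename To, typename From>
    To BitCast(const From& from) {
      static_assert(sizeof(To) == sizeof(From), "bit_cast requires equal sizes");
      To to;
      std::memcpy(&to, &from, sizeof(to));
      return to;
    }

    // E.g. the f32 case: BitCast<float>(uint32_t{0x7FC00000}) yields a quiet
    // NaN with exactly that payload, as encoded in the module bytes.
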
| 579 case kExprGetLocal: { | 578 case kExprGetLocal: { |
| 580 uint32_t index; | 579 uint32_t index; |
| 581 LocalType type = LocalOperand(pc_, &index, &len); | 580 LocalType type = LocalOperand(pc_, &index, &len); |
| 582 TFNode* val = | 581 TFNode* val = |
| 583 build() && type != kAstStmt ? ssa_env_->locals[index] : nullptr; | 582 build() && type != kAstStmt ? ssa_env_->locals[index] : nullptr; |
| 584 Leaf(type, val); | 583 Leaf(type, val); |
| (...skipping 285 matching lines...)
| 870 TFNode* vals[2] = {p->tree->children[1]->node, | 869 TFNode* vals[2] = {p->tree->children[1]->node, |
| 871 p->tree->children[2]->node}; | 870 p->tree->children[2]->node}; |
| 872 TFNode* phi = builder_->Phi(p->tree->type, 2, vals, merge); | 871 TFNode* phi = builder_->Phi(p->tree->type, 2, vals, merge); |
| 873 p->tree->node = phi; | 872 p->tree->node = phi; |
| 874 ssa_env_->control = merge; | 873 ssa_env_->control = merge; |
| 875 } | 874 } |
| 876 } | 875 } |
| 877 break; | 876 break; |
| 878 } | 877 } |
| 879 case kExprBr: { | 878 case kExprBr: { |
| 880 uint32_t depth = Operand<uint8_t>(p->pc()); | 879 uint32_t depth = ByteOperand(p->pc()); |
| 881 if (depth >= blocks_.size()) { | 880 if (depth >= blocks_.size()) { |
| 882 error("improperly nested branch"); | 881 error("improperly nested branch"); |
| 883 break; | 882 break; |
| 884 } | 883 } |
| 885 Block* block = &blocks_[blocks_.size() - depth - 1]; | 884 Block* block = &blocks_[blocks_.size() - depth - 1]; |
| 886 ReduceBreakToExprBlock(p, block); | 885 ReduceBreakToExprBlock(p, block); |
| 887 break; | 886 break; |
| 888 } | 887 } |
| 889 case kExprBrIf: { | 888 case kExprBrIf: { |
| 890 if (p->index == 1) { | 889 if (p->index == 1) { |
| 891 TypeCheckLast(p, kAstI32); | 890 TypeCheckLast(p, kAstI32); |
| 892 } else if (p->done()) { | 891 } else if (p->done()) { |
| 893 uint32_t depth = Operand<uint8_t>(p->pc()); | 892 uint32_t depth = ByteOperand(p->pc()); |
| 894 if (depth >= blocks_.size()) { | 893 if (depth >= blocks_.size()) { |
| 895 error("improperly nested branch"); | 894 error("improperly nested branch"); |
| 896 break; | 895 break; |
| 897 } | 896 } |
| 898 Block* block = &blocks_[blocks_.size() - depth - 1]; | 897 Block* block = &blocks_[blocks_.size() - depth - 1]; |
| 899 SsaEnv* fenv = ssa_env_; | 898 SsaEnv* fenv = ssa_env_; |
| 900 SsaEnv* tenv = Split(fenv); | 899 SsaEnv* tenv = Split(fenv); |
| 901 BUILD(Branch, p->tree->children[0]->node, &tenv->control, | 900 BUILD(Branch, p->tree->children[0]->node, &tenv->control, |
| 902 &fenv->control); | 901 &fenv->control); |
| 903 ssa_env_ = tenv; | 902 ssa_env_ = tenv; |
| 904 ReduceBreakToExprBlock(p, block); | 903 ReduceBreakToExprBlock(p, block); |
| 905 ssa_env_ = fenv; | 904 ssa_env_ = fenv; |
| 906 } | 905 } |
| 907 break; | 906 break; |
| 908 } | 907 } |
| 909 case kExprTableSwitch: { | 908 case kExprTableSwitch: { |
| 910 if (p->index == 1) { | 909 if (p->index == 1) { |
| 911 // Switch key finished. | 910 // Switch key finished. |
| 912 TypeCheckLast(p, kAstI32); | 911 TypeCheckLast(p, kAstI32); |
| 913 | 912 |
| 914 uint16_t table_count = | 913 uint16_t table_count = read_u16(p->pc() + 3); |
| 915 *reinterpret_cast<const uint16_t*>(p->pc() + 3); | |
| 916 | 914 |
| 917 // Build the switch only if it has more than just a default target. | 915 // Build the switch only if it has more than just a default target. |
| 918 bool build_switch = table_count > 1; | 916 bool build_switch = table_count > 1; |
| 919 TFNode* sw = nullptr; | 917 TFNode* sw = nullptr; |
| 920 if (build_switch) sw = BUILD(Switch, table_count, p->last()->node); | 918 if (build_switch) sw = BUILD(Switch, table_count, p->last()->node); |
| 921 | 919 |
| 922 // Allocate environments for each case. | 920 // Allocate environments for each case. |
| 923 uint16_t case_count = *reinterpret_cast<const uint16_t*>(p->pc() + 1); | 921 uint16_t case_count = read_u16(p->pc() + 1); |
| 924 SsaEnv** case_envs = zone_->NewArray<SsaEnv*>(case_count); | 922 SsaEnv** case_envs = zone_->NewArray<SsaEnv*>(case_count); |
| 925 for (int i = 0; i < case_count; i++) case_envs[i] = UnreachableEnv(); | 923 for (int i = 0; i < case_count; i++) case_envs[i] = UnreachableEnv(); |
| 926 | 924 |
| 927 ifs_.push_back({nullptr, nullptr, case_envs}); | 925 ifs_.push_back({nullptr, nullptr, case_envs}); |
| 928 SsaEnv* break_env = ssa_env_; | 926 SsaEnv* break_env = ssa_env_; |
| 929 PushBlock(break_env); | 927 PushBlock(break_env); |
| 930 SsaEnv* copy = Steal(break_env); | 928 SsaEnv* copy = Steal(break_env); |
| 931 ssa_env_ = copy; | 929 ssa_env_ = copy; |
| 932 | 930 |
| 933 // Build the environments for each case based on the table. | 931 // Build the environments for each case based on the table. |
| 934 const uint16_t* table = | |
| 935 reinterpret_cast<const uint16_t*>(p->pc() + 5); | |
| 936 for (int i = 0; i < table_count; i++) { | 932 for (int i = 0; i < table_count; i++) { |
| 937 uint16_t target = table[i]; | 933 uint16_t target = read_u16(p->pc() + 5 + i * 2); |
| 938 SsaEnv* env = copy; | 934 SsaEnv* env = copy; |
| 939 if (build_switch) { | 935 if (build_switch) { |
| 940 env = Split(env); | 936 env = Split(env); |
| 941 env->control = (i == table_count - 1) ? BUILD(IfDefault, sw) | 937 env->control = (i == table_count - 1) ? BUILD(IfDefault, sw) |
| 942 : BUILD(IfValue, i, sw); | 938 : BUILD(IfValue, i, sw); |
| 943 } | 939 } |
| 944 if (target >= 0x8000) { | 940 if (target >= 0x8000) { |
| 945 // Targets an outer block. | 941 // Targets an outer block. |
| 946 int depth = target - 0x8000; | 942 int depth = target - 0x8000; |
| 947 SsaEnv* tenv = blocks_[blocks_.size() - depth - 1].ssa_env; | 943 SsaEnv* tenv = blocks_[blocks_.size() - depth - 1].ssa_env; |
| (...skipping 737 matching lines...)
| 1685 | 1681 |
| 1686 BitVector* AnalyzeLoopAssignmentForTesting(Zone* zone, FunctionEnv* env, | 1682 BitVector* AnalyzeLoopAssignmentForTesting(Zone* zone, FunctionEnv* env, |
| 1687 const byte* start, const byte* end) { | 1683 const byte* start, const byte* end) { |
| 1688 LoopAssignmentAnalyzer analyzer(zone, env); | 1684 LoopAssignmentAnalyzer analyzer(zone, env); |
| 1689 return analyzer.Analyze(start, end); | 1685 return analyzer.Analyze(start, end); |
| 1690 } | 1686 } |
| 1691 | 1687 |
| 1692 } // namespace wasm | 1688 } // namespace wasm |
| 1693 } // namespace internal | 1689 } // namespace internal |
| 1694 } // namespace v8 | 1690 } // namespace v8 |