OLD | NEW |
---|---|
1 // Copyright 2015 the V8 project authors. All rights reserved. | 1 // Copyright 2015 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/signature.h" | 5 #include "src/signature.h" |
6 | 6 |
7 #include "src/bit-vector.h" | 7 #include "src/bit-vector.h" |
8 #include "src/flags.h" | 8 #include "src/flags.h" |
9 #include "src/handles.h" | 9 #include "src/handles.h" |
10 #include "src/zone-containers.h" | 10 #include "src/zone-containers.h" |
(...skipping 700 matching lines...)
711 if (count == 0) { | 711 if (count == 0) { |
712 BUILD(Return, 0, builder_->Buffer(0)); | 712 BUILD(Return, 0, builder_->Buffer(0)); |
713 ssa_env_->Kill(); | 713 ssa_env_->Kill(); |
714 Leaf(kAstEnd); | 714 Leaf(kAstEnd); |
715 } else { | 715 } else { |
716 Shift(kAstEnd, count); | 716 Shift(kAstEnd, count); |
717 } | 717 } |
718 break; | 718 break; |
719 } | 719 } |
720 case kExprUnreachable: { | 720 case kExprUnreachable: { |
721 // TODO(clemensh): add source position for unreachable | 721 BUILD(Unreachable, current_position()); |
722 BUILD0(Unreachable); | |
723 ssa_env_->Kill(SsaEnv::kControlEnd); | 722 ssa_env_->Kill(SsaEnv::kControlEnd); |
724 Leaf(kAstEnd, nullptr); | 723 Leaf(kAstEnd, nullptr); |
725 break; | 724 break; |
726 } | 725 } |
727 case kExprI8Const: { | 726 case kExprI8Const: { |
728 ImmI8Operand operand(this, pc_); | 727 ImmI8Operand operand(this, pc_); |
729 Leaf(kAstI32, BUILD(Int32Constant, operand.value)); | 728 Leaf(kAstI32, BUILD(Int32Constant, operand.value)); |
730 len = 1 + operand.length; | 729 len = 1 + operand.length; |
731 break; | 730 break; |
732 } | 731 } |
(...skipping 52 matching lines...)
785 Shift(operand.type, 1); | 784 Shift(operand.type, 1); |
786 } | 785 } |
787 len = 1 + operand.length; | 786 len = 1 + operand.length; |
788 break; | 787 break; |
789 } | 788 } |
790 case kExprI32LoadMem8S: | 789 case kExprI32LoadMem8S: |
791 case kExprI32LoadMem8U: | 790 case kExprI32LoadMem8U: |
792 case kExprI32LoadMem16S: | 791 case kExprI32LoadMem16S: |
793 case kExprI32LoadMem16U: | 792 case kExprI32LoadMem16U: |
794 case kExprI32LoadMem: | 793 case kExprI32LoadMem: |
795 len = DecodeLoadMem(pc_, kAstI32); | 794 len = DecodeLoadMem(kAstI32); |
796 break; | 795 break; |
797 case kExprI64LoadMem8S: | 796 case kExprI64LoadMem8S: |
798 case kExprI64LoadMem8U: | 797 case kExprI64LoadMem8U: |
799 case kExprI64LoadMem16S: | 798 case kExprI64LoadMem16S: |
800 case kExprI64LoadMem16U: | 799 case kExprI64LoadMem16U: |
801 case kExprI64LoadMem32S: | 800 case kExprI64LoadMem32S: |
802 case kExprI64LoadMem32U: | 801 case kExprI64LoadMem32U: |
803 case kExprI64LoadMem: | 802 case kExprI64LoadMem: |
804 len = DecodeLoadMem(pc_, kAstI64); | 803 len = DecodeLoadMem(kAstI64); |
805 break; | 804 break; |
806 case kExprF32LoadMem: | 805 case kExprF32LoadMem: |
807 len = DecodeLoadMem(pc_, kAstF32); | 806 len = DecodeLoadMem(kAstF32); |
808 break; | 807 break; |
809 case kExprF64LoadMem: | 808 case kExprF64LoadMem: |
810 len = DecodeLoadMem(pc_, kAstF64); | 809 len = DecodeLoadMem(kAstF64); |
811 break; | 810 break; |
812 case kExprI32StoreMem8: | 811 case kExprI32StoreMem8: |
813 case kExprI32StoreMem16: | 812 case kExprI32StoreMem16: |
814 case kExprI32StoreMem: | 813 case kExprI32StoreMem: |
815 len = DecodeStoreMem(pc_, kAstI32); | 814 len = DecodeStoreMem(kAstI32); |
816 break; | 815 break; |
817 case kExprI64StoreMem8: | 816 case kExprI64StoreMem8: |
818 case kExprI64StoreMem16: | 817 case kExprI64StoreMem16: |
819 case kExprI64StoreMem32: | 818 case kExprI64StoreMem32: |
820 case kExprI64StoreMem: | 819 case kExprI64StoreMem: |
821 len = DecodeStoreMem(pc_, kAstI64); | 820 len = DecodeStoreMem(kAstI64); |
822 break; | 821 break; |
823 case kExprF32StoreMem: | 822 case kExprF32StoreMem: |
824 len = DecodeStoreMem(pc_, kAstF32); | 823 len = DecodeStoreMem(kAstF32); |
825 break; | 824 break; |
826 case kExprF64StoreMem: | 825 case kExprF64StoreMem: |
827 len = DecodeStoreMem(pc_, kAstF64); | 826 len = DecodeStoreMem(kAstF64); |
828 break; | 827 break; |
829 case kExprMemorySize: | 828 case kExprMemorySize: |
830 Leaf(kAstI32, BUILD(MemSize, 0)); | 829 Leaf(kAstI32, BUILD(MemSize, 0)); |
831 break; | 830 break; |
832 case kExprGrowMemory: | 831 case kExprGrowMemory: |
833 Shift(kAstI32, 1); | 832 Shift(kAstI32, 1); |
834 break; | 833 break; |
835 case kExprCallFunction: { | 834 case kExprCallFunction: { |
836 FunctionIndexOperand operand(this, pc_); | 835 FunctionIndexOperand operand(this, pc_); |
837 if (Validate(pc_, operand)) { | 836 if (Validate(pc_, operand)) { |
(...skipping 40 matching lines...)
878 } | 877 } |
879 return; | 878 return; |
880 } | 879 } |
881 } | 880 } |
882 } | 881 } |
883 | 882 |
884 void PushBlock(SsaEnv* ssa_env) { | 883 void PushBlock(SsaEnv* ssa_env) { |
885 blocks_.push_back({ssa_env, static_cast<int>(stack_.size() - 1)}); | 884 blocks_.push_back({ssa_env, static_cast<int>(stack_.size() - 1)}); |
886 } | 885 } |
887 | 886 |
888 int DecodeLoadMem(const byte* pc, LocalType type) { | 887 int DecodeLoadMem(LocalType type) { |
889 MemoryAccessOperand operand(this, pc); | 888 MemoryAccessOperand operand(this, pc_); |
890 Shift(type, 1); | 889 Shift(type, 1); |
891 return 1 + operand.length; | 890 return 1 + operand.length; |
892 } | 891 } |
893 | 892 |
894 int DecodeStoreMem(const byte* pc, LocalType type) { | 893 int DecodeStoreMem(LocalType type) { |
895 MemoryAccessOperand operand(this, pc); | 894 MemoryAccessOperand operand(this, pc_); |
896 Shift(type, 2); | 895 Shift(type, 2); |
897 return 1 + operand.length; | 896 return 1 + operand.length; |
898 } | 897 } |
899 | 898 |
900 void AddImplicitReturnAtEnd() { | 899 void AddImplicitReturnAtEnd() { |
901 int retcount = static_cast<int>(sig_->return_count()); | 900 int retcount = static_cast<int>(sig_->return_count()); |
902 if (retcount == 0) { | 901 if (retcount == 0) { |
903 BUILD0(ReturnVoid); | 902 BUILD0(ReturnVoid); |
904 return; | 903 return; |
905 } | 904 } |
(...skipping 35 matching lines...)
941 WasmOpcode opcode = p->opcode(); | 940 WasmOpcode opcode = p->opcode(); |
942 TRACE("-----reduce module+%-6d %s func+%d: 0x%02x %s\n", baserel(p->pc()), | 941 TRACE("-----reduce module+%-6d %s func+%d: 0x%02x %s\n", baserel(p->pc()), |
943 indentation(), startrel(p->pc()), opcode, | 942 indentation(), startrel(p->pc()), opcode, |
944 WasmOpcodes::OpcodeName(opcode)); | 943 WasmOpcodes::OpcodeName(opcode)); |
945 FunctionSig* sig = WasmOpcodes::Signature(opcode); | 944 FunctionSig* sig = WasmOpcodes::Signature(opcode); |
946 if (sig) { | 945 if (sig) { |
947 // A simple expression with a fixed signature. | 946 // A simple expression with a fixed signature. |
948 TypeCheckLast(p, sig->GetParam(p->index - 1)); | 947 TypeCheckLast(p, sig->GetParam(p->index - 1)); |
949 if (p->done() && build()) { | 948 if (p->done() && build()) { |
950 if (sig->parameter_count() == 2) { | 949 if (sig->parameter_count() == 2) { |
951 p->tree->node = builder_->Binop(opcode, p->tree->children[0]->node, | 950 p->tree->node = |
952 p->tree->children[1]->node); | 951 builder_->Binop(opcode, p->tree->children[0]->node, |
952 p->tree->children[1]->node, position(p)); | |
953 } else if (sig->parameter_count() == 1) { | 953 } else if (sig->parameter_count() == 1) { |
954 p->tree->node = builder_->Unop(opcode, p->tree->children[0]->node); | 954 p->tree->node = |
955 builder_->Unop(opcode, p->tree->children[0]->node, position(p)); | |
955 } else { | 956 } else { |
956 UNREACHABLE(); | 957 UNREACHABLE(); |
957 } | 958 } |
958 } | 959 } |
959 return; | 960 return; |
960 } | 961 } |
961 | 962 |
962 switch (opcode) { | 963 switch (opcode) { |
963 case kExprBlock: { | 964 case kExprBlock: { |
964 if (p->done()) { | 965 if (p->done()) { |
(...skipping 282 matching lines...)
1247 TypeCheckLast(p, kAstI32); | 1248 TypeCheckLast(p, kAstI32); |
1248 } else { | 1249 } else { |
1249 TypeCheckLast(p, operand.sig->GetParam(p->index - 2)); | 1250 TypeCheckLast(p, operand.sig->GetParam(p->index - 2)); |
1250 } | 1251 } |
1251 if (p->done() && build()) { | 1252 if (p->done() && build()) { |
1252 uint32_t count = p->tree->count; | 1253 uint32_t count = p->tree->count; |
1253 TFNode** buffer = builder_->Buffer(count); | 1254 TFNode** buffer = builder_->Buffer(count); |
1254 for (uint32_t i = 0; i < count; i++) { | 1255 for (uint32_t i = 0; i < count; i++) { |
1255 buffer[i] = p->tree->children[i]->node; | 1256 buffer[i] = p->tree->children[i]->node; |
1256 } | 1257 } |
1257 p->tree->node = builder_->CallIndirect(operand.index, buffer); | 1258 p->tree->node = |
1259 builder_->CallIndirect(operand.index, buffer, position(p)); | |
1258 AddSourcePosition(p); | 1260 AddSourcePosition(p); |
1259 } | 1261 } |
1260 break; | 1262 break; |
1261 } | 1263 } |
1262 case kExprCallImport: { | 1264 case kExprCallImport: { |
1263 ImportIndexOperand operand(this, p->pc()); | 1265 ImportIndexOperand operand(this, p->pc()); |
1264 CHECK(Validate(p->pc(), operand)); | 1266 CHECK(Validate(p->pc(), operand)); |
1265 if (p->index > 0) { | 1267 if (p->index > 0) { |
1266 TypeCheckLast(p, operand.sig->GetParam(p->index - 1)); | 1268 TypeCheckLast(p, operand.sig->GetParam(p->index - 1)); |
1267 } | 1269 } |
(...skipping 52 matching lines...)
1320 p->tree->node, expr->node); | 1322 p->tree->node, expr->node); |
1321 } | 1323 } |
1322 } | 1324 } |
1323 } | 1325 } |
1324 | 1326 |
1325 void ReduceLoadMem(Production* p, LocalType type, MachineType mem_type) { | 1327 void ReduceLoadMem(Production* p, LocalType type, MachineType mem_type) { |
1326 DCHECK_EQ(1, p->index); | 1328 DCHECK_EQ(1, p->index); |
1327 TypeCheckLast(p, kAstI32); // index | 1329 TypeCheckLast(p, kAstI32); // index |
1328 if (build()) { | 1330 if (build()) { |
1329 MemoryAccessOperand operand(this, p->pc()); | 1331 MemoryAccessOperand operand(this, p->pc()); |
1330 p->tree->node = | 1332 p->tree->node = builder_->LoadMem(type, mem_type, p->last()->node, |
1331 builder_->LoadMem(type, mem_type, p->last()->node, operand.offset); | 1333 operand.offset, position(p)); |
1332 } | 1334 } |
1333 } | 1335 } |
1334 | 1336 |
1335 void ReduceStoreMem(Production* p, LocalType type, MachineType mem_type) { | 1337 void ReduceStoreMem(Production* p, LocalType type, MachineType mem_type) { |
1336 if (p->index == 1) { | 1338 if (p->index == 1) { |
1337 TypeCheckLast(p, kAstI32); // index | 1339 TypeCheckLast(p, kAstI32); // index |
1338 } else { | 1340 } else { |
1339 DCHECK_EQ(2, p->index); | 1341 DCHECK_EQ(2, p->index); |
1340 TypeCheckLast(p, type); | 1342 TypeCheckLast(p, type); |
1341 if (build()) { | 1343 if (build()) { |
1342 MemoryAccessOperand operand(this, p->pc()); | 1344 MemoryAccessOperand operand(this, p->pc()); |
1343 TFNode* val = p->tree->children[1]->node; | 1345 TFNode* val = p->tree->children[1]->node; |
1344 builder_->StoreMem(mem_type, p->tree->children[0]->node, operand.offset, | 1346 builder_->StoreMem(mem_type, p->tree->children[0]->node, operand.offset, |
1345 val); | 1347 val, position(p)); |
1346 p->tree->node = val; | 1348 p->tree->node = val; |
1347 } | 1349 } |
1348 } | 1350 } |
1349 } | 1351 } |
1350 | 1352 |
1351 void TypeCheckLast(Production* p, LocalType expected) { | 1353 void TypeCheckLast(Production* p, LocalType expected) { |
1352 LocalType result = p->last()->type; | 1354 LocalType result = p->last()->type; |
1353 if (result == expected) return; | 1355 if (result == expected) return; |
1354 if (result == kAstEnd) return; | 1356 if (result == kAstEnd) return; |
1355 if (expected != kAstStmt) { | 1357 if (expected != kAstStmt) { |
(...skipping 57 matching lines...)
1413 } | 1415 } |
1414 break; | 1416 break; |
1415 } | 1417 } |
1416 case SsaEnv::kMerged: { | 1418 case SsaEnv::kMerged: { |
1417 if (!builder_) break; | 1419 if (!builder_) break; |
1418 TFNode* merge = to->control; | 1420 TFNode* merge = to->control; |
1419 // Extend the existing merge. | 1421 // Extend the existing merge. |
1420 builder_->AppendToMerge(merge, from->control); | 1422 builder_->AppendToMerge(merge, from->control); |
1421 // Merge effects. | 1423 // Merge effects. |
1422 if (builder_->IsPhiWithMerge(to->effect, merge)) { | 1424 if (builder_->IsPhiWithMerge(to->effect, merge)) { |
1423 builder_->AppendToPhi(merge, to->effect, from->effect); | 1425 builder_->AppendToPhi(to->effect, from->effect); |
1424 } else if (to->effect != from->effect) { | 1426 } else if (to->effect != from->effect) { |
1425 uint32_t count = builder_->InputCount(merge); | 1427 uint32_t count = builder_->InputCount(merge); |
1426 TFNode** effects = builder_->Buffer(count); | 1428 TFNode** effects = builder_->Buffer(count); |
1427 for (uint32_t j = 0; j < count - 1; j++) { | 1429 for (uint32_t j = 0; j < count - 1; j++) { |
1428 effects[j] = to->effect; | 1430 effects[j] = to->effect; |
1429 } | 1431 } |
1430 effects[count - 1] = from->effect; | 1432 effects[count - 1] = from->effect; |
1431 to->effect = builder_->EffectPhi(count, effects, merge); | 1433 to->effect = builder_->EffectPhi(count, effects, merge); |
1432 } | 1434 } |
1433 // Merge locals. | 1435 // Merge locals. |
1434 for (int i = EnvironmentCount() - 1; i >= 0; i--) { | 1436 for (int i = EnvironmentCount() - 1; i >= 0; i--) { |
1435 TFNode* tnode = to->locals[i]; | 1437 TFNode* tnode = to->locals[i]; |
1436 TFNode* fnode = from->locals[i]; | 1438 TFNode* fnode = from->locals[i]; |
1437 if (builder_->IsPhiWithMerge(tnode, merge)) { | 1439 if (builder_->IsPhiWithMerge(tnode, merge)) { |
1438 builder_->AppendToPhi(merge, tnode, fnode); | 1440 builder_->AppendToPhi(tnode, fnode); |
1439 } else if (tnode != fnode) { | 1441 } else if (tnode != fnode) { |
1440 uint32_t count = builder_->InputCount(merge); | 1442 uint32_t count = builder_->InputCount(merge); |
1441 TFNode** vals = builder_->Buffer(count); | 1443 TFNode** vals = builder_->Buffer(count); |
1442 for (uint32_t j = 0; j < count - 1; j++) { | 1444 for (uint32_t j = 0; j < count - 1; j++) { |
1443 vals[j] = tnode; | 1445 vals[j] = tnode; |
1444 } | 1446 } |
1445 vals[count - 1] = fnode; | 1447 vals[count - 1] = fnode; |
1446 to->locals[i] = | 1448 to->locals[i] = |
1447 builder_->Phi(local_type_vec_[i], count, vals, merge); | 1449 builder_->Phi(local_type_vec_[i], count, vals, merge); |
1448 } | 1450 } |
1449 } | 1451 } |
1450 break; | 1452 break; |
1451 } | 1453 } |
1452 default: | 1454 default: |
1453 UNREACHABLE(); | 1455 UNREACHABLE(); |
1454 } | 1456 } |
1455 return from->Kill(); | 1457 return from->Kill(); |
1456 } | 1458 } |
1457 | 1459 |
1458 TFNode* CreateOrMergeIntoPhi(LocalType type, TFNode* merge, TFNode* tnode, | 1460 TFNode* CreateOrMergeIntoPhi(LocalType type, TFNode* merge, TFNode* tnode, |
1459 TFNode* fnode) { | 1461 TFNode* fnode) { |
1460 if (builder_->IsPhiWithMerge(tnode, merge)) { | 1462 if (builder_->IsPhiWithMerge(tnode, merge)) { |
1461 builder_->AppendToPhi(merge, tnode, fnode); | 1463 builder_->AppendToPhi(tnode, fnode); |
1462 } else if (tnode != fnode) { | 1464 } else if (tnode != fnode) { |
1463 uint32_t count = builder_->InputCount(merge); | 1465 uint32_t count = builder_->InputCount(merge); |
1464 TFNode** vals = builder_->Buffer(count); | 1466 TFNode** vals = builder_->Buffer(count); |
1465 for (uint32_t j = 0; j < count - 1; j++) vals[j] = tnode; | 1467 for (uint32_t j = 0; j < count - 1; j++) vals[j] = tnode; |
1466 vals[count - 1] = fnode; | 1468 vals[count - 1] = fnode; |
1467 return builder_->Phi(type, count, vals, merge); | 1469 return builder_->Phi(type, count, vals, merge); |
1468 } | 1470 } |
1469 return tnode; | 1471 return tnode; |
1470 } | 1472 } |
1471 | 1473 |
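The CreateOrMergeIntoPhi hunk above (and the matching changes in the kMerged case) is easier to follow in isolation. Below is a minimal, hedged sketch of the same idea; Node, IsPhiWithMerge, and the helper are illustrative stand-ins for V8's TFNode and TurboFan graph-builder API, not the real interfaces.

```cpp
// Illustrative sketch only: these types stand in for V8's TFNode / graph
// builder; they are not the real API.
#include <vector>

struct Node {
  bool is_phi = false;
  Node* merge = nullptr;      // the merge node this phi belongs to, if any
  std::vector<Node*> inputs;  // phi: one value per edge; merge: incoming control edges
};

// True if |node| is already a phi attached to |merge|.
bool IsPhiWithMerge(const Node* node, const Node* merge) {
  return node != nullptr && node->is_phi && node->merge == merge;
}

// Called after a new control edge has been appended to |merge|. A value that
// differs between the old paths (|tnode|) and the new path (|fnode|) must
// become a phi with one input per incoming edge.
Node* CreateOrMergeIntoPhi(Node* merge, Node* tnode, Node* fnode) {
  if (IsPhiWithMerge(tnode, merge)) {
    tnode->inputs.push_back(fnode);     // extend the existing phi in place
    return tnode;
  }
  if (tnode == fnode) return tnode;     // identical on every path: no phi needed
  size_t count = merge->inputs.size();  // edge count, including the new edge
  Node* phi = new Node();
  phi->is_phi = true;
  phi->merge = merge;
  phi->inputs.assign(count - 1, tnode); // the old edges all carried tnode
  phi->inputs.push_back(fnode);         // the new edge carries fnode
  return phi;
}
```

This mirrors the kMerged branch above: effects and locals that are identical on all incoming paths stay plain nodes, and only genuinely diverging values pay for an EffectPhi or Phi.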
(...skipping 157 matching lines...)
1629 arity_stack.push_back(arity); | 1631 arity_stack.push_back(arity); |
1630 while (arity_stack.back() == 0) { | 1632 while (arity_stack.back() == 0) { |
1631 arity_stack.pop_back(); | 1633 arity_stack.pop_back(); |
1632 if (arity_stack.empty()) return assigned; // reached end of loop | 1634 if (arity_stack.empty()) return assigned; // reached end of loop |
1633 arity_stack.back()--; | 1635 arity_stack.back()--; |
1634 } | 1636 } |
1635 } | 1637 } |
1636 return assigned; | 1638 return assigned; |
1637 } | 1639 } |
1638 | 1640 |
1639 void AddSourcePosition(Production* p) { | 1641 inline void AddSourcePosition(Production* p) { |
1640 DCHECK_NOT_NULL(p->tree->node); | 1642 DCHECK_NOT_NULL(p->tree->node); |
1641 AddSourcePosition(p->tree->node, p->pc()); | 1643 builder_->SetSourcePosition(p->tree->node, position(p)); |
1642 } | 1644 } |
1643 | 1645 |
1644 void AddSourcePosition(TFNode* node, const byte* pc) { | 1646 inline int current_position() { return position(pc_); } |
titzer (2016/04/28 11:27:00): Let's inline this one, since it's not actually sho
Clemens Hammacher (2016/04/28 12:43:28): .. and it just had one usage. Done.
1647 inline int position(Production* p) { return position(p->pc()); } | |
1648 inline int position(const byte* pc) { | |
1645 int offset = static_cast<int>(pc - start_); | 1649 int offset = static_cast<int>(pc - start_); |
1646 DCHECK_EQ(pc - start_, offset); // overflows cannot happen | 1650 DCHECK_EQ(pc - start_, offset); // overflows cannot happen |
1647 builder_->SetSourcePosition(node, offset); | 1651 return offset; |
1648 } | 1652 } |
1649 }; | 1653 }; |
1650 | 1654 |
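Taken together, the hunks above change the decoder to compute a source position as a byte offset and pass it straight into the node-creating builder calls (Unreachable, Binop, Unop, LoadMem, StoreMem, CallIndirect) rather than attaching it afterwards through the old two-argument AddSourcePosition. A rough sketch of that pattern, using hypothetical Decoder/Builder/Node types rather than V8's real interfaces:

```cpp
// Hedged sketch: Decoder, Builder and Node are illustrative stand-ins, not
// the actual V8 wasm decoder or TurboFan graph builder.
#include <cassert>

using byte = unsigned char;

struct Node {
  int source_position = -1;  // byte offset into the wasm function body
};

struct Builder {
  // The position travels with the call that creates the node, so no separate
  // SetSourcePosition() step is needed afterwards.
  Node* Binop(int opcode, Node* left, Node* right, int position) {
    Node* n = new Node();
    n->source_position = position;
    (void)opcode; (void)left; (void)right;  // elided: actual node construction
    return n;
  }
};

struct Decoder {
  const byte* start_;  // first byte of the decoded function body
  const byte* pc_;     // opcode currently being decoded

  // A wasm source position is the opcode's byte offset from the start of the
  // function body; it must fit into an int.
  int position(const byte* pc) const {
    int offset = static_cast<int>(pc - start_);
    assert(pc - start_ == offset);  // overflow would require a gigantic body
    return offset;
  }
  int current_position() const { return position(pc_); }
};
```

Threading the offset through the creating call keeps the node and its position together at the one point where both are known, which is also why the review thread above drops the two-argument AddSourcePosition helper in favour of the inlined position()/current_position() accessors.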
1651 bool DecodeLocalDecls(AstLocalDecls& decls, const byte* start, | 1655 bool DecodeLocalDecls(AstLocalDecls& decls, const byte* start, |
1652 const byte* end) { | 1656 const byte* end) { |
1653 base::AccountingAllocator allocator; | 1657 base::AccountingAllocator allocator; |
1654 Zone tmp(&allocator); | 1658 Zone tmp(&allocator); |
1655 FunctionBody body = {nullptr, nullptr, nullptr, start, end}; | 1659 FunctionBody body = {nullptr, nullptr, nullptr, start, end}; |
1656 SR_WasmDecoder decoder(&tmp, nullptr, body); | 1660 SR_WasmDecoder decoder(&tmp, nullptr, body); |
1657 return decoder.DecodeLocalDecls(decls); | 1661 return decoder.DecodeLocalDecls(decls); |
(...skipping 133 matching lines...)
1791 BitVector* AnalyzeLoopAssignmentForTesting(Zone* zone, size_t num_locals, | 1795 BitVector* AnalyzeLoopAssignmentForTesting(Zone* zone, size_t num_locals, |
1792 const byte* start, const byte* end) { | 1796 const byte* start, const byte* end) { |
1793 FunctionBody body = {nullptr, nullptr, nullptr, start, end}; | 1797 FunctionBody body = {nullptr, nullptr, nullptr, start, end}; |
1794 SR_WasmDecoder decoder(zone, nullptr, body); | 1798 SR_WasmDecoder decoder(zone, nullptr, body); |
1795 return decoder.AnalyzeLoopAssignmentForTesting(start, num_locals); | 1799 return decoder.AnalyzeLoopAssignmentForTesting(start, num_locals); |
1796 } | 1800 } |
1797 | 1801 |
1798 } // namespace wasm | 1802 } // namespace wasm |
1799 } // namespace internal | 1803 } // namespace internal |
1800 } // namespace v8 | 1804 } // namespace v8 |