Chromium Code Reviews

Unified Diff: src/interpreter/interpreter.cc

Issue 2552883012: [interpreter][stubs] Fixing issues found by machine graph verifier. (Closed)
Patch Set: Created 4 years ago
 // Copyright 2015 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #include "src/interpreter/interpreter.h"

 #include <fstream>
 #include <memory>

 #include "src/ast/prettyprinter.h"
(...skipping 80 matching lines...)

   // Initialization should have been successful.
   DCHECK(IsDispatchTableInitialized());
 }

 void Interpreter::InstallBytecodeHandler(Zone* zone, Bytecode bytecode,
                                          OperandScale operand_scale,
                                          BytecodeGeneratorFunc generator) {
   if (!Bytecodes::BytecodeHasHandler(bytecode, operand_scale)) return;

+  // TODO(ishell): remove this when code stub assembler graphs verification
+  // is enabled for all stubs.
+  bool sav_csa_verify = FLAG_csa_verify;
+  FLAG_csa_verify = DEBUG_BOOL;
+
   InterpreterDispatchDescriptor descriptor(isolate_);
   compiler::CodeAssemblerState state(
       isolate_, zone, descriptor, Code::ComputeFlags(Code::BYTECODE_HANDLER),
       Bytecodes::ToString(bytecode), Bytecodes::ReturnCount(bytecode));
   InterpreterAssembler assembler(&state, bytecode, operand_scale);
   (this->*generator)(&assembler);
   Handle<Code> code = compiler::CodeAssembler::GenerateCode(&state);
   size_t index = GetDispatchTableIndex(bytecode, operand_scale);
   dispatch_table_[index] = code->entry();
   TraceCodegen(code);
   PROFILE(isolate_, CodeCreateEvent(
                         CodeEventListener::BYTECODE_HANDLER_TAG,
                         AbstractCode::cast(*code),
                         Bytecodes::ToString(bytecode, operand_scale).c_str()));
+  FLAG_csa_verify = sav_csa_verify;
 }

 Code* Interpreter::GetBytecodeHandler(Bytecode bytecode,
                                       OperandScale operand_scale) {
   DCHECK(IsDispatchTableInitialized());
   DCHECK(Bytecodes::BytecodeHasHandler(bytecode, operand_scale));
   size_t index = GetDispatchTableIndex(bytecode, operand_scale);
   Address code_entry = dispatch_table_[index];
   return Code::GetCodeFromTargetAddress(code_entry);
 }
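
The hunk above brackets handler generation with a manual save and restore of FLAG_csa_verify, so every bytecode handler goes through the machine graph verifier in debug builds even while verification is still disabled by default for other stubs. A minimal sketch of the same pattern as an RAII guard, assuming a hypothetical FlagScope helper (the patch simply inlines this by hand):

    // Hypothetical scoped guard, not the helper V8 itself uses.
    class FlagScope {
     public:
      FlagScope(bool* flag, bool value) : flag_(flag), saved_(*flag) {
        *flag = value;  // Override for the duration of the scope.
      }
      ~FlagScope() { *flag_ = saved_; }  // Restore the old value on exit.

     private:
      bool* flag_;
      bool saved_;
    };

    // Usage: FlagScope verify_scope(&FLAG_csa_verify, DEBUG_BOOL);
    // The explicit restore at the end of InstallBytecodeHandler would then
    // be unnecessary, even on early returns.
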
(...skipping 753 matching lines...)
   Label if_export(assembler), if_import(assembler), end(assembler);
   __ Branch(__ IntPtrGreaterThan(cell_index, __ IntPtrConstant(0)), &if_export,
             &if_import);

   __ Bind(&if_export);
   {
     Node* regular_exports =
         __ LoadObjectField(module, Module::kRegularExportsOffset);
     // The actual array index is (cell_index - 1).
     Node* export_index = __ IntPtrSub(cell_index, __ IntPtrConstant(1));
-    Node* cell = __ LoadFixedArrayElement(regular_exports, export_index);
+    Node* cell = __ LoadFixedArrayElement(regular_exports, export_index, 0,
+                                          CodeStubAssembler::INTPTR_PARAMETERS);
     __ SetAccumulator(__ LoadObjectField(cell, Cell::kValueOffset));
     __ Goto(&end);
   }

   __ Bind(&if_import);
   {
     Node* regular_imports =
         __ LoadObjectField(module, Module::kRegularImportsOffset);
     // The actual array index is (-cell_index - 1).
     Node* import_index = __ IntPtrSub(__ IntPtrConstant(-1), cell_index);
-    Node* cell = __ LoadFixedArrayElement(regular_imports, import_index);
+    Node* cell = __ LoadFixedArrayElement(regular_imports, import_index, 0,
+                                          CodeStubAssembler::INTPTR_PARAMETERS);
     __ SetAccumulator(__ LoadObjectField(cell, Cell::kValueOffset));
     __ Goto(&end);
   }

   __ Bind(&end);
   __ Dispatch();
 }
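
In both arms above, LoadFixedArrayElement now takes an explicit additional offset (0) and CodeStubAssembler::INTPTR_PARAMETERS, declaring that export_index and import_index are raw word-sized integers rather than tagged Smis. That representation declaration is exactly what the machine graph verifier checks. A simplified sketch of why the parameter mode matters, in plain C++ rather than compiler nodes (the Smi shift of 1 assumes a 32-bit target):

    #include <cstdint>

    enum class ParameterMode { kSmiParameters, kIntPtrParameters };

    // A Smi carries its payload shifted left by the tag size, so a Smi-mode
    // index must be untagged before it can scale an element address, while
    // an intptr-mode index is usable directly.
    intptr_t ToWordIndex(intptr_t index, ParameterMode mode) {
      return mode == ParameterMode::kSmiParameters ? (index >> 1) : index;
    }
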

 // StaModuleVariable <cell_index> <depth>
 //
(...skipping 11 matching lines...)
   Label if_export(assembler), if_import(assembler), end(assembler);
   __ Branch(__ IntPtrGreaterThan(cell_index, __ IntPtrConstant(0)), &if_export,
             &if_import);

   __ Bind(&if_export);
   {
     Node* regular_exports =
         __ LoadObjectField(module, Module::kRegularExportsOffset);
     // The actual array index is (cell_index - 1).
     Node* export_index = __ IntPtrSub(cell_index, __ IntPtrConstant(1));
-    Node* cell = __ LoadFixedArrayElement(regular_exports, export_index);
+    Node* cell = __ LoadFixedArrayElement(regular_exports, export_index, 0,
+                                          CodeStubAssembler::INTPTR_PARAMETERS);
     __ StoreObjectField(cell, Cell::kValueOffset, value);
     __ Goto(&end);
   }

   __ Bind(&if_import);
   {
     // Not supported (probably never).
     __ Abort(kUnsupportedModuleOperation);
     __ Goto(&end);
   }
(...skipping 410 matching lines...)
       var_result.Bind(__ BitcastWordToTaggedSigned(__ Projection(0, pair)));
       __ Goto(&end);
     }
   }
   __ Bind(&slowpath);
   {
     Node* context = __ GetContext();
     AddWithFeedbackStub stub(__ isolate());
     Callable callable =
         Callable(stub.GetCode(), AddWithFeedbackStub::Descriptor(__ isolate()));
-    Node* args[] = {left, right, slot_index, type_feedback_vector, context};
+    Node* args[] = {left, right, __ TruncateWordToWord32(slot_index),
+                    type_feedback_vector, context};
     var_result.Bind(__ CallStubN(callable, args, 1));
     __ Goto(&end);
   }
   __ Bind(&end);
   {
     __ SetAccumulator(var_result.value());
     __ Dispatch();
   }
 }
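
Here slot_index is produced by BytecodeOperandIdx as a word-sized value, while the stub descriptor declares the feedback slot as a 32-bit argument, so the call site now truncates explicitly instead of relying on an implicit narrowing the verifier rejects. A sketch of the underlying representation issue, assuming a 64-bit target and plain C++ in place of compiler nodes:

    #include <cstdint>

    // A machine word is 64 bits wide on a 64-bit target; passing it where a
    // word32 is declared requires stating the truncation explicitly.
    uint32_t TruncateWordToWord32(uintptr_t word) {
      return static_cast<uint32_t>(word);  // Drops the high 32 bits.
    }
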

(...skipping 33 matching lines...)
       var_result.Bind(__ BitcastWordToTaggedSigned(__ Projection(0, pair)));
       __ Goto(&end);
     }
   }
   __ Bind(&slowpath);
   {
     Node* context = __ GetContext();
     SubtractWithFeedbackStub stub(__ isolate());
     Callable callable = Callable(
         stub.GetCode(), SubtractWithFeedbackStub::Descriptor(__ isolate()));
-    Node* args[] = {left, right, slot_index, type_feedback_vector, context};
+    Node* args[] = {left, right, __ TruncateWordToWord32(slot_index),
+                    type_feedback_vector, context};
     var_result.Bind(__ CallStubN(callable, args, 1));
     __ Goto(&end);
   }
   __ Bind(&end);
   {
     __ SetAccumulator(var_result.value());
     __ Dispatch();
   }
 }

(...skipping 270 matching lines...)
   DoDelete(Runtime::kDeleteProperty_Sloppy, assembler);
 }

 void Interpreter::DoJSCall(InterpreterAssembler* assembler,
                            TailCallMode tail_call_mode) {
   Node* function_reg = __ BytecodeOperandReg(0);
   Node* function = __ LoadRegister(function_reg);
   Node* receiver_reg = __ BytecodeOperandReg(1);
   Node* receiver_arg = __ RegisterLocation(receiver_reg);
   Node* receiver_args_count = __ BytecodeOperandCount(2);
-  Node* receiver_count = __ Int32Constant(1);
-  Node* args_count = __ Int32Sub(receiver_args_count, receiver_count);
+  Node* receiver_count = __ IntPtrConstant(1);
+  Node* args_count = __ IntPtrSub(receiver_args_count, receiver_count);
   Node* slot_id = __ BytecodeOperandIdx(3);
   Node* type_feedback_vector = __ LoadTypeFeedbackVector();
   Node* context = __ GetContext();
   Node* result =
       __ CallJSWithFeedback(function, context, receiver_arg, args_count,
                             slot_id, type_feedback_vector, tail_call_mode);
   __ SetAccumulator(result);
   __ Dispatch();
 }
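
BytecodeOperandCount yields a pointer-width value, so the receiver adjustment above moves from Int32Constant/Int32Sub to IntPtrConstant/IntPtrSub; mixing a 32-bit subtraction with word-sized inputs is precisely the representation mismatch the verifier flags on 64-bit targets. The arithmetic itself is unchanged, as this plain C++ sketch (hypothetical names) shows:

    #include <cstdint>

    // args_count = receiver_args_count - 1, kept at pointer width throughout.
    uintptr_t ArgsCountWithoutReceiver(uintptr_t receiver_args_count) {
      const uintptr_t kReceiverCount = 1;  // IntPtrConstant(1) in the patch.
      return receiver_args_count - kReceiverCount;
    }
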

(...skipping 87 matching lines...)

 // CallJSRuntime <context_index> <receiver> <arg_count>
 //
 // Call the JS runtime function that has the |context_index| with the receiver
 // in register |receiver| and |arg_count| arguments in subsequent registers.
 void Interpreter::DoCallJSRuntime(InterpreterAssembler* assembler) {
   Node* context_index = __ BytecodeOperandIdx(0);
   Node* receiver_reg = __ BytecodeOperandReg(1);
   Node* first_arg = __ RegisterLocation(receiver_reg);
   Node* receiver_args_count = __ BytecodeOperandCount(2);
-  Node* receiver_count = __ Int32Constant(1);
-  Node* args_count = __ Int32Sub(receiver_args_count, receiver_count);
+  Node* receiver_count = __ IntPtrConstant(1);
+  Node* args_count = __ IntPtrSub(receiver_args_count, receiver_count);

   // Get the function to call from the native context.
   Node* context = __ GetContext();
   Node* native_context = __ LoadNativeContext(context);
   Node* function = __ LoadContextElement(native_context, context_index);

   // Call the function.
   Node* result = __ CallJS(function, context, first_arg, args_count,
                            TailCallMode::kDisallow);
   __ SetAccumulator(result);
   __ Dispatch();
 }

 // NewWithSpread <first_arg> <arg_count>
 //
 // Call the constructor in |first_arg| with the new.target in |first_arg + 1|
 // for the |arg_count - 2| following arguments. The final argument is always a
 // spread.
 //
 void Interpreter::DoNewWithSpread(InterpreterAssembler* assembler) {
   Node* first_arg_reg = __ BytecodeOperandReg(0);
   Node* first_arg = __ RegisterLocation(first_arg_reg);
   Node* args_count = __ BytecodeOperandCount(1);
   Node* context = __ GetContext();

   // Call into Runtime function NewWithSpread which does everything.
-  Node* runtime_function = __ Int32Constant(Runtime::kNewWithSpread);
+  Node* runtime_function = __ IntPtrConstant(Runtime::kNewWithSpread);
   Node* result =
       __ CallRuntimeN(runtime_function, context, first_arg, args_count);
   __ SetAccumulator(result);
   __ Dispatch();
 }

 // New <constructor> <first_arg> <arg_count>
 //
 // Call operator new with |constructor| and the first argument in
 // register |first_arg| and |arg_count| arguments in subsequent
(...skipping 364 matching lines...)
 // performs a loop nesting check and potentially triggers OSR in case the
 // current OSR level matches (or exceeds) the specified |loop_depth|.
 void Interpreter::DoJumpLoop(InterpreterAssembler* assembler) {
   Node* relative_jump = __ BytecodeOperandImm(0);
   Node* loop_depth = __ BytecodeOperandImm(1);
   Node* osr_level = __ LoadOSRNestingLevel();

   // Check if OSR points at the given {loop_depth} are armed by comparing it to
   // the current {osr_level} loaded from the header of the BytecodeArray.
   Label ok(assembler), osr_armed(assembler, Label::kDeferred);
-  Node* condition = __ Int32GreaterThanOrEqual(loop_depth, osr_level);
+  Node* condition = __ IntPtrGreaterThanOrEqual(loop_depth, osr_level);
   __ Branch(condition, &ok, &osr_armed);

   __ Bind(&ok);
   __ Jump(relative_jump);

   __ Bind(&osr_armed);
   {
     Callable callable = CodeFactory::InterpreterOnStackReplacement(isolate_);
     Node* target = __ HeapConstant(callable.code());
     Node* context = __ GetContext();
(...skipping 27 matching lines...)
 // CreateArrayLiteral flags <flags> and constant elements in <element_idx>.
 void Interpreter::DoCreateArrayLiteral(InterpreterAssembler* assembler) {
   Node* literal_index_raw = __ BytecodeOperandIdx(1);
   Node* literal_index = __ SmiTag(literal_index_raw);
   Node* closure = __ LoadRegister(Register::function_closure());
   Node* context = __ GetContext();
   Node* bytecode_flags = __ BytecodeOperandFlag(2);

   Label fast_shallow_clone(assembler),
       call_runtime(assembler, Label::kDeferred);
-  Node* use_fast_shallow_clone = __ Word32And(
-      bytecode_flags,
-      __ Int32Constant(CreateArrayLiteralFlags::FastShallowCloneBit::kMask));
-  __ Branch(use_fast_shallow_clone, &fast_shallow_clone, &call_runtime);
+  __ Branch(__ IsSetWord<CreateArrayLiteralFlags::FastShallowCloneBit>(
+                bytecode_flags),
+            &fast_shallow_clone, &call_runtime);

   __ Bind(&fast_shallow_clone);
   {
     DCHECK(FLAG_allocation_site_pretenuring);
     Node* result = FastCloneShallowArrayStub::Generate(
         assembler, closure, literal_index, context, &call_runtime,
         TRACK_ALLOCATION_SITE);
     __ SetAccumulator(result);
     __ Dispatch();
   }

   __ Bind(&call_runtime);
   {
-    STATIC_ASSERT(CreateArrayLiteralFlags::FlagsBits::kShift == 0);
-    Node* flags_raw = __ Word32And(
-        bytecode_flags,
-        __ Int32Constant(CreateArrayLiteralFlags::FlagsBits::kMask));
+    Node* flags_raw =
+        __ DecodeWord<CreateArrayLiteralFlags::FlagsBits>(bytecode_flags);
     Node* flags = __ SmiTag(flags_raw);
     Node* index = __ BytecodeOperandIdx(0);
     Node* constant_elements = __ LoadConstantPoolEntry(index);
     Node* result =
         __ CallRuntime(Runtime::kCreateArrayLiteral, context, closure,
                        literal_index, constant_elements, flags);
     __ SetAccumulator(result);
     __ Dispatch();
   }
 }
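
The flag handling above switches from hand-written Word32And masks to the templated IsSetWord and DecodeWord helpers. DecodeWord applies the field's own shift, which is why the STATIC_ASSERT(...::kShift == 0) guard can be dropped, and the result stays word-sized for the verifier. A simplified sketch of what such helpers do; the field layout below is invented for illustration and is not the real CreateArrayLiteralFlags encoding:

    #include <cstdint>

    template <typename BitField>
    bool IsSetWord(uintptr_t word) {
      return (word & BitField::kMask) != 0;  // Test the field without shifting.
    }

    template <typename BitField>
    uintptr_t DecodeWord(uintptr_t word) {
      return (word & BitField::kMask) >> BitField::kShift;  // Extract the field.
    }

    // Invented layout: a one-bit flag at bit 0 and a five-bit field above it.
    struct FastShallowCloneBit {
      static constexpr uintptr_t kMask = 0x01, kShift = 0;
    };
    struct FlagsBits {
      static constexpr uintptr_t kMask = 0x3e, kShift = 1;
    };
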

 // CreateObjectLiteral <element_idx> <literal_idx> <flags>
 //
 // Creates an object literal for literal index <literal_idx> with
 // CreateObjectLiteralFlags <flags> and constant elements in <element_idx>.
 void Interpreter::DoCreateObjectLiteral(InterpreterAssembler* assembler) {
   Node* literal_index_raw = __ BytecodeOperandIdx(1);
   Node* literal_index = __ SmiTag(literal_index_raw);
   Node* bytecode_flags = __ BytecodeOperandFlag(2);
   Node* closure = __ LoadRegister(Register::function_closure());

   // Check if we can do a fast clone or have to call the runtime.
   Label if_fast_clone(assembler),
       if_not_fast_clone(assembler, Label::kDeferred);
   Node* fast_clone_properties_count =
-      __ DecodeWord32<CreateObjectLiteralFlags::FastClonePropertiesCountBits>(
+      __ DecodeWord<CreateObjectLiteralFlags::FastClonePropertiesCountBits>(
           bytecode_flags);
-  __ Branch(fast_clone_properties_count, &if_fast_clone, &if_not_fast_clone);
+  __ Branch(__ WordNotEqual(fast_clone_properties_count, __ IntPtrConstant(0)),
+            &if_fast_clone, &if_not_fast_clone);

   __ Bind(&if_fast_clone);
   {
     // If we can do a fast clone do the fast-path in FastCloneShallowObjectStub.
     Node* result = FastCloneShallowObjectStub::GenerateFastPath(
         assembler, &if_not_fast_clone, closure, literal_index,
         fast_clone_properties_count);
     __ StoreRegister(result, __ BytecodeOperandReg(3));
     __ Dispatch();
   }

   __ Bind(&if_not_fast_clone);
   {
     // If we can't do a fast clone, call into the runtime.
     Node* index = __ BytecodeOperandIdx(0);
     Node* constant_elements = __ LoadConstantPoolEntry(index);
     Node* context = __ GetContext();

-    STATIC_ASSERT(CreateObjectLiteralFlags::FlagsBits::kShift == 0);
-    Node* flags_raw = __ Word32And(
-        bytecode_flags,
-        __ Int32Constant(CreateObjectLiteralFlags::FlagsBits::kMask));
+    Node* flags_raw =
+        __ DecodeWord<CreateObjectLiteralFlags::FlagsBits>(bytecode_flags);
     Node* flags = __ SmiTag(flags_raw);

     Node* result =
         __ CallRuntime(Runtime::kCreateObjectLiteral, context, closure,
                        literal_index, constant_elements, flags);
     __ StoreRegister(result, __ BytecodeOperandReg(3));
     // TODO(klaasb) build a single dispatch once the call is inlined
     __ Dispatch();
   }
 }

 // CreateClosure <index> <tenured>
 //
 // Creates a new closure for SharedFunctionInfo at position |index| in the
 // constant pool and with the PretenureFlag <tenured>.
 void Interpreter::DoCreateClosure(InterpreterAssembler* assembler) {
   Node* index = __ BytecodeOperandIdx(0);
   Node* shared = __ LoadConstantPoolEntry(index);
   Node* flags = __ BytecodeOperandFlag(1);
   Node* context = __ GetContext();

   Label call_runtime(assembler, Label::kDeferred);
-  Node* fast_new_closure = __ Word32And(
-      flags, __ Int32Constant(CreateClosureFlags::FastNewClosureBit::kMask));
-  __ GotoUnless(fast_new_closure, &call_runtime);
+  __ GotoUnless(__ IsSetWord<CreateClosureFlags::FastNewClosureBit>(flags),
+                &call_runtime);
   __ SetAccumulator(FastNewClosureStub::Generate(assembler, shared, context));
   __ Dispatch();

   __ Bind(&call_runtime);
   {
-    STATIC_ASSERT(CreateClosureFlags::PretenuredBit::kShift == 0);
-    Node* tenured_raw = __ Word32And(
-        flags, __ Int32Constant(CreateClosureFlags::PretenuredBit::kMask));
+    Node* tenured_raw = __ DecodeWord<CreateClosureFlags::PretenuredBit>(flags);
     Node* tenured = __ SmiTag(tenured_raw);
     Node* result = __ CallRuntime(Runtime::kInterpreterNewClosure, context,
                                   shared, tenured);
     __ SetAccumulator(result);
     __ Dispatch();
   }
 }

 // CreateBlockContext <index>
 //
(...skipping 331 matching lines...)
     __ Dispatch();
   }
   __ Bind(&if_slow);
   {
     // Record the fact that we hit the for-in slow path.
     Node* vector_index = __ BytecodeOperandIdx(3);
     Node* type_feedback_vector = __ LoadTypeFeedbackVector();
     Node* megamorphic_sentinel =
         __ HeapConstant(TypeFeedbackVector::MegamorphicSentinel(isolate_));
     __ StoreFixedArrayElement(type_feedback_vector, vector_index,
-                              megamorphic_sentinel, SKIP_WRITE_BARRIER);
+                              megamorphic_sentinel, SKIP_WRITE_BARRIER, 0,
+                              CodeStubAssembler::INTPTR_PARAMETERS);

     // Need to filter the {key} for the {receiver}.
     Node* context = __ GetContext();
     Callable callable = CodeFactory::ForInFilter(assembler->isolate());
     Node* result = __ CallStub(callable, context, key, receiver);
     __ SetAccumulator(result);
     __ Dispatch();
   }
 }

(...skipping 123 matching lines...)
   __ StoreObjectField(generator, JSGeneratorObject::kContinuationOffset,
                       __ SmiTag(new_state));
   __ SetAccumulator(old_state);

   __ Dispatch();
 }

 }  // namespace interpreter
 }  // namespace internal
 }  // namespace v8
