| OLD | NEW |
| 1 // Copyright 2015 the V8 project authors. All rights reserved. | 1 // Copyright 2017 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/interpreter/interpreter.h" | 5 #include "src/interpreter/interpreter-generator.h" |
| 6 | 6 |
| 7 #include <array> | 7 #include <array> |
| 8 #include <fstream> | 8 #include <tuple> |
| 9 #include <memory> | |
| 10 | 9 |
| 11 #include "src/ast/prettyprinter.h" | |
| 12 #include "src/builtins/builtins-arguments.h" | 10 #include "src/builtins/builtins-arguments.h" |
| 13 #include "src/builtins/builtins-constructor.h" | 11 #include "src/builtins/builtins-constructor.h" |
| 14 #include "src/builtins/builtins-forin.h" | 12 #include "src/builtins/builtins-forin.h" |
| 13 #include "src/code-events.h" |
| 15 #include "src/code-factory.h" | 14 #include "src/code-factory.h" |
| 16 #include "src/compilation-info.h" | |
| 17 #include "src/compiler.h" | |
| 18 #include "src/counters.h" | |
| 19 #include "src/debug/debug.h" | |
| 20 #include "src/factory.h" | 15 #include "src/factory.h" |
| 21 #include "src/ic/accessor-assembler.h" | 16 #include "src/ic/accessor-assembler.h" |
| 22 #include "src/interpreter/bytecode-flags.h" | 17 #include "src/interpreter/bytecode-flags.h" |
| 23 #include "src/interpreter/bytecode-generator.h" | |
| 24 #include "src/interpreter/bytecodes.h" | 18 #include "src/interpreter/bytecodes.h" |
| 25 #include "src/interpreter/interpreter-assembler.h" | 19 #include "src/interpreter/interpreter-assembler.h" |
| 26 #include "src/interpreter/interpreter-intrinsics.h" | 20 #include "src/interpreter/interpreter-intrinsics.h" |
| 27 #include "src/log.h" | |
| 28 #include "src/objects-inl.h" | 21 #include "src/objects-inl.h" |
| 29 #include "src/zone/zone.h" | |
| 30 | 22 |
| 31 namespace v8 { | 23 namespace v8 { |
| 32 namespace internal { | 24 namespace internal { |
| 33 namespace interpreter { | 25 namespace interpreter { |
| 34 | 26 |
| 35 using compiler::Node; | 27 using compiler::Node; |
| 36 typedef CodeStubAssembler::Label Label; | 28 typedef CodeStubAssembler::Label Label; |
| 37 typedef CodeStubAssembler::Variable Variable; | 29 typedef CodeStubAssembler::Variable Variable; |
| 38 | 30 |
| 39 #define __ assembler-> | 31 class InterpreterGenerator { |
| 32 public: |
| 33 explicit InterpreterGenerator(Isolate* isolate) : isolate_(isolate) {} |
| 40 | 34 |
| 41 class InterpreterCompilationJob final : public CompilationJob { | 35 // Bytecode handler generator functions. |
| 42 public: | 36 #define DECLARE_BYTECODE_HANDLER_GENERATOR(Name, ...) \ |
| 43 explicit InterpreterCompilationJob(CompilationInfo* info); | 37 void Do##Name(InterpreterAssembler* assembler); |
| 44 | 38 BYTECODE_LIST(DECLARE_BYTECODE_HANDLER_GENERATOR) |
| 45 protected: | 39 #undef DECLARE_BYTECODE_HANDLER_GENERATOR |
| 46 Status PrepareJobImpl() final; | |
| 47 Status ExecuteJobImpl() final; | |
| 48 Status FinalizeJobImpl() final; | |
| 49 | 40 |
| 50 private: | 41 private: |
| 51 class TimerScope final { | 42 // Generates code to perform the binary operation via |Generator|. |
| 52 public: | 43 template <class Generator> |
| 53 TimerScope(RuntimeCallStats* stats, RuntimeCallStats::CounterId counter_id) | 44 void DoBinaryOpWithFeedback(InterpreterAssembler* assembler); |
| 54 : stats_(stats) { | |
| 55 if (V8_UNLIKELY(FLAG_runtime_stats)) { | |
| 56 RuntimeCallStats::Enter(stats_, &timer_, counter_id); | |
| 57 } | |
| 58 } | |
| 59 | 45 |
| 60 explicit TimerScope(RuntimeCallCounter* counter) : stats_(nullptr) { | 46 // Generates code to perform the comparison via |Generator| while gathering |
| 61 if (V8_UNLIKELY(FLAG_runtime_stats)) { | 47 // type feedback. |
| 62 timer_.Start(counter, nullptr); | 48 void DoCompareOpWithFeedback(Token::Value compare_op, |
| 63 } | 49 InterpreterAssembler* assembler); |
| 64 } | |
| 65 | 50 |
| 66 ~TimerScope() { | 51 // Generates code to perform the bitwise binary operation corresponding to |
| 67 if (V8_UNLIKELY(FLAG_runtime_stats)) { | 52 // |bitwise_op| while gathering type feedback. |
| 68 if (stats_) { | 53 void DoBitwiseBinaryOp(Token::Value bitwise_op, |
| 69 RuntimeCallStats::Leave(stats_, &timer_); | 54 InterpreterAssembler* assembler); |
| 70 } else { | |
| 71 timer_.Stop(); | |
| 72 } | |
| 73 } | |
| 74 } | |
| 75 | 55 |
| 76 private: | 56 // Generates code to perform the binary operation via |Generator| using |
| 77 RuntimeCallStats* stats_; | 57 // an immediate value rather than the accumulator as the rhs operand. |
| 78 RuntimeCallTimer timer_; | 58 template <class Generator> |
| 79 }; | 59 void DoBinaryOpWithImmediate(InterpreterAssembler* assembler); |
| 80 | 60 |
| 81 BytecodeGenerator* generator() { return &generator_; } | 61 // Generates code to perform the unary operation via |Generator| while |
| 62 // gathering type feedback. |
| 63 template <class Generator> |
| 64 void DoUnaryOpWithFeedback(InterpreterAssembler* assembler); |
| 82 | 65 |
| 83 BytecodeGenerator generator_; | 66 // Generates code to perform the comparison operation associated with |
| 84 RuntimeCallStats* runtime_call_stats_; | 67 // |compare_op|. |
| 85 RuntimeCallCounter background_execute_counter_; | 68 void DoCompareOp(Token::Value compare_op, InterpreterAssembler* assembler); |
| 86 bool print_bytecode_; | |
| 87 | 69 |
| 88 DISALLOW_COPY_AND_ASSIGN(InterpreterCompilationJob); | 70 // Generates code to perform a global store via |ic|. |
| 71 void DoStaGlobal(Callable ic, InterpreterAssembler* assembler); |
| 72 |
| 73 // Generates code to perform a named property store via |ic|. |
| 74 void DoStoreIC(Callable ic, InterpreterAssembler* assembler); |
| 75 |
| 76 // Generates code to perform a keyed property store via |ic|. |
| 77 void DoKeyedStoreIC(Callable ic, InterpreterAssembler* assembler); |
| 78 |
| 79 // Generates code to perform a JS call that collects type feedback. |
| 80 void DoJSCall(InterpreterAssembler* assembler, TailCallMode tail_call_mode); |
| 81 |
| 82 // Generates code to perform a JS call with a known number of arguments that |
| 83 // collects type feedback. |
| 84 void DoJSCallN(InterpreterAssembler* assembler, int n); |
| 85 |
| 86 // Generates code to perform delete via function_id. |
| 87 void DoDelete(Runtime::FunctionId function_id, |
| 88 InterpreterAssembler* assembler); |
| 89 |
| 90 // Generates code to perform a lookup slot load via |function_id|. |
| 91 void DoLdaLookupSlot(Runtime::FunctionId function_id, |
| 92 InterpreterAssembler* assembler); |
| 93 |
| 94 // Generates code to perform a lookup slot load via |function_id| that can |
| 95 // fast path to a context slot load. |
| 96 void DoLdaLookupContextSlot(Runtime::FunctionId function_id, |
| 97 InterpreterAssembler* assembler); |
| 98 |
| 99 // Generates code to perform a lookup slot load via |function_id| that can |
| 100 // fast path to a global load. |
| 101 void DoLdaLookupGlobalSlot(Runtime::FunctionId function_id, |
| 102 InterpreterAssembler* assembler); |
| 103 |
| 104 // Generates code to perform a lookup slot store depending on |
| 105 // |language_mode|. |
| 106 void DoStaLookupSlot(LanguageMode language_mode, |
| 107 InterpreterAssembler* assembler); |
| 108 |
| 109 // Generates code to load a global property. |
| 110 void BuildLoadGlobalIC(int slot_operand_index, int name_operand_index, |
| 111 TypeofMode typeof_mode, |
| 112 InterpreterAssembler* assembler); |
| 113 |
| 114 // Generates code to load a property. |
| 115 void BuildLoadIC(int recv_operand_index, int slot_operand_index, |
| 116 int name_operand_index, InterpreterAssembler* assembler); |
| 117 |
| 118 // Generates code to prepare the result for ForInPrepare. Cache data |
| 119 // are placed into the consecutive series of registers starting at |
| 120 // |output_register|. |
| 121 void BuildForInPrepareResult(Node* output_register, Node* cache_type, |
| 122 Node* cache_array, Node* cache_length, |
| 123 InterpreterAssembler* assembler); |
| 124 |
| 125 // Generates code to perform the unary operation via |callable|. |
| 126 Node* BuildUnaryOp(Callable callable, InterpreterAssembler* assembler); |
| 127 |
| 128 Isolate* isolate_; |
| 89 }; | 129 }; |
| 90 | 130 |
| 91 Interpreter::Interpreter(Isolate* isolate) : isolate_(isolate) { | 131 Handle<Code> GenerateBytecodeHandler(Isolate* isolate, Bytecode bytecode, |
| 92 memset(dispatch_table_, 0, sizeof(dispatch_table_)); | 132 OperandScale operand_scale) { |
| 93 } | 133 Zone zone(isolate->allocator(), ZONE_NAME); |
| 94 | 134 InterpreterDispatchDescriptor descriptor(isolate); |
| 95 void Interpreter::Initialize() { | |
| 96 if (!ShouldInitializeDispatchTable()) return; | |
| 97 Zone zone(isolate_->allocator(), ZONE_NAME); | |
| 98 HandleScope scope(isolate_); | |
| 99 | |
| 100 if (FLAG_trace_ignition_dispatches) { | |
| 101 static const int kBytecodeCount = static_cast<int>(Bytecode::kLast) + 1; | |
| 102 bytecode_dispatch_counters_table_.reset( | |
| 103 new uintptr_t[kBytecodeCount * kBytecodeCount]); | |
| 104 memset(bytecode_dispatch_counters_table_.get(), 0, | |
| 105 sizeof(uintptr_t) * kBytecodeCount * kBytecodeCount); | |
| 106 } | |
| 107 | |
| 108 // Generate bytecode handlers for all bytecodes and scales. | |
| 109 const OperandScale kOperandScales[] = { | |
| 110 #define VALUE(Name, _) OperandScale::k##Name, | |
| 111 OPERAND_SCALE_LIST(VALUE) | |
| 112 #undef VALUE | |
| 113 }; | |
| 114 | |
| 115 for (OperandScale operand_scale : kOperandScales) { | |
| 116 #define GENERATE_CODE(Name, ...) \ | |
| 117 InstallBytecodeHandler(&zone, Bytecode::k##Name, operand_scale, \ | |
| 118 &Interpreter::Do##Name); | |
| 119 BYTECODE_LIST(GENERATE_CODE) | |
| 120 #undef GENERATE_CODE | |
| 121 } | |
| 122 | |
| 123 // Fill unused entries with the illegal bytecode handler. | |
| 124 size_t illegal_index = | |
| 125 GetDispatchTableIndex(Bytecode::kIllegal, OperandScale::kSingle); | |
| 126 for (size_t index = 0; index < arraysize(dispatch_table_); ++index) { | |
| 127 if (dispatch_table_[index] == nullptr) { | |
| 128 dispatch_table_[index] = dispatch_table_[illegal_index]; | |
| 129 } | |
| 130 } | |
| 131 | |
| 132 // Initialization should have been successful. | |
| 133 DCHECK(IsDispatchTableInitialized()); | |
| 134 } | |
| 135 | |
| 136 bool Interpreter::ReuseExistingHandler(Bytecode bytecode, | |
| 137 OperandScale operand_scale) { | |
| 138 size_t index = GetDispatchTableIndex(bytecode, operand_scale); | |
| 139 switch (bytecode) { | |
| 140 case Bytecode::kCallProperty: | |
| 141 case Bytecode::kCallProperty0: | |
| 142 case Bytecode::kCallProperty1: | |
| 143 case Bytecode::kCallProperty2: { | |
| 144 const int offset = static_cast<int>(Bytecode::kCallProperty) - | |
| 145 static_cast<int>(Bytecode::kCall); | |
| 146 STATIC_ASSERT(offset == | |
| 147 static_cast<int>(Bytecode::kCallProperty0) - | |
| 148 static_cast<int>(Bytecode::kCall0)); | |
| 149 STATIC_ASSERT(offset == | |
| 150 static_cast<int>(Bytecode::kCallProperty1) - | |
| 151 static_cast<int>(Bytecode::kCall1)); | |
| 152 STATIC_ASSERT(offset == | |
| 153 static_cast<int>(Bytecode::kCallProperty2) - | |
| 154 static_cast<int>(Bytecode::kCall2)); | |
| 155 CHECK_LT(offset, index); | |
| 156 dispatch_table_[index] = dispatch_table_[index - offset]; | |
| 157 return true; | |
| 158 break; | |
| 159 } | |
| 160 default: | |
| 161 return false; | |
| 162 } | |
| 163 } | |
| 164 | |
| 165 void Interpreter::InstallBytecodeHandler(Zone* zone, Bytecode bytecode, | |
| 166 OperandScale operand_scale, | |
| 167 BytecodeGeneratorFunc generator) { | |
| 168 if (!Bytecodes::BytecodeHasHandler(bytecode, operand_scale)) return; | |
| 169 if (ReuseExistingHandler(bytecode, operand_scale)) return; | |
| 170 | |
| 171 size_t index = GetDispatchTableIndex(bytecode, operand_scale); | |
| 172 InterpreterDispatchDescriptor descriptor(isolate_); | |
| 173 compiler::CodeAssemblerState state( | 135 compiler::CodeAssemblerState state( |
| 174 isolate_, zone, descriptor, Code::ComputeFlags(Code::BYTECODE_HANDLER), | 136 isolate, &zone, descriptor, Code::ComputeFlags(Code::BYTECODE_HANDLER), |
| 175 Bytecodes::ToString(bytecode), Bytecodes::ReturnCount(bytecode)); | 137 Bytecodes::ToString(bytecode), Bytecodes::ReturnCount(bytecode)); |
| 176 InterpreterAssembler assembler(&state, bytecode, operand_scale); | 138 InterpreterAssembler assembler(&state, bytecode, operand_scale); |
| 177 if (Bytecodes::MakesCallAlongCriticalPath(bytecode)) { | 139 if (Bytecodes::MakesCallAlongCriticalPath(bytecode)) { |
| 178 assembler.SaveBytecodeOffset(); | 140 assembler.SaveBytecodeOffset(); |
| 179 } | 141 } |
| 180 (this->*generator)(&assembler); | 142 InterpreterGenerator generator(isolate); |
| 181 Handle<Code> code = compiler::CodeAssembler::GenerateCode(&state); | |
| 182 dispatch_table_[index] = code->entry(); | |
| 183 TraceCodegen(code); | |
| 184 PROFILE(isolate_, CodeCreateEvent( | |
| 185 CodeEventListener::BYTECODE_HANDLER_TAG, | |
| 186 AbstractCode::cast(*code), | |
| 187 Bytecodes::ToString(bytecode, operand_scale).c_str())); | |
| 188 } | |
| 189 | 143 |
| 190 Code* Interpreter::GetBytecodeHandler(Bytecode bytecode, | 144 switch (bytecode) { |
| 191 OperandScale operand_scale) { | 145 #define CALL_GENERATOR(Name, ...) \ |
| 192 DCHECK(IsDispatchTableInitialized()); | 146 case Bytecode::k##Name: \ |
| 193 DCHECK(Bytecodes::BytecodeHasHandler(bytecode, operand_scale)); | 147 generator.Do##Name(&assembler); \ |
| 194 size_t index = GetDispatchTableIndex(bytecode, operand_scale); | 148 break; |
| 195 Address code_entry = dispatch_table_[index]; | 149 BYTECODE_LIST(CALL_GENERATOR); |
| 196 return Code::GetCodeFromTargetAddress(code_entry); | 150 #undef CALL_GENERATOR |
| 197 } | |
| 198 | |
| 199 // static | |
| 200 size_t Interpreter::GetDispatchTableIndex(Bytecode bytecode, | |
| 201 OperandScale operand_scale) { | |
| 202 static const size_t kEntriesPerOperandScale = 1u << kBitsPerByte; | |
| 203 size_t index = static_cast<size_t>(bytecode); | |
| 204 switch (operand_scale) { | |
| 205 case OperandScale::kSingle: | |
| 206 return index; | |
| 207 case OperandScale::kDouble: | |
| 208 return index + kEntriesPerOperandScale; | |
| 209 case OperandScale::kQuadruple: | |
| 210 return index + 2 * kEntriesPerOperandScale; | |
| 211 } | |
| 212 UNREACHABLE(); | |
| 213 return 0; | |
| 214 } | |
| 215 | |
| 216 void Interpreter::IterateDispatchTable(ObjectVisitor* v) { | |
| 217 for (int i = 0; i < kDispatchTableSize; i++) { | |
| 218 Address code_entry = dispatch_table_[i]; | |
| 219 Object* code = code_entry == nullptr | |
| 220 ? nullptr | |
| 221 : Code::GetCodeFromTargetAddress(code_entry); | |
| 222 Object* old_code = code; | |
| 223 v->VisitPointer(&code); | |
| 224 if (code != old_code) { | |
| 225 dispatch_table_[i] = reinterpret_cast<Code*>(code)->entry(); | |
| 226 } | |
| 227 } | |
| 228 } | |
| 229 | |
| 230 // static | |
| 231 int Interpreter::InterruptBudget() { | |
| 232 return FLAG_interrupt_budget * kCodeSizeMultiplier; | |
| 233 } | |
| 234 | |
| 235 namespace { | |
| 236 | |
| 237 bool ShouldPrintBytecode(Handle<SharedFunctionInfo> shared) { | |
| 238 if (!FLAG_print_bytecode) return false; | |
| 239 | |
| 240 // Checks whether function passed the filter. | |
| 241 if (shared->is_toplevel()) { | |
| 242 Vector<const char> filter = CStrVector(FLAG_print_bytecode_filter); | |
| 243 return (filter.length() == 0) || (filter.length() == 1 && filter[0] == '*'); | |
| 244 } else { | |
| 245 return shared->PassesFilter(FLAG_print_bytecode_filter); | |
| 246 } | |
| 247 } | |
| 248 | |
| 249 } // namespace | |
| 250 | |
| 251 InterpreterCompilationJob::InterpreterCompilationJob(CompilationInfo* info) | |
| 252 : CompilationJob(info->isolate(), info, "Ignition"), | |
| 253 generator_(info), | |
| 254 runtime_call_stats_(info->isolate()->counters()->runtime_call_stats()), | |
| 255 background_execute_counter_("CompileBackgroundIgnition"), | |
| 256 print_bytecode_(ShouldPrintBytecode(info->shared_info())) {} | |
| 257 | |
| 258 InterpreterCompilationJob::Status InterpreterCompilationJob::PrepareJobImpl() { | |
| 259 CodeGenerator::MakeCodePrologue(info(), "interpreter"); | |
| 260 | |
| 261 if (print_bytecode_) { | |
| 262 OFStream os(stdout); | |
| 263 std::unique_ptr<char[]> name = info()->GetDebugName(); | |
| 264 os << "[generating bytecode for function: " << info()->GetDebugName().get() | |
| 265 << "]" << std::endl | |
| 266 << std::flush; | |
| 267 } | 151 } |
| 268 | 152 |
| 269 return SUCCEEDED; | 153 Handle<Code> code = compiler::CodeAssembler::GenerateCode(&state); |
| 270 } | 154 PROFILE(isolate, CodeCreateEvent( |
| 271 | 155 CodeEventListener::BYTECODE_HANDLER_TAG, |
| 272 InterpreterCompilationJob::Status InterpreterCompilationJob::ExecuteJobImpl() { | 156 AbstractCode::cast(*code), |
| 273 TimerScope runtimeTimer = | 157 Bytecodes::ToString(bytecode, operand_scale).c_str())); |
| 274 executed_on_background_thread() | |
| 275 ? TimerScope(&background_execute_counter_) | |
| 276 : TimerScope(runtime_call_stats_, &RuntimeCallStats::CompileIgnition); | |
| 277 // TODO(lpy): add support for background compilation RCS trace. | |
| 278 TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.compile"), "V8.CompileIgnition"); | |
| 279 | |
| 280 generator()->GenerateBytecode(stack_limit()); | |
| 281 | |
| 282 if (generator()->HasStackOverflow()) { | |
| 283 return FAILED; | |
| 284 } | |
| 285 return SUCCEEDED; | |
| 286 } | |
| 287 | |
| 288 InterpreterCompilationJob::Status InterpreterCompilationJob::FinalizeJobImpl() { | |
| 289 // Add background runtime call stats. | |
| 290 if (V8_UNLIKELY(FLAG_runtime_stats && executed_on_background_thread())) { | |
| 291 runtime_call_stats_->CompileBackgroundIgnition.Add( | |
| 292 &background_execute_counter_); | |
| 293 } | |
| 294 | |
| 295 RuntimeCallTimerScope runtimeTimer( | |
| 296 runtime_call_stats_, &RuntimeCallStats::CompileIgnitionFinalization); | |
| 297 | |
| 298 Handle<BytecodeArray> bytecodes = generator()->FinalizeBytecode(isolate()); | |
| 299 if (generator()->HasStackOverflow()) { | |
| 300 return FAILED; | |
| 301 } | |
| 302 | |
| 303 if (print_bytecode_) { | |
| 304 OFStream os(stdout); | |
| 305 bytecodes->Print(os); | |
| 306 os << std::flush; | |
| 307 } | |
| 308 | |
| 309 info()->SetBytecodeArray(bytecodes); | |
| 310 info()->SetCode(info()->isolate()->builtins()->InterpreterEntryTrampoline()); | |
| 311 return SUCCEEDED; | |
| 312 } | |
| 313 | |
| 314 CompilationJob* Interpreter::NewCompilationJob(CompilationInfo* info) { | |
| 315 return new InterpreterCompilationJob(info); | |
| 316 } | |
| 317 | |
| 318 bool Interpreter::IsDispatchTableInitialized() { | |
| 319 return dispatch_table_[0] != nullptr; | |
| 320 } | |
| 321 | |
| 322 bool Interpreter::ShouldInitializeDispatchTable() { | |
| 323 if (FLAG_trace_ignition || FLAG_trace_ignition_codegen || | |
| 324 FLAG_trace_ignition_dispatches) { | |
| 325 // Regenerate table to add bytecode tracing operations, print the assembly | |
| 326 // code generated by TurboFan or instrument handlers with dispatch counters. | |
| 327 return true; | |
| 328 } | |
| 329 return !IsDispatchTableInitialized(); | |
| 330 } | |
| 331 | |
| 332 void Interpreter::TraceCodegen(Handle<Code> code) { | |
| 333 #ifdef ENABLE_DISASSEMBLER | 158 #ifdef ENABLE_DISASSEMBLER |
| 334 if (FLAG_trace_ignition_codegen) { | 159 if (FLAG_trace_ignition_codegen) { |
| 335 OFStream os(stdout); | 160 OFStream os(stdout); |
| 336 code->Disassemble(nullptr, os); | 161 code->Disassemble(nullptr, os); |
| 337 os << std::flush; | 162 os << std::flush; |
| 338 } | 163 } |
| 339 #endif // ENABLE_DISASSEMBLER | 164 #endif // ENABLE_DISASSEMBLER |
| 165 return code; |
| 340 } | 166 } |
| 341 | 167 |
| 342 const char* Interpreter::LookupNameOfBytecodeHandler(Code* code) { | 168 #define __ assembler-> |
| 343 #ifdef ENABLE_DISASSEMBLER | |
| 344 #define RETURN_NAME(Name, ...) \ | |
| 345 if (dispatch_table_[Bytecodes::ToByte(Bytecode::k##Name)] == \ | |
| 346 code->entry()) { \ | |
| 347 return #Name; \ | |
| 348 } | |
| 349 BYTECODE_LIST(RETURN_NAME) | |
| 350 #undef RETURN_NAME | |
| 351 #endif // ENABLE_DISASSEMBLER | |
| 352 return nullptr; | |
| 353 } | |
| 354 | |
| 355 uintptr_t Interpreter::GetDispatchCounter(Bytecode from, Bytecode to) const { | |
| 356 int from_index = Bytecodes::ToByte(from); | |
| 357 int to_index = Bytecodes::ToByte(to); | |
| 358 return bytecode_dispatch_counters_table_[from_index * kNumberOfBytecodes + | |
| 359 to_index]; | |
| 360 } | |
| 361 | |
| 362 Local<v8::Object> Interpreter::GetDispatchCountersObject() { | |
| 363 v8::Isolate* isolate = reinterpret_cast<v8::Isolate*>(isolate_); | |
| 364 Local<v8::Context> context = isolate->GetCurrentContext(); | |
| 365 | |
| 366 Local<v8::Object> counters_map = v8::Object::New(isolate); | |
| 367 | |
| 368 // Output is a JSON-encoded object of objects. | |
| 369 // | |
| 370 // The keys on the top level object are source bytecodes, | |
| 371 // and the corresponding values are objects. The keys of these inner objects | |
| 372 // are the dispatch destinations, and each value is a counter for the | |
| 373 // corresponding source-destination dispatch pair. | |
| 374 // | |
| 375 // Only non-zero counters are written to file, but an entry in the top-level | |
| 376 // object is always present, even if the value is empty because all counters | |
| 377 // for that source are zero. | |
| 378 | |
| 379 for (int from_index = 0; from_index < kNumberOfBytecodes; ++from_index) { | |
| 380 Bytecode from_bytecode = Bytecodes::FromByte(from_index); | |
| 381 Local<v8::Object> counters_row = v8::Object::New(isolate); | |
| 382 | |
| 383 for (int to_index = 0; to_index < kNumberOfBytecodes; ++to_index) { | |
| 384 Bytecode to_bytecode = Bytecodes::FromByte(to_index); | |
| 385 uintptr_t counter = GetDispatchCounter(from_bytecode, to_bytecode); | |
| 386 | |
| 387 if (counter > 0) { | |
| 388 std::string to_name = Bytecodes::ToString(to_bytecode); | |
| 389 Local<v8::String> to_name_object = | |
| 390 v8::String::NewFromUtf8(isolate, to_name.c_str(), | |
| 391 NewStringType::kNormal) | |
| 392 .ToLocalChecked(); | |
| 393 Local<v8::Number> counter_object = v8::Number::New(isolate, counter); | |
| 394 CHECK(counters_row | |
| 395 ->DefineOwnProperty(context, to_name_object, counter_object) | |
| 396 .IsJust()); | |
| 397 } | |
| 398 } | |
| 399 | |
| 400 std::string from_name = Bytecodes::ToString(from_bytecode); | |
| 401 Local<v8::String> from_name_object = | |
| 402 v8::String::NewFromUtf8(isolate, from_name.c_str(), | |
| 403 NewStringType::kNormal) | |
| 404 .ToLocalChecked(); | |
| 405 | |
| 406 CHECK( | |
| 407 counters_map->DefineOwnProperty(context, from_name_object, counters_row) | |
| 408 .IsJust()); | |
| 409 } | |
| 410 | |
| 411 return counters_map; | |
| 412 } | |
| 413 | 169 |
| 414 // LdaZero | 170 // LdaZero |
| 415 // | 171 // |
| 416 // Load literal '0' into the accumulator. | 172 // Load literal '0' into the accumulator. |
| 417 void Interpreter::DoLdaZero(InterpreterAssembler* assembler) { | 173 void InterpreterGenerator::DoLdaZero(InterpreterAssembler* assembler) { |
| 418 Node* zero_value = __ NumberConstant(0.0); | 174 Node* zero_value = __ NumberConstant(0.0); |
| 419 __ SetAccumulator(zero_value); | 175 __ SetAccumulator(zero_value); |
| 420 __ Dispatch(); | 176 __ Dispatch(); |
| 421 } | 177 } |
| 422 | 178 |
| 423 // LdaSmi <imm> | 179 // LdaSmi <imm> |
| 424 // | 180 // |
| 425 // Load an integer literal into the accumulator as a Smi. | 181 // Load an integer literal into the accumulator as a Smi. |
| 426 void Interpreter::DoLdaSmi(InterpreterAssembler* assembler) { | 182 void InterpreterGenerator::DoLdaSmi(InterpreterAssembler* assembler) { |
| 427 Node* smi_int = __ BytecodeOperandImmSmi(0); | 183 Node* smi_int = __ BytecodeOperandImmSmi(0); |
| 428 __ SetAccumulator(smi_int); | 184 __ SetAccumulator(smi_int); |
| 429 __ Dispatch(); | 185 __ Dispatch(); |
| 430 } | 186 } |
| 431 | 187 |
| 432 // LdaConstant <idx> | 188 // LdaConstant <idx> |
| 433 // | 189 // |
| 434 // Load constant literal at |idx| in the constant pool into the accumulator. | 190 // Load constant literal at |idx| in the constant pool into the accumulator. |
| 435 void Interpreter::DoLdaConstant(InterpreterAssembler* assembler) { | 191 void InterpreterGenerator::DoLdaConstant(InterpreterAssembler* assembler) { |
| 436 Node* index = __ BytecodeOperandIdx(0); | 192 Node* index = __ BytecodeOperandIdx(0); |
| 437 Node* constant = __ LoadConstantPoolEntry(index); | 193 Node* constant = __ LoadConstantPoolEntry(index); |
| 438 __ SetAccumulator(constant); | 194 __ SetAccumulator(constant); |
| 439 __ Dispatch(); | 195 __ Dispatch(); |
| 440 } | 196 } |
| 441 | 197 |
| 442 // LdaUndefined | 198 // LdaUndefined |
| 443 // | 199 // |
| 444 // Load Undefined into the accumulator. | 200 // Load Undefined into the accumulator. |
| 445 void Interpreter::DoLdaUndefined(InterpreterAssembler* assembler) { | 201 void InterpreterGenerator::DoLdaUndefined(InterpreterAssembler* assembler) { |
| 446 Node* undefined_value = | 202 Node* undefined_value = |
| 447 __ HeapConstant(isolate_->factory()->undefined_value()); | 203 __ HeapConstant(isolate_->factory()->undefined_value()); |
| 448 __ SetAccumulator(undefined_value); | 204 __ SetAccumulator(undefined_value); |
| 449 __ Dispatch(); | 205 __ Dispatch(); |
| 450 } | 206 } |
| 451 | 207 |
| 452 // LdaNull | 208 // LdaNull |
| 453 // | 209 // |
| 454 // Load Null into the accumulator. | 210 // Load Null into the accumulator. |
| 455 void Interpreter::DoLdaNull(InterpreterAssembler* assembler) { | 211 void InterpreterGenerator::DoLdaNull(InterpreterAssembler* assembler) { |
| 456 Node* null_value = __ HeapConstant(isolate_->factory()->null_value()); | 212 Node* null_value = __ HeapConstant(isolate_->factory()->null_value()); |
| 457 __ SetAccumulator(null_value); | 213 __ SetAccumulator(null_value); |
| 458 __ Dispatch(); | 214 __ Dispatch(); |
| 459 } | 215 } |
| 460 | 216 |
| 461 // LdaTheHole | 217 // LdaTheHole |
| 462 // | 218 // |
| 463 // Load TheHole into the accumulator. | 219 // Load TheHole into the accumulator. |
| 464 void Interpreter::DoLdaTheHole(InterpreterAssembler* assembler) { | 220 void InterpreterGenerator::DoLdaTheHole(InterpreterAssembler* assembler) { |
| 465 Node* the_hole_value = __ HeapConstant(isolate_->factory()->the_hole_value()); | 221 Node* the_hole_value = __ HeapConstant(isolate_->factory()->the_hole_value()); |
| 466 __ SetAccumulator(the_hole_value); | 222 __ SetAccumulator(the_hole_value); |
| 467 __ Dispatch(); | 223 __ Dispatch(); |
| 468 } | 224 } |
| 469 | 225 |
| 470 // LdaTrue | 226 // LdaTrue |
| 471 // | 227 // |
| 472 // Load True into the accumulator. | 228 // Load True into the accumulator. |
| 473 void Interpreter::DoLdaTrue(InterpreterAssembler* assembler) { | 229 void InterpreterGenerator::DoLdaTrue(InterpreterAssembler* assembler) { |
| 474 Node* true_value = __ HeapConstant(isolate_->factory()->true_value()); | 230 Node* true_value = __ HeapConstant(isolate_->factory()->true_value()); |
| 475 __ SetAccumulator(true_value); | 231 __ SetAccumulator(true_value); |
| 476 __ Dispatch(); | 232 __ Dispatch(); |
| 477 } | 233 } |
| 478 | 234 |
| 479 // LdaFalse | 235 // LdaFalse |
| 480 // | 236 // |
| 481 // Load False into the accumulator. | 237 // Load False into the accumulator. |
| 482 void Interpreter::DoLdaFalse(InterpreterAssembler* assembler) { | 238 void InterpreterGenerator::DoLdaFalse(InterpreterAssembler* assembler) { |
| 483 Node* false_value = __ HeapConstant(isolate_->factory()->false_value()); | 239 Node* false_value = __ HeapConstant(isolate_->factory()->false_value()); |
| 484 __ SetAccumulator(false_value); | 240 __ SetAccumulator(false_value); |
| 485 __ Dispatch(); | 241 __ Dispatch(); |
| 486 } | 242 } |
| 487 | 243 |
| 488 // Ldar <src> | 244 // Ldar <src> |
| 489 // | 245 // |
| 490 // Load accumulator with value from register <src>. | 246 // Load accumulator with value from register <src>. |
| 491 void Interpreter::DoLdar(InterpreterAssembler* assembler) { | 247 void InterpreterGenerator::DoLdar(InterpreterAssembler* assembler) { |
| 492 Node* reg_index = __ BytecodeOperandReg(0); | 248 Node* reg_index = __ BytecodeOperandReg(0); |
| 493 Node* value = __ LoadRegister(reg_index); | 249 Node* value = __ LoadRegister(reg_index); |
| 494 __ SetAccumulator(value); | 250 __ SetAccumulator(value); |
| 495 __ Dispatch(); | 251 __ Dispatch(); |
| 496 } | 252 } |
| 497 | 253 |
| 498 // Star <dst> | 254 // Star <dst> |
| 499 // | 255 // |
| 500 // Store accumulator to register <dst>. | 256 // Store accumulator to register <dst>. |
| 501 void Interpreter::DoStar(InterpreterAssembler* assembler) { | 257 void InterpreterGenerator::DoStar(InterpreterAssembler* assembler) { |
| 502 Node* reg_index = __ BytecodeOperandReg(0); | 258 Node* reg_index = __ BytecodeOperandReg(0); |
| 503 Node* accumulator = __ GetAccumulator(); | 259 Node* accumulator = __ GetAccumulator(); |
| 504 __ StoreRegister(accumulator, reg_index); | 260 __ StoreRegister(accumulator, reg_index); |
| 505 __ Dispatch(); | 261 __ Dispatch(); |
| 506 } | 262 } |
| 507 | 263 |
| 508 // Mov <src> <dst> | 264 // Mov <src> <dst> |
| 509 // | 265 // |
| 510 // Stores the value of register <src> to register <dst>. | 266 // Stores the value of register <src> to register <dst>. |
| 511 void Interpreter::DoMov(InterpreterAssembler* assembler) { | 267 void InterpreterGenerator::DoMov(InterpreterAssembler* assembler) { |
| 512 Node* src_index = __ BytecodeOperandReg(0); | 268 Node* src_index = __ BytecodeOperandReg(0); |
| 513 Node* src_value = __ LoadRegister(src_index); | 269 Node* src_value = __ LoadRegister(src_index); |
| 514 Node* dst_index = __ BytecodeOperandReg(1); | 270 Node* dst_index = __ BytecodeOperandReg(1); |
| 515 __ StoreRegister(src_value, dst_index); | 271 __ StoreRegister(src_value, dst_index); |
| 516 __ Dispatch(); | 272 __ Dispatch(); |
| 517 } | 273 } |
| 518 | 274 |
| 519 void Interpreter::BuildLoadGlobalIC(int slot_operand_index, | 275 void InterpreterGenerator::BuildLoadGlobalIC(int slot_operand_index, |
| 520 int name_operand_index, | 276 int name_operand_index, |
| 521 TypeofMode typeof_mode, | 277 TypeofMode typeof_mode, |
| 522 InterpreterAssembler* assembler) { | 278 InterpreterAssembler* assembler) { |
| 523 // Must be kept in sync with AccessorAssembler::LoadGlobalIC. | 279 // Must be kept in sync with AccessorAssembler::LoadGlobalIC. |
| 524 | 280 |
| 525 // Load the global via the LoadGlobalIC. | 281 // Load the global via the LoadGlobalIC. |
| 526 Node* feedback_vector = __ LoadFeedbackVector(); | 282 Node* feedback_vector = __ LoadFeedbackVector(); |
| 527 Node* feedback_slot = __ BytecodeOperandIdx(slot_operand_index); | 283 Node* feedback_slot = __ BytecodeOperandIdx(slot_operand_index); |
| 528 | 284 |
| 529 AccessorAssembler accessor_asm(assembler->state()); | 285 AccessorAssembler accessor_asm(assembler->state()); |
| 530 | 286 |
| 531 Label try_handler(assembler, Label::kDeferred), | 287 Label try_handler(assembler, Label::kDeferred), |
| 532 miss(assembler, Label::kDeferred); | 288 miss(assembler, Label::kDeferred); |
| (...skipping 49 matching lines...) |
| 582 __ SetAccumulator(var_result.value()); | 338 __ SetAccumulator(var_result.value()); |
| 583 __ Dispatch(); | 339 __ Dispatch(); |
| 584 } | 340 } |
| 585 } | 341 } |
| 586 } | 342 } |
| 587 | 343 |
| 588 // LdaGlobal <name_index> <slot> | 344 // LdaGlobal <name_index> <slot> |
| 589 // | 345 // |
| 590 // Load the global with name in constant pool entry <name_index> into the | 346 // Load the global with name in constant pool entry <name_index> into the |
| 591 // accumulator using FeedBackVector slot <slot> outside of a typeof. | 347 // accumulator using FeedBackVector slot <slot> outside of a typeof. |
| 592 void Interpreter::DoLdaGlobal(InterpreterAssembler* assembler) { | 348 void InterpreterGenerator::DoLdaGlobal(InterpreterAssembler* assembler) { |
| 593 static const int kNameOperandIndex = 0; | 349 static const int kNameOperandIndex = 0; |
| 594 static const int kSlotOperandIndex = 1; | 350 static const int kSlotOperandIndex = 1; |
| 595 | 351 |
| 596 BuildLoadGlobalIC(kSlotOperandIndex, kNameOperandIndex, NOT_INSIDE_TYPEOF, | 352 BuildLoadGlobalIC(kSlotOperandIndex, kNameOperandIndex, NOT_INSIDE_TYPEOF, |
| 597 assembler); | 353 assembler); |
| 598 } | 354 } |
| 599 | 355 |
| 600 // LdaGlobalInsideTypeof <name_index> <slot> | 356 // LdaGlobalInsideTypeof <name_index> <slot> |
| 601 // | 357 // |
| 602 // Load the global with name in constant pool entry <name_index> into the | 358 // Load the global with name in constant pool entry <name_index> into the |
| 603 // accumulator using FeedBackVector slot <slot> inside of a typeof. | 359 // accumulator using FeedBackVector slot <slot> inside of a typeof. |
| 604 void Interpreter::DoLdaGlobalInsideTypeof(InterpreterAssembler* assembler) { | 360 void InterpreterGenerator::DoLdaGlobalInsideTypeof( |
| 361 InterpreterAssembler* assembler) { |
| 605 static const int kNameOperandIndex = 0; | 362 static const int kNameOperandIndex = 0; |
| 606 static const int kSlotOperandIndex = 1; | 363 static const int kSlotOperandIndex = 1; |
| 607 | 364 |
| 608 BuildLoadGlobalIC(kSlotOperandIndex, kNameOperandIndex, INSIDE_TYPEOF, | 365 BuildLoadGlobalIC(kSlotOperandIndex, kNameOperandIndex, INSIDE_TYPEOF, |
| 609 assembler); | 366 assembler); |
| 610 } | 367 } |
| 611 | 368 |
| 612 void Interpreter::DoStaGlobal(Callable ic, InterpreterAssembler* assembler) { | 369 void InterpreterGenerator::DoStaGlobal(Callable ic, |
| 370 InterpreterAssembler* assembler) { |
| 613 // Get the global object. | 371 // Get the global object. |
| 614 Node* context = __ GetContext(); | 372 Node* context = __ GetContext(); |
| 615 Node* native_context = __ LoadNativeContext(context); | 373 Node* native_context = __ LoadNativeContext(context); |
| 616 Node* global = | 374 Node* global = |
| 617 __ LoadContextElement(native_context, Context::EXTENSION_INDEX); | 375 __ LoadContextElement(native_context, Context::EXTENSION_INDEX); |
| 618 | 376 |
| 619 // Store the global via the StoreIC. | 377 // Store the global via the StoreIC. |
| 620 Node* code_target = __ HeapConstant(ic.code()); | 378 Node* code_target = __ HeapConstant(ic.code()); |
| 621 Node* constant_index = __ BytecodeOperandIdx(0); | 379 Node* constant_index = __ BytecodeOperandIdx(0); |
| 622 Node* name = __ LoadConstantPoolEntry(constant_index); | 380 Node* name = __ LoadConstantPoolEntry(constant_index); |
| 623 Node* value = __ GetAccumulator(); | 381 Node* value = __ GetAccumulator(); |
| 624 Node* raw_slot = __ BytecodeOperandIdx(1); | 382 Node* raw_slot = __ BytecodeOperandIdx(1); |
| 625 Node* smi_slot = __ SmiTag(raw_slot); | 383 Node* smi_slot = __ SmiTag(raw_slot); |
| 626 Node* feedback_vector = __ LoadFeedbackVector(); | 384 Node* feedback_vector = __ LoadFeedbackVector(); |
| 627 __ CallStub(ic.descriptor(), code_target, context, global, name, value, | 385 __ CallStub(ic.descriptor(), code_target, context, global, name, value, |
| 628 smi_slot, feedback_vector); | 386 smi_slot, feedback_vector); |
| 629 __ Dispatch(); | 387 __ Dispatch(); |
| 630 } | 388 } |
| 631 | 389 |
| 632 // StaGlobalSloppy <name_index> <slot> | 390 // StaGlobalSloppy <name_index> <slot> |
| 633 // | 391 // |
| 634 // Store the value in the accumulator into the global with name in constant pool | 392 // Store the value in the accumulator into the global with name in constant pool |
| 635 // entry <name_index> using FeedBackVector slot <slot> in sloppy mode. | 393 // entry <name_index> using FeedBackVector slot <slot> in sloppy mode. |
| 636 void Interpreter::DoStaGlobalSloppy(InterpreterAssembler* assembler) { | 394 void InterpreterGenerator::DoStaGlobalSloppy(InterpreterAssembler* assembler) { |
| 637 Callable ic = CodeFactory::StoreICInOptimizedCode(isolate_, SLOPPY); | 395 Callable ic = CodeFactory::StoreICInOptimizedCode(isolate_, SLOPPY); |
| 638 DoStaGlobal(ic, assembler); | 396 DoStaGlobal(ic, assembler); |
| 639 } | 397 } |
| 640 | 398 |
| 641 // StaGlobalStrict <name_index> <slot> | 399 // StaGlobalStrict <name_index> <slot> |
| 642 // | 400 // |
| 643 // Store the value in the accumulator into the global with name in constant pool | 401 // Store the value in the accumulator into the global with name in constant pool |
| 644 // entry <name_index> using FeedBackVector slot <slot> in strict mode. | 402 // entry <name_index> using FeedBackVector slot <slot> in strict mode. |
| 645 void Interpreter::DoStaGlobalStrict(InterpreterAssembler* assembler) { | 403 void InterpreterGenerator::DoStaGlobalStrict(InterpreterAssembler* assembler) { |
| 646 Callable ic = CodeFactory::StoreICInOptimizedCode(isolate_, STRICT); | 404 Callable ic = CodeFactory::StoreICInOptimizedCode(isolate_, STRICT); |
| 647 DoStaGlobal(ic, assembler); | 405 DoStaGlobal(ic, assembler); |
| 648 } | 406 } |
| 649 | 407 |
| 650 // LdaContextSlot <context> <slot_index> <depth> | 408 // LdaContextSlot <context> <slot_index> <depth> |
| 651 // | 409 // |
| 652 // Load the object in |slot_index| of the context at |depth| in the context | 410 // Load the object in |slot_index| of the context at |depth| in the context |
| 653 // chain starting at |context| into the accumulator. | 411 // chain starting at |context| into the accumulator. |
| 654 void Interpreter::DoLdaContextSlot(InterpreterAssembler* assembler) { | 412 void InterpreterGenerator::DoLdaContextSlot(InterpreterAssembler* assembler) { |
| 655 Node* reg_index = __ BytecodeOperandReg(0); | 413 Node* reg_index = __ BytecodeOperandReg(0); |
| 656 Node* context = __ LoadRegister(reg_index); | 414 Node* context = __ LoadRegister(reg_index); |
| 657 Node* slot_index = __ BytecodeOperandIdx(1); | 415 Node* slot_index = __ BytecodeOperandIdx(1); |
| 658 Node* depth = __ BytecodeOperandUImm(2); | 416 Node* depth = __ BytecodeOperandUImm(2); |
| 659 Node* slot_context = __ GetContextAtDepth(context, depth); | 417 Node* slot_context = __ GetContextAtDepth(context, depth); |
| 660 Node* result = __ LoadContextElement(slot_context, slot_index); | 418 Node* result = __ LoadContextElement(slot_context, slot_index); |
| 661 __ SetAccumulator(result); | 419 __ SetAccumulator(result); |
| 662 __ Dispatch(); | 420 __ Dispatch(); |
| 663 } | 421 } |
| 664 | 422 |
| 665 // LdaImmutableContextSlot <context> <slot_index> <depth> | 423 // LdaImmutableContextSlot <context> <slot_index> <depth> |
| 666 // | 424 // |
| 667 // Load the object in |slot_index| of the context at |depth| in the context | 425 // Load the object in |slot_index| of the context at |depth| in the context |
| 668 // chain starting at |context| into the accumulator. | 426 // chain starting at |context| into the accumulator. |
| 669 void Interpreter::DoLdaImmutableContextSlot(InterpreterAssembler* assembler) { | 427 void InterpreterGenerator::DoLdaImmutableContextSlot( |
| 428 InterpreterAssembler* assembler) { |
| 670 // TODO(danno) Share the actual code object rather than creating a duplicate one. | 429 // TODO(danno) Share the actual code object rather than creating a duplicate one. |
| 671 DoLdaContextSlot(assembler); | 430 DoLdaContextSlot(assembler); |
| 672 } | 431 } |
| 673 | 432 |
| 674 // LdaCurrentContextSlot <slot_index> | 433 // LdaCurrentContextSlot <slot_index> |
| 675 // | 434 // |
| 676 // Load the object in |slot_index| of the current context into the accumulator. | 435 // Load the object in |slot_index| of the current context into the accumulator. |
| 677 void Interpreter::DoLdaCurrentContextSlot(InterpreterAssembler* assembler) { | 436 void InterpreterGenerator::DoLdaCurrentContextSlot( |
| 437 InterpreterAssembler* assembler) { |
| 678 Node* slot_index = __ BytecodeOperandIdx(0); | 438 Node* slot_index = __ BytecodeOperandIdx(0); |
| 679 Node* slot_context = __ GetContext(); | 439 Node* slot_context = __ GetContext(); |
| 680 Node* result = __ LoadContextElement(slot_context, slot_index); | 440 Node* result = __ LoadContextElement(slot_context, slot_index); |
| 681 __ SetAccumulator(result); | 441 __ SetAccumulator(result); |
| 682 __ Dispatch(); | 442 __ Dispatch(); |
| 683 } | 443 } |
| 684 | 444 |
| 685 // LdaImmutableCurrentContextSlot <slot_index> | 445 // LdaImmutableCurrentContextSlot <slot_index> |
| 686 // | 446 // |
| 687 // Load the object in |slot_index| of the current context into the accumulator. | 447 // Load the object in |slot_index| of the current context into the accumulator. |
| 688 void Interpreter::DoLdaImmutableCurrentContextSlot( | 448 void InterpreterGenerator::DoLdaImmutableCurrentContextSlot( |
| 689 InterpreterAssembler* assembler) { | 449 InterpreterAssembler* assembler) { |
| 690 // TODO(danno) Share the actual code object rather than creating a duplicate one. | 450 // TODO(danno) Share the actual code object rather than creating a duplicate one. |
| 691 DoLdaCurrentContextSlot(assembler); | 451 DoLdaCurrentContextSlot(assembler); |
| 692 } | 452 } |
| 693 | 453 |
| 694 // StaContextSlot <context> <slot_index> <depth> | 454 // StaContextSlot <context> <slot_index> <depth> |
| 695 // | 455 // |
| 696 // Stores the object in the accumulator into |slot_index| of the context at | 456 // Stores the object in the accumulator into |slot_index| of the context at |
| 697 // |depth| in the context chain starting at |context|. | 457 // |depth| in the context chain starting at |context|. |
| 698 void Interpreter::DoStaContextSlot(InterpreterAssembler* assembler) { | 458 void InterpreterGenerator::DoStaContextSlot(InterpreterAssembler* assembler) { |
| 699 Node* value = __ GetAccumulator(); | 459 Node* value = __ GetAccumulator(); |
| 700 Node* reg_index = __ BytecodeOperandReg(0); | 460 Node* reg_index = __ BytecodeOperandReg(0); |
| 701 Node* context = __ LoadRegister(reg_index); | 461 Node* context = __ LoadRegister(reg_index); |
| 702 Node* slot_index = __ BytecodeOperandIdx(1); | 462 Node* slot_index = __ BytecodeOperandIdx(1); |
| 703 Node* depth = __ BytecodeOperandUImm(2); | 463 Node* depth = __ BytecodeOperandUImm(2); |
| 704 Node* slot_context = __ GetContextAtDepth(context, depth); | 464 Node* slot_context = __ GetContextAtDepth(context, depth); |
| 705 __ StoreContextElement(slot_context, slot_index, value); | 465 __ StoreContextElement(slot_context, slot_index, value); |
| 706 __ Dispatch(); | 466 __ Dispatch(); |
| 707 } | 467 } |
| 708 | 468 |
| 709 // StaCurrentContextSlot <slot_index> | 469 // StaCurrentContextSlot <slot_index> |
| 710 // | 470 // |
| 711 // Stores the object in the accumulator into |slot_index| of the current | 471 // Stores the object in the accumulator into |slot_index| of the current |
| 712 // context. | 472 // context. |
| 713 void Interpreter::DoStaCurrentContextSlot(InterpreterAssembler* assembler) { | 473 void InterpreterGenerator::DoStaCurrentContextSlot( |
| 474 InterpreterAssembler* assembler) { |
| 714 Node* value = __ GetAccumulator(); | 475 Node* value = __ GetAccumulator(); |
| 715 Node* slot_index = __ BytecodeOperandIdx(0); | 476 Node* slot_index = __ BytecodeOperandIdx(0); |
| 716 Node* slot_context = __ GetContext(); | 477 Node* slot_context = __ GetContext(); |
| 717 __ StoreContextElement(slot_context, slot_index, value); | 478 __ StoreContextElement(slot_context, slot_index, value); |
| 718 __ Dispatch(); | 479 __ Dispatch(); |
| 719 } | 480 } |
| 720 | 481 |
| 721 void Interpreter::DoLdaLookupSlot(Runtime::FunctionId function_id, | 482 void InterpreterGenerator::DoLdaLookupSlot(Runtime::FunctionId function_id, |
| 722 InterpreterAssembler* assembler) { | 483 InterpreterAssembler* assembler) { |
| 723 Node* name_index = __ BytecodeOperandIdx(0); | 484 Node* name_index = __ BytecodeOperandIdx(0); |
| 724 Node* name = __ LoadConstantPoolEntry(name_index); | 485 Node* name = __ LoadConstantPoolEntry(name_index); |
| 725 Node* context = __ GetContext(); | 486 Node* context = __ GetContext(); |
| 726 Node* result = __ CallRuntime(function_id, context, name); | 487 Node* result = __ CallRuntime(function_id, context, name); |
| 727 __ SetAccumulator(result); | 488 __ SetAccumulator(result); |
| 728 __ Dispatch(); | 489 __ Dispatch(); |
| 729 } | 490 } |
| 730 | 491 |
| 731 // LdaLookupSlot <name_index> | 492 // LdaLookupSlot <name_index> |
| 732 // | 493 // |
| 733 // Lookup the object with the name in constant pool entry |name_index| | 494 // Lookup the object with the name in constant pool entry |name_index| |
| 734 // dynamically. | 495 // dynamically. |
| 735 void Interpreter::DoLdaLookupSlot(InterpreterAssembler* assembler) { | 496 void InterpreterGenerator::DoLdaLookupSlot(InterpreterAssembler* assembler) { |
| 736 DoLdaLookupSlot(Runtime::kLoadLookupSlot, assembler); | 497 DoLdaLookupSlot(Runtime::kLoadLookupSlot, assembler); |
| 737 } | 498 } |
| 738 | 499 |
| 739 // LdaLookupSlotInsideTypeof <name_index> | 500 // LdaLookupSlotInsideTypeof <name_index> |
| 740 // | 501 // |
| 741 // Lookup the object with the name in constant pool entry |name_index| | 502 // Lookup the object with the name in constant pool entry |name_index| |
| 742 // dynamically without causing a NoReferenceError. | 503 // dynamically without causing a NoReferenceError. |
| 743 void Interpreter::DoLdaLookupSlotInsideTypeof(InterpreterAssembler* assembler) { | 504 void InterpreterGenerator::DoLdaLookupSlotInsideTypeof( |
| 505 InterpreterAssembler* assembler) { |
| 744 DoLdaLookupSlot(Runtime::kLoadLookupSlotInsideTypeof, assembler); | 506 DoLdaLookupSlot(Runtime::kLoadLookupSlotInsideTypeof, assembler); |
| 745 } | 507 } |
| 746 | 508 |
| 747 void Interpreter::DoLdaLookupContextSlot(Runtime::FunctionId function_id, | 509 void InterpreterGenerator::DoLdaLookupContextSlot( |
| 748 InterpreterAssembler* assembler) { | 510 Runtime::FunctionId function_id, InterpreterAssembler* assembler) { |
| 749 Node* context = __ GetContext(); | 511 Node* context = __ GetContext(); |
| 750 Node* name_index = __ BytecodeOperandIdx(0); | 512 Node* name_index = __ BytecodeOperandIdx(0); |
| 751 Node* slot_index = __ BytecodeOperandIdx(1); | 513 Node* slot_index = __ BytecodeOperandIdx(1); |
| 752 Node* depth = __ BytecodeOperandUImm(2); | 514 Node* depth = __ BytecodeOperandUImm(2); |
| 753 | 515 |
| 754 Label slowpath(assembler, Label::kDeferred); | 516 Label slowpath(assembler, Label::kDeferred); |
| 755 | 517 |
| 756 // Check for context extensions to allow the fast path. | 518 // Check for context extensions to allow the fast path. |
| 757 __ GotoIfHasContextExtensionUpToDepth(context, depth, &slowpath); | 519 __ GotoIfHasContextExtensionUpToDepth(context, depth, &slowpath); |
| 758 | 520 |
| (...skipping 12 matching lines...) |
| 771 Node* result = __ CallRuntime(function_id, context, name); | 533 Node* result = __ CallRuntime(function_id, context, name); |
| 772 __ SetAccumulator(result); | 534 __ SetAccumulator(result); |
| 773 __ Dispatch(); | 535 __ Dispatch(); |
| 774 } | 536 } |
| 775 } | 537 } |
| 776 | 538 |
| 777 // LdaLookupContextSlot <name_index> <slot_index> <depth> | 539 // LdaLookupContextSlot <name_index> <slot_index> <depth> |
| 778 // | 540 // |
| 779 // Lookup the object with the name in constant pool entry |name_index| | 541 // Lookup the object with the name in constant pool entry |name_index| |
| 780 // dynamically. | 542 // dynamically. |
| 781 void Interpreter::DoLdaLookupContextSlot(InterpreterAssembler* assembler) { | 543 void InterpreterGenerator::DoLdaLookupContextSlot( |
| 544 InterpreterAssembler* assembler) { |
| 782 DoLdaLookupContextSlot(Runtime::kLoadLookupSlot, assembler); | 545 DoLdaLookupContextSlot(Runtime::kLoadLookupSlot, assembler); |
| 783 } | 546 } |
| 784 | 547 |
| 785 // LdaLookupContextSlotInsideTypeof <name_index> <slot_index> <depth> | 548 // LdaLookupContextSlotInsideTypeof <name_index> <slot_index> <depth> |
| 786 // | 549 // |
| 787 // Lookup the object with the name in constant pool entry |name_index| | 550 // Lookup the object with the name in constant pool entry |name_index| |
| 788 // dynamically without causing a NoReferenceError. | 551 // dynamically without causing a NoReferenceError. |
| 789 void Interpreter::DoLdaLookupContextSlotInsideTypeof( | 552 void InterpreterGenerator::DoLdaLookupContextSlotInsideTypeof( |
| 790 InterpreterAssembler* assembler) { | 553 InterpreterAssembler* assembler) { |
| 791 DoLdaLookupContextSlot(Runtime::kLoadLookupSlotInsideTypeof, assembler); | 554 DoLdaLookupContextSlot(Runtime::kLoadLookupSlotInsideTypeof, assembler); |
| 792 } | 555 } |
| 793 | 556 |
| 794 void Interpreter::DoLdaLookupGlobalSlot(Runtime::FunctionId function_id, | 557 void InterpreterGenerator::DoLdaLookupGlobalSlot( |
| 795 InterpreterAssembler* assembler) { | 558 Runtime::FunctionId function_id, InterpreterAssembler* assembler) { |
| 796 Node* context = __ GetContext(); | 559 Node* context = __ GetContext(); |
| 797 Node* depth = __ BytecodeOperandUImm(2); | 560 Node* depth = __ BytecodeOperandUImm(2); |
| 798 | 561 |
| 799 Label slowpath(assembler, Label::kDeferred); | 562 Label slowpath(assembler, Label::kDeferred); |
| 800 | 563 |
| 801 // Check for context extensions to allow the fast path | 564 // Check for context extensions to allow the fast path |
| 802 __ GotoIfHasContextExtensionUpToDepth(context, depth, &slowpath); | 565 __ GotoIfHasContextExtensionUpToDepth(context, depth, &slowpath); |
| 803 | 566 |
| 804 // Fast path does a normal load global | 567 // Fast path does a normal load global |
| 805 { | 568 { |
| (...skipping 16 matching lines...) |
| 822 Node* result = __ CallRuntime(function_id, context, name); | 585 Node* result = __ CallRuntime(function_id, context, name); |
| 823 __ SetAccumulator(result); | 586 __ SetAccumulator(result); |
| 824 __ Dispatch(); | 587 __ Dispatch(); |
| 825 } | 588 } |
| 826 } | 589 } |
| 827 | 590 |
| 828 // LdaLookupGlobalSlot <name_index> <feedback_slot> <depth> | 591 // LdaLookupGlobalSlot <name_index> <feedback_slot> <depth> |
| 829 // | 592 // |
| 830 // Lookup the object with the name in constant pool entry |name_index| | 593 // Lookup the object with the name in constant pool entry |name_index| |
| 831 // dynamically. | 594 // dynamically. |
| 832 void Interpreter::DoLdaLookupGlobalSlot(InterpreterAssembler* assembler) { | 595 void InterpreterGenerator::DoLdaLookupGlobalSlot( |
| 596 InterpreterAssembler* assembler) { |
| 833 DoLdaLookupGlobalSlot(Runtime::kLoadLookupSlot, assembler); | 597 DoLdaLookupGlobalSlot(Runtime::kLoadLookupSlot, assembler); |
| 834 } | 598 } |
| 835 | 599 |
| 836 // LdaLookupGlobalSlotInsideTypeof <name_index> <feedback_slot> <depth> | 600 // LdaLookupGlobalSlotInsideTypeof <name_index> <feedback_slot> <depth> |
| 837 // | 601 // |
| 838 // Lookup the object with the name in constant pool entry |name_index| | 602 // Lookup the object with the name in constant pool entry |name_index| |
| 839 // dynamically without causing a NoReferenceError. | 603 // dynamically without causing a NoReferenceError. |
| 840 void Interpreter::DoLdaLookupGlobalSlotInsideTypeof( | 604 void InterpreterGenerator::DoLdaLookupGlobalSlotInsideTypeof( |
| 841 InterpreterAssembler* assembler) { | 605 InterpreterAssembler* assembler) { |
| 842 DoLdaLookupGlobalSlot(Runtime::kLoadLookupSlotInsideTypeof, assembler); | 606 DoLdaLookupGlobalSlot(Runtime::kLoadLookupSlotInsideTypeof, assembler); |
| 843 } | 607 } |
| 844 | 608 |
| 845 void Interpreter::DoStaLookupSlot(LanguageMode language_mode, | 609 void InterpreterGenerator::DoStaLookupSlot(LanguageMode language_mode, |
| 846 InterpreterAssembler* assembler) { | 610 InterpreterAssembler* assembler) { |
| 847 Node* value = __ GetAccumulator(); | 611 Node* value = __ GetAccumulator(); |
| 848 Node* index = __ BytecodeOperandIdx(0); | 612 Node* index = __ BytecodeOperandIdx(0); |
| 849 Node* name = __ LoadConstantPoolEntry(index); | 613 Node* name = __ LoadConstantPoolEntry(index); |
| 850 Node* context = __ GetContext(); | 614 Node* context = __ GetContext(); |
| 851 Node* result = __ CallRuntime(is_strict(language_mode) | 615 Node* result = __ CallRuntime(is_strict(language_mode) |
| 852 ? Runtime::kStoreLookupSlot_Strict | 616 ? Runtime::kStoreLookupSlot_Strict |
| 853 : Runtime::kStoreLookupSlot_Sloppy, | 617 : Runtime::kStoreLookupSlot_Sloppy, |
| 854 context, name, value); | 618 context, name, value); |
| 855 __ SetAccumulator(result); | 619 __ SetAccumulator(result); |
| 856 __ Dispatch(); | 620 __ Dispatch(); |
| 857 } | 621 } |
| 858 | 622 |
| 859 // StaLookupSlotSloppy <name_index> | 623 // StaLookupSlotSloppy <name_index> |
| 860 // | 624 // |
| 861 // Store the object in accumulator to the object with the name in constant | 625 // Store the object in accumulator to the object with the name in constant |
| 862 // pool entry |name_index| in sloppy mode. | 626 // pool entry |name_index| in sloppy mode. |
| 863 void Interpreter::DoStaLookupSlotSloppy(InterpreterAssembler* assembler) { | 627 void InterpreterGenerator::DoStaLookupSlotSloppy( |
| 628 InterpreterAssembler* assembler) { |
| 864 DoStaLookupSlot(LanguageMode::SLOPPY, assembler); | 629 DoStaLookupSlot(LanguageMode::SLOPPY, assembler); |
| 865 } | 630 } |
| 866 | 631 |
| 867 // StaLookupSlotStrict <name_index> | 632 // StaLookupSlotStrict <name_index> |
| 868 // | 633 // |
| 869 // Store the object in accumulator to the object with the name in constant | 634 // Store the object in accumulator to the object with the name in constant |
| 870 // pool entry |name_index| in strict mode. | 635 // pool entry |name_index| in strict mode. |
| 871 void Interpreter::DoStaLookupSlotStrict(InterpreterAssembler* assembler) { | 636 void InterpreterGenerator::DoStaLookupSlotStrict( |
| 637 InterpreterAssembler* assembler) { |
| 872 DoStaLookupSlot(LanguageMode::STRICT, assembler); | 638 DoStaLookupSlot(LanguageMode::STRICT, assembler); |
| 873 } | 639 } |
| 874 | 640 |
| 875 void Interpreter::BuildLoadIC(int recv_operand_index, int slot_operand_index, | 641 void InterpreterGenerator::BuildLoadIC(int recv_operand_index, |
| 876 int name_operand_index, | 642 int slot_operand_index, |
| 877 InterpreterAssembler* assembler) { | 643 int name_operand_index, |
| 644 InterpreterAssembler* assembler) { |
| 878 __ Comment("BuildLoadIC"); | 645 __ Comment("BuildLoadIC"); |
| 879 | 646 |
| 880 // Load vector and slot. | 647 // Load vector and slot. |
| 881 Node* feedback_vector = __ LoadFeedbackVector(); | 648 Node* feedback_vector = __ LoadFeedbackVector(); |
| 882 Node* feedback_slot = __ BytecodeOperandIdx(slot_operand_index); | 649 Node* feedback_slot = __ BytecodeOperandIdx(slot_operand_index); |
| 883 Node* smi_slot = __ SmiTag(feedback_slot); | 650 Node* smi_slot = __ SmiTag(feedback_slot); |
| 884 | 651 |
| 885 // Load receiver. | 652 // Load receiver. |
| 886 Node* register_index = __ BytecodeOperandReg(recv_operand_index); | 653 Node* register_index = __ BytecodeOperandReg(recv_operand_index); |
| 887 Node* recv = __ LoadRegister(register_index); | 654 Node* recv = __ LoadRegister(register_index); |
| (...skipping 18 matching lines...) Expand all Loading... |
| 906 { | 673 { |
| 907 __ SetAccumulator(var_result.value()); | 674 __ SetAccumulator(var_result.value()); |
| 908 __ Dispatch(); | 675 __ Dispatch(); |
| 909 } | 676 } |
| 910 } | 677 } |
| 911 | 678 |
| 912 // LdaNamedProperty <object> <name_index> <slot> | 679 // LdaNamedProperty <object> <name_index> <slot> |
| 913 // | 680 // |
| 914 // Calls the LoadIC at FeedBackVector slot <slot> for <object> and the name at | 681 // Calls the LoadIC at FeedBackVector slot <slot> for <object> and the name at |
| 915 // constant pool entry <name_index>. | 682 // constant pool entry <name_index>. |
| 916 void Interpreter::DoLdaNamedProperty(InterpreterAssembler* assembler) { | 683 void InterpreterGenerator::DoLdaNamedProperty(InterpreterAssembler* assembler) { |
| 917 static const int kRecvOperandIndex = 0; | 684 static const int kRecvOperandIndex = 0; |
| 918 static const int kNameOperandIndex = 1; | 685 static const int kNameOperandIndex = 1; |
| 919 static const int kSlotOperandIndex = 2; | 686 static const int kSlotOperandIndex = 2; |
| 920 | 687 |
| 921 BuildLoadIC(kRecvOperandIndex, kSlotOperandIndex, kNameOperandIndex, | 688 BuildLoadIC(kRecvOperandIndex, kSlotOperandIndex, kNameOperandIndex, |
| 922 assembler); | 689 assembler); |
| 923 } | 690 } |
| 924 | 691 |
| 925 // LdaKeyedProperty <object> <slot> | 692 // LdaKeyedProperty <object> <slot> |
| 926 // | 693 // |
| 927 // Calls the KeyedLoadIC at FeedBackVector slot <slot> for <object> and the key | 694 // Calls the KeyedLoadIC at FeedBackVector slot <slot> for <object> and the key |
| 928 // in the accumulator. | 695 // in the accumulator. |
| 929 void Interpreter::DoLdaKeyedProperty(InterpreterAssembler* assembler) { | 696 void InterpreterGenerator::DoLdaKeyedProperty(InterpreterAssembler* assembler) { |
| 930 Callable ic = CodeFactory::KeyedLoadICInOptimizedCode(isolate_); | 697 Callable ic = CodeFactory::KeyedLoadICInOptimizedCode(isolate_); |
| 931 Node* code_target = __ HeapConstant(ic.code()); | 698 Node* code_target = __ HeapConstant(ic.code()); |
| 932 Node* reg_index = __ BytecodeOperandReg(0); | 699 Node* reg_index = __ BytecodeOperandReg(0); |
| 933 Node* object = __ LoadRegister(reg_index); | 700 Node* object = __ LoadRegister(reg_index); |
| 934 Node* name = __ GetAccumulator(); | 701 Node* name = __ GetAccumulator(); |
| 935 Node* raw_slot = __ BytecodeOperandIdx(1); | 702 Node* raw_slot = __ BytecodeOperandIdx(1); |
| 936 Node* smi_slot = __ SmiTag(raw_slot); | 703 Node* smi_slot = __ SmiTag(raw_slot); |
| 937 Node* feedback_vector = __ LoadFeedbackVector(); | 704 Node* feedback_vector = __ LoadFeedbackVector(); |
| 938 Node* context = __ GetContext(); | 705 Node* context = __ GetContext(); |
| 939 Node* result = __ CallStub(ic.descriptor(), code_target, context, object, | 706 Node* result = __ CallStub(ic.descriptor(), code_target, context, object, |
| 940 name, smi_slot, feedback_vector); | 707 name, smi_slot, feedback_vector); |
| 941 __ SetAccumulator(result); | 708 __ SetAccumulator(result); |
| 942 __ Dispatch(); | 709 __ Dispatch(); |
| 943 } | 710 } |
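
Both load handlers above pass the feedback slot index to the IC as a Smi (SmiTag(raw_slot)). As a rough, self-contained illustration of the tagging idea only -- a 32-bit-style encoding with the tag in the low bit; V8's real SmiTag/SmiUntag are architecture-dependent and live in the assembler:

#include <cassert>
#include <cstdint>

// Illustrative sketch only: low bit is the tag (0 for Smi), payload shifted
// left by one. Not V8's actual encoding on every architecture.
constexpr int32_t SmiTag(int32_t value) { return value << 1; }
constexpr int32_t SmiUntag(int32_t smi) { return smi >> 1; }

int main() {
  int32_t raw_slot = 3;                 // e.g. BytecodeOperandIdx(1)
  int32_t smi_slot = SmiTag(raw_slot);  // what gets passed to the IC stub
  assert(SmiUntag(smi_slot) == raw_slot);
  assert((smi_slot & 1) == 0);          // clear tag bit marks a Smi
  return 0;
}
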
| 944 | 711 |
| 945 void Interpreter::DoStoreIC(Callable ic, InterpreterAssembler* assembler) { | 712 void InterpreterGenerator::DoStoreIC(Callable ic, |
| 713 InterpreterAssembler* assembler) { |
| 946 Node* code_target = __ HeapConstant(ic.code()); | 714 Node* code_target = __ HeapConstant(ic.code()); |
| 947 Node* object_reg_index = __ BytecodeOperandReg(0); | 715 Node* object_reg_index = __ BytecodeOperandReg(0); |
| 948 Node* object = __ LoadRegister(object_reg_index); | 716 Node* object = __ LoadRegister(object_reg_index); |
| 949 Node* constant_index = __ BytecodeOperandIdx(1); | 717 Node* constant_index = __ BytecodeOperandIdx(1); |
| 950 Node* name = __ LoadConstantPoolEntry(constant_index); | 718 Node* name = __ LoadConstantPoolEntry(constant_index); |
| 951 Node* value = __ GetAccumulator(); | 719 Node* value = __ GetAccumulator(); |
| 952 Node* raw_slot = __ BytecodeOperandIdx(2); | 720 Node* raw_slot = __ BytecodeOperandIdx(2); |
| 953 Node* smi_slot = __ SmiTag(raw_slot); | 721 Node* smi_slot = __ SmiTag(raw_slot); |
| 954 Node* feedback_vector = __ LoadFeedbackVector(); | 722 Node* feedback_vector = __ LoadFeedbackVector(); |
| 955 Node* context = __ GetContext(); | 723 Node* context = __ GetContext(); |
| 956 __ CallStub(ic.descriptor(), code_target, context, object, name, value, | 724 __ CallStub(ic.descriptor(), code_target, context, object, name, value, |
| 957 smi_slot, feedback_vector); | 725 smi_slot, feedback_vector); |
| 958 __ Dispatch(); | 726 __ Dispatch(); |
| 959 } | 727 } |
| 960 | 728 |
| 961 // StaNamedPropertySloppy <object> <name_index> <slot> | 729 // StaNamedPropertySloppy <object> <name_index> <slot> |
| 962 // | 730 // |
| 963 // Calls the sloppy mode StoreIC at FeedbackVector slot <slot> for <object> and | 731 // Calls the sloppy mode StoreIC at FeedbackVector slot <slot> for <object> and |
| 964 // the name in constant pool entry <name_index> with the value in the | 732 // the name in constant pool entry <name_index> with the value in the |
| 965 // accumulator. | 733 // accumulator. |
| 966 void Interpreter::DoStaNamedPropertySloppy(InterpreterAssembler* assembler) { | 734 void InterpreterGenerator::DoStaNamedPropertySloppy( |
| 735 InterpreterAssembler* assembler) { |
| 967 Callable ic = CodeFactory::StoreICInOptimizedCode(isolate_, SLOPPY); | 736 Callable ic = CodeFactory::StoreICInOptimizedCode(isolate_, SLOPPY); |
| 968 DoStoreIC(ic, assembler); | 737 DoStoreIC(ic, assembler); |
| 969 } | 738 } |
| 970 | 739 |
| 971 // StaNamedPropertyStrict <object> <name_index> <slot> | 740 // StaNamedPropertyStrict <object> <name_index> <slot> |
| 972 // | 741 // |
| 973 // Calls the strict mode StoreIC at FeedbackVector slot <slot> for <object> and | 742 // Calls the strict mode StoreIC at FeedbackVector slot <slot> for <object> and |
| 974 // the name in constant pool entry <name_index> with the value in the | 743 // the name in constant pool entry <name_index> with the value in the |
| 975 // accumulator. | 744 // accumulator. |
| 976 void Interpreter::DoStaNamedPropertyStrict(InterpreterAssembler* assembler) { | 745 void InterpreterGenerator::DoStaNamedPropertyStrict( |
| 746 InterpreterAssembler* assembler) { |
| 977 Callable ic = CodeFactory::StoreICInOptimizedCode(isolate_, STRICT); | 747 Callable ic = CodeFactory::StoreICInOptimizedCode(isolate_, STRICT); |
| 978 DoStoreIC(ic, assembler); | 748 DoStoreIC(ic, assembler); |
| 979 } | 749 } |
| 980 | 750 |
| 981 // StaNamedOwnProperty <object> <name_index> <slot> | 751 // StaNamedOwnProperty <object> <name_index> <slot> |
| 982 // | 752 // |
| 983 // Calls the StoreOwnIC at FeedbackVector slot <slot> for <object> and | 753 // Calls the StoreOwnIC at FeedbackVector slot <slot> for <object> and |
| 984 // the name in constant pool entry <name_index> with the value in the | 754 // the name in constant pool entry <name_index> with the value in the |
| 985 // accumulator. | 755 // accumulator. |
| 986 void Interpreter::DoStaNamedOwnProperty(InterpreterAssembler* assembler) { | 756 void InterpreterGenerator::DoStaNamedOwnProperty( |
| 757 InterpreterAssembler* assembler) { |
| 987 Callable ic = CodeFactory::StoreOwnICInOptimizedCode(isolate_); | 758 Callable ic = CodeFactory::StoreOwnICInOptimizedCode(isolate_); |
| 988 DoStoreIC(ic, assembler); | 759 DoStoreIC(ic, assembler); |
| 989 } | 760 } |
| 990 | 761 |
| 991 void Interpreter::DoKeyedStoreIC(Callable ic, InterpreterAssembler* assembler) { | 762 void InterpreterGenerator::DoKeyedStoreIC(Callable ic, |
| 763 InterpreterAssembler* assembler) { |
| 992 Node* code_target = __ HeapConstant(ic.code()); | 764 Node* code_target = __ HeapConstant(ic.code()); |
| 993 Node* object_reg_index = __ BytecodeOperandReg(0); | 765 Node* object_reg_index = __ BytecodeOperandReg(0); |
| 994 Node* object = __ LoadRegister(object_reg_index); | 766 Node* object = __ LoadRegister(object_reg_index); |
| 995 Node* name_reg_index = __ BytecodeOperandReg(1); | 767 Node* name_reg_index = __ BytecodeOperandReg(1); |
| 996 Node* name = __ LoadRegister(name_reg_index); | 768 Node* name = __ LoadRegister(name_reg_index); |
| 997 Node* value = __ GetAccumulator(); | 769 Node* value = __ GetAccumulator(); |
| 998 Node* raw_slot = __ BytecodeOperandIdx(2); | 770 Node* raw_slot = __ BytecodeOperandIdx(2); |
| 999 Node* smi_slot = __ SmiTag(raw_slot); | 771 Node* smi_slot = __ SmiTag(raw_slot); |
| 1000 Node* feedback_vector = __ LoadFeedbackVector(); | 772 Node* feedback_vector = __ LoadFeedbackVector(); |
| 1001 Node* context = __ GetContext(); | 773 Node* context = __ GetContext(); |
| 1002 __ CallStub(ic.descriptor(), code_target, context, object, name, value, | 774 __ CallStub(ic.descriptor(), code_target, context, object, name, value, |
| 1003 smi_slot, feedback_vector); | 775 smi_slot, feedback_vector); |
| 1004 __ Dispatch(); | 776 __ Dispatch(); |
| 1005 } | 777 } |
| 1006 | 778 |
| 1007 // StaKeyedPropertySloppy <object> <key> <slot> | 779 // StaKeyedPropertySloppy <object> <key> <slot> |
| 1008 // | 780 // |
| 1009 // Calls the sloppy mode KeyedStoreIC at FeedbackVector slot <slot> for <object> | 781 // Calls the sloppy mode KeyedStoreIC at FeedbackVector slot <slot> for <object> |
| 1010 // and the key <key> with the value in the accumulator. | 782 // and the key <key> with the value in the accumulator. |
| 1011 void Interpreter::DoStaKeyedPropertySloppy(InterpreterAssembler* assembler) { | 783 void InterpreterGenerator::DoStaKeyedPropertySloppy( |
| 784 InterpreterAssembler* assembler) { |
| 1012 Callable ic = CodeFactory::KeyedStoreICInOptimizedCode(isolate_, SLOPPY); | 785 Callable ic = CodeFactory::KeyedStoreICInOptimizedCode(isolate_, SLOPPY); |
| 1013 DoKeyedStoreIC(ic, assembler); | 786 DoKeyedStoreIC(ic, assembler); |
| 1014 } | 787 } |
| 1015 | 788 |
| 1016 // StaKeyedPropertyStrict <object> <key> <slot> | 789 // StaKeyedPropertyStrict <object> <key> <slot> |
| 1017 // | 790 // |
| 1018 // Calls the strict mode KeyedStoreIC at FeedbackVector slot <slot> for <object> | 791 // Calls the strict mode KeyedStoreIC at FeedbackVector slot <slot> for <object> |
| 1019 // and the key <key> with the value in the accumulator. | 792 // and the key <key> with the value in the accumulator. |
| 1020 void Interpreter::DoStaKeyedPropertyStrict(InterpreterAssembler* assembler) { | 793 void InterpreterGenerator::DoStaKeyedPropertyStrict( |
| 794 InterpreterAssembler* assembler) { |
| 1021 Callable ic = CodeFactory::KeyedStoreICInOptimizedCode(isolate_, STRICT); | 795 Callable ic = CodeFactory::KeyedStoreICInOptimizedCode(isolate_, STRICT); |
| 1022 DoKeyedStoreIC(ic, assembler); | 796 DoKeyedStoreIC(ic, assembler); |
| 1023 } | 797 } |
| 1024 | 798 |
| 1025 // StaDataPropertyInLiteral <object> <name> <flags> | 799 // StaDataPropertyInLiteral <object> <name> <flags> |
| 1026 // | 800 // |
| 1027 // Define a property <name> with value from the accumulator in <object>. | 801 // Define a property <name> with value from the accumulator in <object>. |
| 1028 // Property attributes and whether set_function_name are stored in | 802 // Property attributes and whether set_function_name are stored in |
| 1029 // DataPropertyInLiteralFlags <flags>. | 803 // DataPropertyInLiteralFlags <flags>. |
| 1030 // | 804 // |
| 1031 // This definition is not observable and is used only for definitions | 805 // This definition is not observable and is used only for definitions |
| 1032 // in object or class literals. | 806 // in object or class literals. |
| 1033 void Interpreter::DoStaDataPropertyInLiteral(InterpreterAssembler* assembler) { | 807 void InterpreterGenerator::DoStaDataPropertyInLiteral( |
| 808 InterpreterAssembler* assembler) { |
| 1034 Node* object = __ LoadRegister(__ BytecodeOperandReg(0)); | 809 Node* object = __ LoadRegister(__ BytecodeOperandReg(0)); |
| 1035 Node* name = __ LoadRegister(__ BytecodeOperandReg(1)); | 810 Node* name = __ LoadRegister(__ BytecodeOperandReg(1)); |
| 1036 Node* value = __ GetAccumulator(); | 811 Node* value = __ GetAccumulator(); |
| 1037 Node* flags = __ SmiFromWord32(__ BytecodeOperandFlag(2)); | 812 Node* flags = __ SmiFromWord32(__ BytecodeOperandFlag(2)); |
| 1038 Node* vector_index = __ SmiTag(__ BytecodeOperandIdx(3)); | 813 Node* vector_index = __ SmiTag(__ BytecodeOperandIdx(3)); |
| 1039 | 814 |
| 1040 Node* feedback_vector = __ LoadFeedbackVector(); | 815 Node* feedback_vector = __ LoadFeedbackVector(); |
| 1041 Node* context = __ GetContext(); | 816 Node* context = __ GetContext(); |
| 1042 | 817 |
| 1043 __ CallRuntime(Runtime::kDefineDataPropertyInLiteral, context, object, name, | 818 __ CallRuntime(Runtime::kDefineDataPropertyInLiteral, context, object, name, |
| 1044 value, flags, feedback_vector, vector_index); | 819 value, flags, feedback_vector, vector_index); |
| 1045 __ Dispatch(); | 820 __ Dispatch(); |
| 1046 } | 821 } |
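
As the comment above says, the <flags> operand packs the property attributes together with the set_function_name bit. The real layout is defined by DataPropertyInLiteralFlags in bytecode-flags.h; the sketch below uses made-up bit names purely to illustrate the packing:

#include <cstdint>
#include <cstdio>

// Hypothetical layout for illustration only; NOT the real
// DataPropertyInLiteralFlags bit assignment.
enum IllustrativeFlags : uint8_t {
  kDontEnumBit        = 1 << 0,
  kSetFunctionNameBit = 1 << 1,
};

int main() {
  uint8_t flags = kDontEnumBit | kSetFunctionNameBit;
  std::printf("set_function_name? %s\n",
              (flags & kSetFunctionNameBit) ? "yes" : "no");
  return 0;
}
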
| 1047 | 822 |
| 1048 void Interpreter::DoCollectTypeProfile(InterpreterAssembler* assembler) { | 823 void InterpreterGenerator::DoCollectTypeProfile( |
| 824 InterpreterAssembler* assembler) { |
| 1049 Node* name = __ LoadRegister(__ BytecodeOperandReg(0)); | 825 Node* name = __ LoadRegister(__ BytecodeOperandReg(0)); |
| 1050 Node* value = __ GetAccumulator(); | 826 Node* value = __ GetAccumulator(); |
| 1051 Node* vector_index = __ SmiTag(__ BytecodeOperandIdx(1)); | 827 Node* vector_index = __ SmiTag(__ BytecodeOperandIdx(1)); |
| 1052 | 828 |
| 1053 Node* feedback_vector = __ LoadFeedbackVector(); | 829 Node* feedback_vector = __ LoadFeedbackVector(); |
| 1054 Node* context = __ GetContext(); | 830 Node* context = __ GetContext(); |
| 1055 | 831 |
| 1056 __ CallRuntime(Runtime::kCollectTypeProfile, context, name, value, | 832 __ CallRuntime(Runtime::kCollectTypeProfile, context, name, value, |
| 1057 feedback_vector, vector_index); | 833 feedback_vector, vector_index); |
| 1058 __ Dispatch(); | 834 __ Dispatch(); |
| 1059 } | 835 } |
| 1060 | 836 |
| 1061 // LdaModuleVariable <cell_index> <depth> | 837 // LdaModuleVariable <cell_index> <depth> |
| 1062 // | 838 // |
| 1063 // Load the contents of a module variable into the accumulator. The variable is | 839 // Load the contents of a module variable into the accumulator. The variable is |
| 1064 // identified by <cell_index>. <depth> is the depth of the current context | 840 // identified by <cell_index>. <depth> is the depth of the current context |
| 1065 // relative to the module context. | 841 // relative to the module context. |
| 1066 void Interpreter::DoLdaModuleVariable(InterpreterAssembler* assembler) { | 842 void InterpreterGenerator::DoLdaModuleVariable( |
| 843 InterpreterAssembler* assembler) { |
| 1067 Node* cell_index = __ BytecodeOperandImmIntPtr(0); | 844 Node* cell_index = __ BytecodeOperandImmIntPtr(0); |
| 1068 Node* depth = __ BytecodeOperandUImm(1); | 845 Node* depth = __ BytecodeOperandUImm(1); |
| 1069 | 846 |
| 1070 Node* module_context = __ GetContextAtDepth(__ GetContext(), depth); | 847 Node* module_context = __ GetContextAtDepth(__ GetContext(), depth); |
| 1071 Node* module = | 848 Node* module = |
| 1072 __ LoadContextElement(module_context, Context::EXTENSION_INDEX); | 849 __ LoadContextElement(module_context, Context::EXTENSION_INDEX); |
| 1073 | 850 |
| 1074 Label if_export(assembler), if_import(assembler), end(assembler); | 851 Label if_export(assembler), if_import(assembler), end(assembler); |
| 1075 __ Branch(__ IntPtrGreaterThan(cell_index, __ IntPtrConstant(0)), &if_export, | 852 __ Branch(__ IntPtrGreaterThan(cell_index, __ IntPtrConstant(0)), &if_export, |
| 1076 &if_import); | 853 &if_import); |
| (...skipping 21 matching lines...) |
| 1098 } | 875 } |
| 1099 | 876 |
| 1100 __ Bind(&end); | 877 __ Bind(&end); |
| 1101 __ Dispatch(); | 878 __ Dispatch(); |
| 1102 } | 879 } |
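
The branch above keys off the sign of <cell_index>: positive indices name the module's own export cells, non-positive ones name import cells. A standalone sketch of that convention, with plain vectors standing in for the module's cell arrays (the -1 offsets are shown for illustration under that assumption, not as the literal V8 code):

#include <cstdio>
#include <vector>

// Sketch of the LdaModuleVariable sign convention: cell_index > 0 selects an
// export cell, otherwise an import cell.
int LoadModuleVariable(const std::vector<int>& exports,
                       const std::vector<int>& imports, int cell_index) {
  if (cell_index > 0) {
    return exports[cell_index - 1];   // exports use positive, 1-based indices
  }
  return imports[-cell_index - 1];    // imports are encoded as negative indices
}

int main() {
  std::vector<int> exports = {10, 20};
  std::vector<int> imports = {30};
  std::printf("%d %d\n", LoadModuleVariable(exports, imports, 2),
              LoadModuleVariable(exports, imports, -1));
  return 0;
}
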
| 1103 | 880 |
| 1104 // StaModuleVariable <cell_index> <depth> | 881 // StaModuleVariable <cell_index> <depth> |
| 1105 // | 882 // |
| 1106 // Store accumulator to the module variable identified by <cell_index>. | 883 // Store accumulator to the module variable identified by <cell_index>. |
| 1107 // <depth> is the depth of the current context relative to the module context. | 884 // <depth> is the depth of the current context relative to the module context. |
| 1108 void Interpreter::DoStaModuleVariable(InterpreterAssembler* assembler) { | 885 void InterpreterGenerator::DoStaModuleVariable( |
| 886 InterpreterAssembler* assembler) { |
| 1109 Node* value = __ GetAccumulator(); | 887 Node* value = __ GetAccumulator(); |
| 1110 Node* cell_index = __ BytecodeOperandImmIntPtr(0); | 888 Node* cell_index = __ BytecodeOperandImmIntPtr(0); |
| 1111 Node* depth = __ BytecodeOperandUImm(1); | 889 Node* depth = __ BytecodeOperandUImm(1); |
| 1112 | 890 |
| 1113 Node* module_context = __ GetContextAtDepth(__ GetContext(), depth); | 891 Node* module_context = __ GetContextAtDepth(__ GetContext(), depth); |
| 1114 Node* module = | 892 Node* module = |
| 1115 __ LoadContextElement(module_context, Context::EXTENSION_INDEX); | 893 __ LoadContextElement(module_context, Context::EXTENSION_INDEX); |
| 1116 | 894 |
| 1117 Label if_export(assembler), if_import(assembler), end(assembler); | 895 Label if_export(assembler), if_import(assembler), end(assembler); |
| 1118 __ Branch(__ IntPtrGreaterThan(cell_index, __ IntPtrConstant(0)), &if_export, | 896 __ Branch(__ IntPtrGreaterThan(cell_index, __ IntPtrConstant(0)), &if_export, |
| (...skipping 18 matching lines...) |
| 1137 } | 915 } |
| 1138 | 916 |
| 1139 __ Bind(&end); | 917 __ Bind(&end); |
| 1140 __ Dispatch(); | 918 __ Dispatch(); |
| 1141 } | 919 } |
| 1142 | 920 |
| 1143 // PushContext <context> | 921 // PushContext <context> |
| 1144 // | 922 // |
| 1145 // Saves the current context in <context>, and pushes the accumulator as the | 923 // Saves the current context in <context>, and pushes the accumulator as the |
| 1146 // new current context. | 924 // new current context. |
| 1147 void Interpreter::DoPushContext(InterpreterAssembler* assembler) { | 925 void InterpreterGenerator::DoPushContext(InterpreterAssembler* assembler) { |
| 1148 Node* reg_index = __ BytecodeOperandReg(0); | 926 Node* reg_index = __ BytecodeOperandReg(0); |
| 1149 Node* new_context = __ GetAccumulator(); | 927 Node* new_context = __ GetAccumulator(); |
| 1150 Node* old_context = __ GetContext(); | 928 Node* old_context = __ GetContext(); |
| 1151 __ StoreRegister(old_context, reg_index); | 929 __ StoreRegister(old_context, reg_index); |
| 1152 __ SetContext(new_context); | 930 __ SetContext(new_context); |
| 1153 __ Dispatch(); | 931 __ Dispatch(); |
| 1154 } | 932 } |
| 1155 | 933 |
| 1156 // PopContext <context> | 934 // PopContext <context> |
| 1157 // | 935 // |
| 1158 // Pops the current context and sets <context> as the new context. | 936 // Pops the current context and sets <context> as the new context. |
| 1159 void Interpreter::DoPopContext(InterpreterAssembler* assembler) { | 937 void InterpreterGenerator::DoPopContext(InterpreterAssembler* assembler) { |
| 1160 Node* reg_index = __ BytecodeOperandReg(0); | 938 Node* reg_index = __ BytecodeOperandReg(0); |
| 1161 Node* context = __ LoadRegister(reg_index); | 939 Node* context = __ LoadRegister(reg_index); |
| 1162 __ SetContext(context); | 940 __ SetContext(context); |
| 1163 __ Dispatch(); | 941 __ Dispatch(); |
| 1164 } | 942 } |
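
PushContext stashes the current context in the operand register and installs the accumulator as the new context; PopContext restores a previously saved context from a register. The toy sketch below mirrors that save/switch/restore flow with a stand-in frame (ToyFrame is invented here, not a V8 type):

#include <cassert>
#include <vector>

struct ToyFrame {
  std::vector<int> regs = std::vector<int>(8, 0);  // stand-in register file
  int context = 0;                                 // stand-in current context
  int accumulator = 0;
};

// Mirrors PushContext: old context -> register, accumulator -> context.
void PushContext(ToyFrame& f, int reg) {
  f.regs[reg] = f.context;
  f.context = f.accumulator;
}

// Mirrors PopContext: register -> context.
void PopContext(ToyFrame& f, int reg) { f.context = f.regs[reg]; }

int main() {
  ToyFrame f;
  f.context = 1;
  f.accumulator = 2;   // the newly created context arrives in the accumulator
  PushContext(f, 0);
  assert(f.context == 2 && f.regs[0] == 1);
  PopContext(f, 0);
  assert(f.context == 1);
  return 0;
}
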
| 1165 | 943 |
| 1166 // TODO(mythria): Remove this function once all CompareOps record type feedback. | 944 // TODO(mythria): Remove this function once all CompareOps record type feedback. |
| 1167 void Interpreter::DoCompareOp(Token::Value compare_op, | 945 void InterpreterGenerator::DoCompareOp(Token::Value compare_op, |
| 1168 InterpreterAssembler* assembler) { | 946 InterpreterAssembler* assembler) { |
| 1169 Node* reg_index = __ BytecodeOperandReg(0); | 947 Node* reg_index = __ BytecodeOperandReg(0); |
| 1170 Node* lhs = __ LoadRegister(reg_index); | 948 Node* lhs = __ LoadRegister(reg_index); |
| 1171 Node* rhs = __ GetAccumulator(); | 949 Node* rhs = __ GetAccumulator(); |
| 1172 Node* context = __ GetContext(); | 950 Node* context = __ GetContext(); |
| 1173 Node* result; | 951 Node* result; |
| 1174 switch (compare_op) { | 952 switch (compare_op) { |
| 1175 case Token::IN: | 953 case Token::IN: |
| 1176 result = assembler->HasProperty(rhs, lhs, context); | 954 result = assembler->HasProperty(rhs, lhs, context); |
| 1177 break; | 955 break; |
| 1178 case Token::INSTANCEOF: | 956 case Token::INSTANCEOF: |
| 1179 result = assembler->InstanceOf(lhs, rhs, context); | 957 result = assembler->InstanceOf(lhs, rhs, context); |
| 1180 break; | 958 break; |
| 1181 default: | 959 default: |
| 1182 UNREACHABLE(); | 960 UNREACHABLE(); |
| 1183 } | 961 } |
| 1184 __ SetAccumulator(result); | 962 __ SetAccumulator(result); |
| 1185 __ Dispatch(); | 963 __ Dispatch(); |
| 1186 } | 964 } |
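
Note the operand order in the IN case above: the register value (lhs) is the key and the accumulator (rhs) is the object, so the handler calls HasProperty(rhs, lhs), while INSTANCEOF keeps the natural (lhs, rhs) order. A tiny illustration of that asymmetry with an invented stand-in HasProperty:

#include <cassert>
#include <set>
#include <string>

// Stand-in with the same argument order as the real call: (object, key).
bool HasProperty(const std::set<std::string>& object, const std::string& key) {
  return object.count(key) != 0;
}

int main() {
  // For `key in obj`, the interpreter has the key in a register (lhs) and the
  // object in the accumulator (rhs), so the call swaps to HasProperty(rhs, lhs).
  std::set<std::string> obj = {"x"};
  std::string lhs = "x";          // register operand: the key
  const auto& rhs = obj;          // accumulator: the object
  assert(HasProperty(rhs, lhs));
  return 0;
}
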
| 1187 | 965 |
| 1188 template <class Generator> | 966 template <class Generator> |
| 1189 void Interpreter::DoBinaryOpWithFeedback(InterpreterAssembler* assembler) { | 967 void InterpreterGenerator::DoBinaryOpWithFeedback( |
| 968 InterpreterAssembler* assembler) { |
| 1190 Node* reg_index = __ BytecodeOperandReg(0); | 969 Node* reg_index = __ BytecodeOperandReg(0); |
| 1191 Node* lhs = __ LoadRegister(reg_index); | 970 Node* lhs = __ LoadRegister(reg_index); |
| 1192 Node* rhs = __ GetAccumulator(); | 971 Node* rhs = __ GetAccumulator(); |
| 1193 Node* context = __ GetContext(); | 972 Node* context = __ GetContext(); |
| 1194 Node* slot_index = __ BytecodeOperandIdx(1); | 973 Node* slot_index = __ BytecodeOperandIdx(1); |
| 1195 Node* feedback_vector = __ LoadFeedbackVector(); | 974 Node* feedback_vector = __ LoadFeedbackVector(); |
| 1196 Node* result = Generator::Generate(assembler, lhs, rhs, slot_index, | 975 Node* result = Generator::Generate(assembler, lhs, rhs, slot_index, |
| 1197 feedback_vector, context); | 976 feedback_vector, context); |
| 1198 __ SetAccumulator(result); | 977 __ SetAccumulator(result); |
| 1199 __ Dispatch(); | 978 __ Dispatch(); |
| 1200 } | 979 } |
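
DoBinaryOpWithFeedback does only the operand and feedback plumbing; the actual operation comes from the Generator template parameter, whose static Generate() each arithmetic bytecode supplies (AddWithFeedbackStub, SubtractWithFeedbackStub, and so on). A stripped-down sketch of the same compile-time dispatch pattern with toy types in place of the assembler and stubs:

#include <cstdio>

// Toy stand-ins for InterpreterAssembler and the *WithFeedbackStub generators.
struct ToyAssembler {
  int lhs = 0, rhs = 0;
};

struct AddGenerator {
  static int Generate(ToyAssembler* a) { return a->lhs + a->rhs; }
};
struct SubGenerator {
  static int Generate(ToyAssembler* a) { return a->lhs - a->rhs; }
};

// Mirrors DoBinaryOpWithFeedback<Generator>: shared plumbing, operation
// supplied by the Generator's static Generate().
template <class Generator>
int DoBinaryOp(ToyAssembler* assembler) {
  return Generator::Generate(assembler);
}

int main() {
  ToyAssembler a{7, 3};
  std::printf("%d %d\n", DoBinaryOp<AddGenerator>(&a),
              DoBinaryOp<SubGenerator>(&a));
  return 0;
}
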
| 1201 | 980 |
| 1202 void Interpreter::DoCompareOpWithFeedback(Token::Value compare_op, | 981 void InterpreterGenerator::DoCompareOpWithFeedback( |
| 1203 InterpreterAssembler* assembler) { | 982 Token::Value compare_op, InterpreterAssembler* assembler) { |
| 1204 Node* reg_index = __ BytecodeOperandReg(0); | 983 Node* reg_index = __ BytecodeOperandReg(0); |
| 1205 Node* lhs = __ LoadRegister(reg_index); | 984 Node* lhs = __ LoadRegister(reg_index); |
| 1206 Node* rhs = __ GetAccumulator(); | 985 Node* rhs = __ GetAccumulator(); |
| 1207 Node* context = __ GetContext(); | 986 Node* context = __ GetContext(); |
| 1208 Node* slot_index = __ BytecodeOperandIdx(1); | 987 Node* slot_index = __ BytecodeOperandIdx(1); |
| 1209 Node* feedback_vector = __ LoadFeedbackVector(); | 988 Node* feedback_vector = __ LoadFeedbackVector(); |
| 1210 | 989 |
| 1211 // TODO(interpreter): the only reason this check is here is because we | 990 // TODO(interpreter): the only reason this check is here is because we |
| 1212 // sometimes emit comparisons that shouldn't collect feedback (e.g. | 991 // sometimes emit comparisons that shouldn't collect feedback (e.g. |
| 1213 // try-finally blocks and generators), and we could get rid of this by | 992 // try-finally blocks and generators), and we could get rid of this by |
| (...skipping 182 matching lines...) |
| 1396 default: | 1175 default: |
| 1397 UNREACHABLE(); | 1176 UNREACHABLE(); |
| 1398 } | 1177 } |
| 1399 __ SetAccumulator(result); | 1178 __ SetAccumulator(result); |
| 1400 __ Dispatch(); | 1179 __ Dispatch(); |
| 1401 } | 1180 } |
| 1402 | 1181 |
| 1403 // Add <src> | 1182 // Add <src> |
| 1404 // | 1183 // |
| 1405 // Add register <src> to accumulator. | 1184 // Add register <src> to accumulator. |
| 1406 void Interpreter::DoAdd(InterpreterAssembler* assembler) { | 1185 void InterpreterGenerator::DoAdd(InterpreterAssembler* assembler) { |
| 1407 DoBinaryOpWithFeedback<AddWithFeedbackStub>(assembler); | 1186 DoBinaryOpWithFeedback<AddWithFeedbackStub>(assembler); |
| 1408 } | 1187 } |
| 1409 | 1188 |
| 1410 // Sub <src> | 1189 // Sub <src> |
| 1411 // | 1190 // |
| 1412 // Subtract register <src> from accumulator. | 1191 // Subtract register <src> from accumulator. |
| 1413 void Interpreter::DoSub(InterpreterAssembler* assembler) { | 1192 void InterpreterGenerator::DoSub(InterpreterAssembler* assembler) { |
| 1414 DoBinaryOpWithFeedback<SubtractWithFeedbackStub>(assembler); | 1193 DoBinaryOpWithFeedback<SubtractWithFeedbackStub>(assembler); |
| 1415 } | 1194 } |
| 1416 | 1195 |
| 1417 // Mul <src> | 1196 // Mul <src> |
| 1418 // | 1197 // |
| 1419 // Multiply accumulator by register <src>. | 1198 // Multiply accumulator by register <src>. |
| 1420 void Interpreter::DoMul(InterpreterAssembler* assembler) { | 1199 void InterpreterGenerator::DoMul(InterpreterAssembler* assembler) { |
| 1421 DoBinaryOpWithFeedback<MultiplyWithFeedbackStub>(assembler); | 1200 DoBinaryOpWithFeedback<MultiplyWithFeedbackStub>(assembler); |
| 1422 } | 1201 } |
| 1423 | 1202 |
| 1424 // Div <src> | 1203 // Div <src> |
| 1425 // | 1204 // |
| 1426 // Divide register <src> by accumulator. | 1205 // Divide register <src> by accumulator. |
| 1427 void Interpreter::DoDiv(InterpreterAssembler* assembler) { | 1206 void InterpreterGenerator::DoDiv(InterpreterAssembler* assembler) { |
| 1428 DoBinaryOpWithFeedback<DivideWithFeedbackStub>(assembler); | 1207 DoBinaryOpWithFeedback<DivideWithFeedbackStub>(assembler); |
| 1429 } | 1208 } |
| 1430 | 1209 |
| 1431 // Mod <src> | 1210 // Mod <src> |
| 1432 // | 1211 // |
| 1433 // Modulo register <src> by accumulator. | 1212 // Modulo register <src> by accumulator. |
| 1434 void Interpreter::DoMod(InterpreterAssembler* assembler) { | 1213 void InterpreterGenerator::DoMod(InterpreterAssembler* assembler) { |
| 1435 DoBinaryOpWithFeedback<ModulusWithFeedbackStub>(assembler); | 1214 DoBinaryOpWithFeedback<ModulusWithFeedbackStub>(assembler); |
| 1436 } | 1215 } |
| 1437 | 1216 |
| 1438 void Interpreter::DoBitwiseBinaryOp(Token::Value bitwise_op, | 1217 void InterpreterGenerator::DoBitwiseBinaryOp(Token::Value bitwise_op, |
| 1439 InterpreterAssembler* assembler) { | 1218 InterpreterAssembler* assembler) { |
| 1440 Node* reg_index = __ BytecodeOperandReg(0); | 1219 Node* reg_index = __ BytecodeOperandReg(0); |
| 1441 Node* lhs = __ LoadRegister(reg_index); | 1220 Node* lhs = __ LoadRegister(reg_index); |
| 1442 Node* rhs = __ GetAccumulator(); | 1221 Node* rhs = __ GetAccumulator(); |
| 1443 Node* context = __ GetContext(); | 1222 Node* context = __ GetContext(); |
| 1444 Node* slot_index = __ BytecodeOperandIdx(1); | 1223 Node* slot_index = __ BytecodeOperandIdx(1); |
| 1445 Node* feedback_vector = __ LoadFeedbackVector(); | 1224 Node* feedback_vector = __ LoadFeedbackVector(); |
| 1446 | 1225 |
| 1447 Variable var_lhs_type_feedback(assembler, | 1226 Variable var_lhs_type_feedback(assembler, |
| 1448 MachineRepresentation::kTaggedSigned), | 1227 MachineRepresentation::kTaggedSigned), |
| 1449 var_rhs_type_feedback(assembler, MachineRepresentation::kTaggedSigned); | 1228 var_rhs_type_feedback(assembler, MachineRepresentation::kTaggedSigned); |
| (...skipping 53 matching lines...) |
| 1503 __ SmiOr(var_lhs_type_feedback.value(), var_rhs_type_feedback.value()); | 1282 __ SmiOr(var_lhs_type_feedback.value(), var_rhs_type_feedback.value()); |
| 1504 __ UpdateFeedback(__ SmiOr(result_type, input_feedback), feedback_vector, | 1283 __ UpdateFeedback(__ SmiOr(result_type, input_feedback), feedback_vector, |
| 1505 slot_index); | 1284 slot_index); |
| 1506 __ SetAccumulator(result); | 1285 __ SetAccumulator(result); |
| 1507 __ Dispatch(); | 1286 __ Dispatch(); |
| 1508 } | 1287 } |
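
The feedback collected above is combined with a bitwise OR of small Smi bit sets, so recorded feedback only ever widens toward the most generic state. The constants below are illustrative stand-ins for V8's BinaryOperationFeedback values (the real ones are defined elsewhere); the subset relationship between them is the point:

#include <cassert>
#include <cstdint>

// Illustrative feedback lattice: a more generic state is a bit superset of the
// states it subsumes.
enum Feedback : uint32_t {
  kNone        = 0,
  kSignedSmall = 1 << 0,
  kNumber      = kSignedSmall | 1 << 1,
  kAny         = 0xFF,
};

// Mirrors the UpdateFeedback(SmiOr(...)) pattern: combining never loses
// information, it only widens.
uint32_t Combine(uint32_t a, uint32_t b) { return a | b; }

int main() {
  uint32_t slot = kNone;
  slot = Combine(slot, kSignedSmall);  // first run saw two Smis
  slot = Combine(slot, kNumber);       // later a heap number showed up
  assert(slot == kNumber);             // Smi feedback is subsumed
  return 0;
}
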
| 1509 | 1288 |
| 1510 // BitwiseOr <src> | 1289 // BitwiseOr <src> |
| 1511 // | 1290 // |
| 1512 // BitwiseOr register <src> to accumulator. | 1291 // BitwiseOr register <src> to accumulator. |
| 1513 void Interpreter::DoBitwiseOr(InterpreterAssembler* assembler) { | 1292 void InterpreterGenerator::DoBitwiseOr(InterpreterAssembler* assembler) { |
| 1514 DoBitwiseBinaryOp(Token::BIT_OR, assembler); | 1293 DoBitwiseBinaryOp(Token::BIT_OR, assembler); |
| 1515 } | 1294 } |
| 1516 | 1295 |
| 1517 // BitwiseXor <src> | 1296 // BitwiseXor <src> |
| 1518 // | 1297 // |
| 1519 // BitwiseXor register <src> to accumulator. | 1298 // BitwiseXor register <src> to accumulator. |
| 1520 void Interpreter::DoBitwiseXor(InterpreterAssembler* assembler) { | 1299 void InterpreterGenerator::DoBitwiseXor(InterpreterAssembler* assembler) { |
| 1521 DoBitwiseBinaryOp(Token::BIT_XOR, assembler); | 1300 DoBitwiseBinaryOp(Token::BIT_XOR, assembler); |
| 1522 } | 1301 } |
| 1523 | 1302 |
| 1524 // BitwiseAnd <src> | 1303 // BitwiseAnd <src> |
| 1525 // | 1304 // |
| 1526 // BitwiseAnd register <src> to accumulator. | 1305 // BitwiseAnd register <src> to accumulator. |
| 1527 void Interpreter::DoBitwiseAnd(InterpreterAssembler* assembler) { | 1306 void InterpreterGenerator::DoBitwiseAnd(InterpreterAssembler* assembler) { |
| 1528 DoBitwiseBinaryOp(Token::BIT_AND, assembler); | 1307 DoBitwiseBinaryOp(Token::BIT_AND, assembler); |
| 1529 } | 1308 } |
| 1530 | 1309 |
| 1531 // ShiftLeft <src> | 1310 // ShiftLeft <src> |
| 1532 // | 1311 // |
| 1533 // Left shifts register <src> by the count specified in the accumulator. | 1312 // Left shifts register <src> by the count specified in the accumulator. |
| 1534 // Register <src> is converted to an int32 and the accumulator to uint32 | 1313 // Register <src> is converted to an int32 and the accumulator to uint32 |
| 1535 // before the operation. 5 lsb bits from the accumulator are used as count | 1314 // before the operation. 5 lsb bits from the accumulator are used as count |
| 1536 // i.e. <src> << (accumulator & 0x1F). | 1315 // i.e. <src> << (accumulator & 0x1F). |
| 1537 void Interpreter::DoShiftLeft(InterpreterAssembler* assembler) { | 1316 void InterpreterGenerator::DoShiftLeft(InterpreterAssembler* assembler) { |
| 1538 DoBitwiseBinaryOp(Token::SHL, assembler); | 1317 DoBitwiseBinaryOp(Token::SHL, assembler); |
| 1539 } | 1318 } |
| 1540 | 1319 |
| 1541 // ShiftRight <src> | 1320 // ShiftRight <src> |
| 1542 // | 1321 // |
| 1543 // Right shifts register <src> by the count specified in the accumulator. | 1322 // Right shifts register <src> by the count specified in the accumulator. |
| 1544 // Result is sign extended. Register <src> is converted to an int32 and the | 1323 // Result is sign extended. Register <src> is converted to an int32 and the |
| 1545 // accumulator to uint32 before the operation. 5 lsb bits from the accumulator | 1324 // accumulator to uint32 before the operation. 5 lsb bits from the accumulator |
| 1546 // are used as count i.e. <src> >> (accumulator & 0x1F). | 1325 // are used as count i.e. <src> >> (accumulator & 0x1F). |
| 1547 void Interpreter::DoShiftRight(InterpreterAssembler* assembler) { | 1326 void InterpreterGenerator::DoShiftRight(InterpreterAssembler* assembler) { |
| 1548 DoBitwiseBinaryOp(Token::SAR, assembler); | 1327 DoBitwiseBinaryOp(Token::SAR, assembler); |
| 1549 } | 1328 } |
| 1550 | 1329 |
| 1551 // ShiftRightLogical <src> | 1330 // ShiftRightLogical <src> |
| 1552 // | 1331 // |
| 1553 // Right Shifts register <src> by the count specified in the accumulator. | 1332 // Right Shifts register <src> by the count specified in the accumulator. |
| 1554 // Result is zero-filled. The accumulator and register <src> are converted to | 1333 // Result is zero-filled. The accumulator and register <src> are converted to |
| 1555 // uint32 before the operation. 5 lsb bits from the accumulator are used as | 1334 // uint32 before the operation. 5 lsb bits from the accumulator are used as |
| 1556 // count i.e. <src> >>> (accumulator & 0x1F). | 1335 // count i.e. <src> >>> (accumulator & 0x1F). |
| 1557 void Interpreter::DoShiftRightLogical(InterpreterAssembler* assembler) { | 1336 void InterpreterGenerator::DoShiftRightLogical( |
| 1337 InterpreterAssembler* assembler) { |
| 1558 DoBitwiseBinaryOp(Token::SHR, assembler); | 1338 DoBitwiseBinaryOp(Token::SHR, assembler); |
| 1559 } | 1339 } |
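
All three shift bytecodes use only the low five bits of the count, as the comments above describe. A standalone sketch of the three variants in plain int32/uint32 arithmetic (this mirrors the documented semantics, not the generated CSA code):

#include <cstdint>
#include <cstdio>

// Semantics sketches for ShiftLeft, ShiftRight and ShiftRightLogical:
// only the low 5 bits of the count are used.
int32_t ShiftLeft(int32_t src, uint32_t count) {
  return static_cast<int32_t>(static_cast<uint32_t>(src) << (count & 0x1F));
}
int32_t ShiftRight(int32_t src, uint32_t count) {  // sign-extending (SAR)
  return src >> (count & 0x1F);
}
uint32_t ShiftRightLogical(uint32_t src, uint32_t count) {  // zero-filling (SHR)
  return src >> (count & 0x1F);
}

int main() {
  std::printf("%d %d %u\n",
              ShiftLeft(1, 33),                    // 33 & 0x1F == 1  -> 2
              ShiftRight(-8, 2),                   // -> -2
              ShiftRightLogical(0x80000000u, 31)); // -> 1
  return 0;
}
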
| 1560 | 1340 |
| 1561 // AddSmi <imm> <reg> | 1341 // AddSmi <imm> <reg> |
| 1562 // | 1342 // |
| 1563 // Adds an immediate value <imm> to register <reg>. For this | 1343 // Adds an immediate value <imm> to register <reg>. For this |
| 1564 // operation <reg> is the lhs operand and <imm> is the rhs operand. | 1344 // operation <reg> is the lhs operand and <imm> is the rhs operand. |
| 1565 void Interpreter::DoAddSmi(InterpreterAssembler* assembler) { | 1345 void InterpreterGenerator::DoAddSmi(InterpreterAssembler* assembler) { |
| 1566 Variable var_result(assembler, MachineRepresentation::kTagged); | 1346 Variable var_result(assembler, MachineRepresentation::kTagged); |
| 1567 Label fastpath(assembler), slowpath(assembler, Label::kDeferred), | 1347 Label fastpath(assembler), slowpath(assembler, Label::kDeferred), |
| 1568 end(assembler); | 1348 end(assembler); |
| 1569 | 1349 |
| 1570 Node* reg_index = __ BytecodeOperandReg(1); | 1350 Node* reg_index = __ BytecodeOperandReg(1); |
| 1571 Node* left = __ LoadRegister(reg_index); | 1351 Node* left = __ LoadRegister(reg_index); |
| 1572 Node* right = __ BytecodeOperandImmSmi(0); | 1352 Node* right = __ BytecodeOperandImmSmi(0); |
| 1573 Node* slot_index = __ BytecodeOperandIdx(2); | 1353 Node* slot_index = __ BytecodeOperandIdx(2); |
| 1574 Node* feedback_vector = __ LoadFeedbackVector(); | 1354 Node* feedback_vector = __ LoadFeedbackVector(); |
| 1575 | 1355 |
| (...skipping 33 matching lines...) |
| 1609 { | 1389 { |
| 1610 __ SetAccumulator(var_result.value()); | 1390 __ SetAccumulator(var_result.value()); |
| 1611 __ Dispatch(); | 1391 __ Dispatch(); |
| 1612 } | 1392 } |
| 1613 } | 1393 } |
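
The elided fast path applies when the left operand and the sum both stay in Smi range; otherwise the handler defers to the generic AddWithFeedback slow path. A rough sketch of that split, using __builtin_add_overflow (a GCC/Clang intrinsic, assumed here) in place of V8's Smi overflow check:

#include <cstdint>
#include <cstdio>

// Stand-in for the generic AddWithFeedback slow path.
int64_t SlowPathAdd(int32_t left, int32_t right) {
  return static_cast<int64_t>(left) + right;
}

// Rough sketch of the AddSmi fast/slow split: try a checked 32-bit add first
// and only fall back to the slower generic path on overflow.
int64_t AddSmi(int32_t left, int32_t right) {
  int32_t result;
  if (!__builtin_add_overflow(left, right, &result)) {
    return result;                  // fast path: still fits in 32 bits
  }
  return SlowPathAdd(left, right);  // slow path: overflow, go generic
}

int main() {
  std::printf("%lld\n", static_cast<long long>(AddSmi(2147483647, 1)));
  return 0;
}
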
| 1614 | 1394 |
| 1615 // SubSmi <imm> <reg> | 1395 // SubSmi <imm> <reg> |
| 1616 // | 1396 // |
| 1617 // Subtracts an immediate value <imm> from register <reg>. For this | 1397 // Subtracts an immediate value <imm> from register <reg>. For this |
| 1618 // operation <reg> is the lhs operand and <imm> is the rhs operand. | 1398 // operation <reg> is the lhs operand and <imm> is the rhs operand. |
| 1619 void Interpreter::DoSubSmi(InterpreterAssembler* assembler) { | 1399 void InterpreterGenerator::DoSubSmi(InterpreterAssembler* assembler) { |
| 1620 Variable var_result(assembler, MachineRepresentation::kTagged); | 1400 Variable var_result(assembler, MachineRepresentation::kTagged); |
| 1621 Label fastpath(assembler), slowpath(assembler, Label::kDeferred), | 1401 Label fastpath(assembler), slowpath(assembler, Label::kDeferred), |
| 1622 end(assembler); | 1402 end(assembler); |
| 1623 | 1403 |
| 1624 Node* reg_index = __ BytecodeOperandReg(1); | 1404 Node* reg_index = __ BytecodeOperandReg(1); |
| 1625 Node* left = __ LoadRegister(reg_index); | 1405 Node* left = __ LoadRegister(reg_index); |
| 1626 Node* right = __ BytecodeOperandImmSmi(0); | 1406 Node* right = __ BytecodeOperandImmSmi(0); |
| 1627 Node* slot_index = __ BytecodeOperandIdx(2); | 1407 Node* slot_index = __ BytecodeOperandIdx(2); |
| 1628 Node* feedback_vector = __ LoadFeedbackVector(); | 1408 Node* feedback_vector = __ LoadFeedbackVector(); |
| 1629 | 1409 |
| (...skipping 33 matching lines...) |
| 1663 { | 1443 { |
| 1664 __ SetAccumulator(var_result.value()); | 1444 __ SetAccumulator(var_result.value()); |
| 1665 __ Dispatch(); | 1445 __ Dispatch(); |
| 1666 } | 1446 } |
| 1667 } | 1447 } |
| 1668 | 1448 |
| 1669 // BitwiseOr <imm> <reg> | 1449 // BitwiseOr <imm> <reg> |
| 1670 // | 1450 // |
| 1671 // BitwiseOr <reg> with <imm>. For this operation <reg> is the lhs | 1451 // BitwiseOr <reg> with <imm>. For this operation <reg> is the lhs |
| 1672 // operand and <imm> is the rhs operand. | 1452 // operand and <imm> is the rhs operand. |
| 1673 void Interpreter::DoBitwiseOrSmi(InterpreterAssembler* assembler) { | 1453 void InterpreterGenerator::DoBitwiseOrSmi(InterpreterAssembler* assembler) { |
| 1674 Node* reg_index = __ BytecodeOperandReg(1); | 1454 Node* reg_index = __ BytecodeOperandReg(1); |
| 1675 Node* left = __ LoadRegister(reg_index); | 1455 Node* left = __ LoadRegister(reg_index); |
| 1676 Node* right = __ BytecodeOperandImmSmi(0); | 1456 Node* right = __ BytecodeOperandImmSmi(0); |
| 1677 Node* context = __ GetContext(); | 1457 Node* context = __ GetContext(); |
| 1678 Node* slot_index = __ BytecodeOperandIdx(2); | 1458 Node* slot_index = __ BytecodeOperandIdx(2); |
| 1679 Node* feedback_vector = __ LoadFeedbackVector(); | 1459 Node* feedback_vector = __ LoadFeedbackVector(); |
| 1680 Variable var_lhs_type_feedback(assembler, | 1460 Variable var_lhs_type_feedback(assembler, |
| 1681 MachineRepresentation::kTaggedSigned); | 1461 MachineRepresentation::kTaggedSigned); |
| 1682 Node* lhs_value = __ TruncateTaggedToWord32WithFeedback( | 1462 Node* lhs_value = __ TruncateTaggedToWord32WithFeedback( |
| 1683 context, left, &var_lhs_type_feedback); | 1463 context, left, &var_lhs_type_feedback); |
| 1684 Node* rhs_value = __ SmiToWord32(right); | 1464 Node* rhs_value = __ SmiToWord32(right); |
| 1685 Node* value = __ Word32Or(lhs_value, rhs_value); | 1465 Node* value = __ Word32Or(lhs_value, rhs_value); |
| 1686 Node* result = __ ChangeInt32ToTagged(value); | 1466 Node* result = __ ChangeInt32ToTagged(value); |
| 1687 Node* result_type = __ SelectSmiConstant( | 1467 Node* result_type = __ SelectSmiConstant( |
| 1688 __ TaggedIsSmi(result), BinaryOperationFeedback::kSignedSmall, | 1468 __ TaggedIsSmi(result), BinaryOperationFeedback::kSignedSmall, |
| 1689 BinaryOperationFeedback::kNumber); | 1469 BinaryOperationFeedback::kNumber); |
| 1690 __ UpdateFeedback(__ SmiOr(result_type, var_lhs_type_feedback.value()), | 1470 __ UpdateFeedback(__ SmiOr(result_type, var_lhs_type_feedback.value()), |
| 1691 feedback_vector, slot_index); | 1471 feedback_vector, slot_index); |
| 1692 __ SetAccumulator(result); | 1472 __ SetAccumulator(result); |
| 1693 __ Dispatch(); | 1473 __ Dispatch(); |
| 1694 } | 1474 } |
| 1695 | 1475 |
| 1696 // BitwiseAnd <imm> <reg> | 1476 // BitwiseAnd <imm> <reg> |
| 1697 // | 1477 // |
| 1698 // BitwiseAnd <reg> with <imm>. For this operation <reg> is the lhs | 1478 // BitwiseAnd <reg> with <imm>. For this operation <reg> is the lhs |
| 1699 // operand and <imm> is the rhs operand. | 1479 // operand and <imm> is the rhs operand. |
| 1700 void Interpreter::DoBitwiseAndSmi(InterpreterAssembler* assembler) { | 1480 void InterpreterGenerator::DoBitwiseAndSmi(InterpreterAssembler* assembler) { |
| 1701 Node* reg_index = __ BytecodeOperandReg(1); | 1481 Node* reg_index = __ BytecodeOperandReg(1); |
| 1702 Node* left = __ LoadRegister(reg_index); | 1482 Node* left = __ LoadRegister(reg_index); |
| 1703 Node* right = __ BytecodeOperandImmSmi(0); | 1483 Node* right = __ BytecodeOperandImmSmi(0); |
| 1704 Node* context = __ GetContext(); | 1484 Node* context = __ GetContext(); |
| 1705 Node* slot_index = __ BytecodeOperandIdx(2); | 1485 Node* slot_index = __ BytecodeOperandIdx(2); |
| 1706 Node* feedback_vector = __ LoadFeedbackVector(); | 1486 Node* feedback_vector = __ LoadFeedbackVector(); |
| 1707 Variable var_lhs_type_feedback(assembler, | 1487 Variable var_lhs_type_feedback(assembler, |
| 1708 MachineRepresentation::kTaggedSigned); | 1488 MachineRepresentation::kTaggedSigned); |
| 1709 Node* lhs_value = __ TruncateTaggedToWord32WithFeedback( | 1489 Node* lhs_value = __ TruncateTaggedToWord32WithFeedback( |
| 1710 context, left, &var_lhs_type_feedback); | 1490 context, left, &var_lhs_type_feedback); |
| 1711 Node* rhs_value = __ SmiToWord32(right); | 1491 Node* rhs_value = __ SmiToWord32(right); |
| 1712 Node* value = __ Word32And(lhs_value, rhs_value); | 1492 Node* value = __ Word32And(lhs_value, rhs_value); |
| 1713 Node* result = __ ChangeInt32ToTagged(value); | 1493 Node* result = __ ChangeInt32ToTagged(value); |
| 1714 Node* result_type = __ SelectSmiConstant( | 1494 Node* result_type = __ SelectSmiConstant( |
| 1715 __ TaggedIsSmi(result), BinaryOperationFeedback::kSignedSmall, | 1495 __ TaggedIsSmi(result), BinaryOperationFeedback::kSignedSmall, |
| 1716 BinaryOperationFeedback::kNumber); | 1496 BinaryOperationFeedback::kNumber); |
| 1717 __ UpdateFeedback(__ SmiOr(result_type, var_lhs_type_feedback.value()), | 1497 __ UpdateFeedback(__ SmiOr(result_type, var_lhs_type_feedback.value()), |
| 1718 feedback_vector, slot_index); | 1498 feedback_vector, slot_index); |
| 1719 __ SetAccumulator(result); | 1499 __ SetAccumulator(result); |
| 1720 __ Dispatch(); | 1500 __ Dispatch(); |
| 1721 } | 1501 } |
| 1722 | 1502 |
| 1723 // ShiftLeftSmi <imm> <reg> | 1503 // ShiftLeftSmi <imm> <reg> |
| 1724 // | 1504 // |
| 1725 // Left shifts register <src> by the count specified in <imm>. | 1505 // Left shifts register <src> by the count specified in <imm>. |
| 1726 // Register <src> is converted to an int32 before the operation. The 5 | 1506 // Register <src> is converted to an int32 before the operation. The 5 |
| 1727 // lsb bits from <imm> are used as count i.e. <src> << (<imm> & 0x1F). | 1507 // lsb bits from <imm> are used as count i.e. <src> << (<imm> & 0x1F). |
| 1728 void Interpreter::DoShiftLeftSmi(InterpreterAssembler* assembler) { | 1508 void InterpreterGenerator::DoShiftLeftSmi(InterpreterAssembler* assembler) { |
| 1729 Node* reg_index = __ BytecodeOperandReg(1); | 1509 Node* reg_index = __ BytecodeOperandReg(1); |
| 1730 Node* left = __ LoadRegister(reg_index); | 1510 Node* left = __ LoadRegister(reg_index); |
| 1731 Node* right = __ BytecodeOperandImmSmi(0); | 1511 Node* right = __ BytecodeOperandImmSmi(0); |
| 1732 Node* context = __ GetContext(); | 1512 Node* context = __ GetContext(); |
| 1733 Node* slot_index = __ BytecodeOperandIdx(2); | 1513 Node* slot_index = __ BytecodeOperandIdx(2); |
| 1734 Node* feedback_vector = __ LoadFeedbackVector(); | 1514 Node* feedback_vector = __ LoadFeedbackVector(); |
| 1735 Variable var_lhs_type_feedback(assembler, | 1515 Variable var_lhs_type_feedback(assembler, |
| 1736 MachineRepresentation::kTaggedSigned); | 1516 MachineRepresentation::kTaggedSigned); |
| 1737 Node* lhs_value = __ TruncateTaggedToWord32WithFeedback( | 1517 Node* lhs_value = __ TruncateTaggedToWord32WithFeedback( |
| 1738 context, left, &var_lhs_type_feedback); | 1518 context, left, &var_lhs_type_feedback); |
| 1739 Node* rhs_value = __ SmiToWord32(right); | 1519 Node* rhs_value = __ SmiToWord32(right); |
| 1740 Node* shift_count = __ Word32And(rhs_value, __ Int32Constant(0x1f)); | 1520 Node* shift_count = __ Word32And(rhs_value, __ Int32Constant(0x1f)); |
| 1741 Node* value = __ Word32Shl(lhs_value, shift_count); | 1521 Node* value = __ Word32Shl(lhs_value, shift_count); |
| 1742 Node* result = __ ChangeInt32ToTagged(value); | 1522 Node* result = __ ChangeInt32ToTagged(value); |
| 1743 Node* result_type = __ SelectSmiConstant( | 1523 Node* result_type = __ SelectSmiConstant( |
| 1744 __ TaggedIsSmi(result), BinaryOperationFeedback::kSignedSmall, | 1524 __ TaggedIsSmi(result), BinaryOperationFeedback::kSignedSmall, |
| 1745 BinaryOperationFeedback::kNumber); | 1525 BinaryOperationFeedback::kNumber); |
| 1746 __ UpdateFeedback(__ SmiOr(result_type, var_lhs_type_feedback.value()), | 1526 __ UpdateFeedback(__ SmiOr(result_type, var_lhs_type_feedback.value()), |
| 1747 feedback_vector, slot_index); | 1527 feedback_vector, slot_index); |
| 1748 __ SetAccumulator(result); | 1528 __ SetAccumulator(result); |
| 1749 __ Dispatch(); | 1529 __ Dispatch(); |
| 1750 } | 1530 } |
| 1751 | 1531 |
| 1752 // ShiftRightSmi <imm> <reg> | 1532 // ShiftRightSmi <imm> <reg> |
| 1753 // | 1533 // |
| 1754 // Right shifts register <src> by the count specified in <imm>. | 1534 // Right shifts register <src> by the count specified in <imm>. |
| 1755 // Register <src> is converted to an int32 before the operation. The 5 | 1535 // Register <src> is converted to an int32 before the operation. The 5 |
| 1756 // lsb bits from <imm> are used as count i.e. <src> >> (<imm> & 0x1F). | 1536 // lsb bits from <imm> are used as count i.e. <src> >> (<imm> & 0x1F). |
| 1757 void Interpreter::DoShiftRightSmi(InterpreterAssembler* assembler) { | 1537 void InterpreterGenerator::DoShiftRightSmi(InterpreterAssembler* assembler) { |
| 1758 Node* reg_index = __ BytecodeOperandReg(1); | 1538 Node* reg_index = __ BytecodeOperandReg(1); |
| 1759 Node* left = __ LoadRegister(reg_index); | 1539 Node* left = __ LoadRegister(reg_index); |
| 1760 Node* right = __ BytecodeOperandImmSmi(0); | 1540 Node* right = __ BytecodeOperandImmSmi(0); |
| 1761 Node* context = __ GetContext(); | 1541 Node* context = __ GetContext(); |
| 1762 Node* slot_index = __ BytecodeOperandIdx(2); | 1542 Node* slot_index = __ BytecodeOperandIdx(2); |
| 1763 Node* feedback_vector = __ LoadFeedbackVector(); | 1543 Node* feedback_vector = __ LoadFeedbackVector(); |
| 1764 Variable var_lhs_type_feedback(assembler, | 1544 Variable var_lhs_type_feedback(assembler, |
| 1765 MachineRepresentation::kTaggedSigned); | 1545 MachineRepresentation::kTaggedSigned); |
| 1766 Node* lhs_value = __ TruncateTaggedToWord32WithFeedback( | 1546 Node* lhs_value = __ TruncateTaggedToWord32WithFeedback( |
| 1767 context, left, &var_lhs_type_feedback); | 1547 context, left, &var_lhs_type_feedback); |
| 1768 Node* rhs_value = __ SmiToWord32(right); | 1548 Node* rhs_value = __ SmiToWord32(right); |
| 1769 Node* shift_count = __ Word32And(rhs_value, __ Int32Constant(0x1f)); | 1549 Node* shift_count = __ Word32And(rhs_value, __ Int32Constant(0x1f)); |
| 1770 Node* value = __ Word32Sar(lhs_value, shift_count); | 1550 Node* value = __ Word32Sar(lhs_value, shift_count); |
| 1771 Node* result = __ ChangeInt32ToTagged(value); | 1551 Node* result = __ ChangeInt32ToTagged(value); |
| 1772 Node* result_type = __ SelectSmiConstant( | 1552 Node* result_type = __ SelectSmiConstant( |
| 1773 __ TaggedIsSmi(result), BinaryOperationFeedback::kSignedSmall, | 1553 __ TaggedIsSmi(result), BinaryOperationFeedback::kSignedSmall, |
| 1774 BinaryOperationFeedback::kNumber); | 1554 BinaryOperationFeedback::kNumber); |
| 1775 __ UpdateFeedback(__ SmiOr(result_type, var_lhs_type_feedback.value()), | 1555 __ UpdateFeedback(__ SmiOr(result_type, var_lhs_type_feedback.value()), |
| 1776 feedback_vector, slot_index); | 1556 feedback_vector, slot_index); |
| 1777 __ SetAccumulator(result); | 1557 __ SetAccumulator(result); |
| 1778 __ Dispatch(); | 1558 __ Dispatch(); |
| 1779 } | 1559 } |
| 1780 | 1560 |
| 1781 Node* Interpreter::BuildUnaryOp(Callable callable, | 1561 Node* InterpreterGenerator::BuildUnaryOp(Callable callable, |
| 1782 InterpreterAssembler* assembler) { | 1562 InterpreterAssembler* assembler) { |
| 1783 Node* target = __ HeapConstant(callable.code()); | 1563 Node* target = __ HeapConstant(callable.code()); |
| 1784 Node* accumulator = __ GetAccumulator(); | 1564 Node* accumulator = __ GetAccumulator(); |
| 1785 Node* context = __ GetContext(); | 1565 Node* context = __ GetContext(); |
| 1786 return __ CallStub(callable.descriptor(), target, context, accumulator); | 1566 return __ CallStub(callable.descriptor(), target, context, accumulator); |
| 1787 } | 1567 } |
| 1788 | 1568 |
| 1789 template <class Generator> | 1569 template <class Generator> |
| 1790 void Interpreter::DoUnaryOpWithFeedback(InterpreterAssembler* assembler) { | 1570 void InterpreterGenerator::DoUnaryOpWithFeedback( |
| 1571 InterpreterAssembler* assembler) { |
| 1791 Node* value = __ GetAccumulator(); | 1572 Node* value = __ GetAccumulator(); |
| 1792 Node* context = __ GetContext(); | 1573 Node* context = __ GetContext(); |
| 1793 Node* slot_index = __ BytecodeOperandIdx(0); | 1574 Node* slot_index = __ BytecodeOperandIdx(0); |
| 1794 Node* feedback_vector = __ LoadFeedbackVector(); | 1575 Node* feedback_vector = __ LoadFeedbackVector(); |
| 1795 Node* result = Generator::Generate(assembler, value, context, feedback_vector, | 1576 Node* result = Generator::Generate(assembler, value, context, feedback_vector, |
| 1796 slot_index); | 1577 slot_index); |
| 1797 __ SetAccumulator(result); | 1578 __ SetAccumulator(result); |
| 1798 __ Dispatch(); | 1579 __ Dispatch(); |
| 1799 } | 1580 } |
| 1800 | 1581 |
| 1801 // ToName | 1582 // ToName |
| 1802 // | 1583 // |
| 1803 // Convert the object referenced by the accumulator to a name. | 1584 // Convert the object referenced by the accumulator to a name. |
| 1804 void Interpreter::DoToName(InterpreterAssembler* assembler) { | 1585 void InterpreterGenerator::DoToName(InterpreterAssembler* assembler) { |
| 1805 Node* object = __ GetAccumulator(); | 1586 Node* object = __ GetAccumulator(); |
| 1806 Node* context = __ GetContext(); | 1587 Node* context = __ GetContext(); |
| 1807 Node* result = __ ToName(context, object); | 1588 Node* result = __ ToName(context, object); |
| 1808 __ StoreRegister(result, __ BytecodeOperandReg(0)); | 1589 __ StoreRegister(result, __ BytecodeOperandReg(0)); |
| 1809 __ Dispatch(); | 1590 __ Dispatch(); |
| 1810 } | 1591 } |
| 1811 | 1592 |
| 1812 // ToNumber | 1593 // ToNumber |
| 1813 // | 1594 // |
| 1814 // Convert the object referenced by the accumulator to a number. | 1595 // Convert the object referenced by the accumulator to a number. |
| 1815 void Interpreter::DoToNumber(InterpreterAssembler* assembler) { | 1596 void InterpreterGenerator::DoToNumber(InterpreterAssembler* assembler) { |
| 1816 Node* object = __ GetAccumulator(); | 1597 Node* object = __ GetAccumulator(); |
| 1817 Node* context = __ GetContext(); | 1598 Node* context = __ GetContext(); |
| 1818 Node* result = __ ToNumber(context, object); | 1599 Node* result = __ ToNumber(context, object); |
| 1819 __ StoreRegister(result, __ BytecodeOperandReg(0)); | 1600 __ StoreRegister(result, __ BytecodeOperandReg(0)); |
| 1820 __ Dispatch(); | 1601 __ Dispatch(); |
| 1821 } | 1602 } |
| 1822 | 1603 |
| 1823 // ToObject | 1604 // ToObject |
| 1824 // | 1605 // |
| 1825 // Convert the object referenced by the accumulator to a JSReceiver. | 1606 // Convert the object referenced by the accumulator to a JSReceiver. |
| 1826 void Interpreter::DoToObject(InterpreterAssembler* assembler) { | 1607 void InterpreterGenerator::DoToObject(InterpreterAssembler* assembler) { |
| 1827 Node* result = BuildUnaryOp(CodeFactory::ToObject(isolate_), assembler); | 1608 Node* result = BuildUnaryOp(CodeFactory::ToObject(isolate_), assembler); |
| 1828 __ StoreRegister(result, __ BytecodeOperandReg(0)); | 1609 __ StoreRegister(result, __ BytecodeOperandReg(0)); |
| 1829 __ Dispatch(); | 1610 __ Dispatch(); |
| 1830 } | 1611 } |
| 1831 | 1612 |
| 1832 // Inc | 1613 // Inc |
| 1833 // | 1614 // |
| 1834 // Increments value in the accumulator by one. | 1615 // Increments value in the accumulator by one. |
| 1835 void Interpreter::DoInc(InterpreterAssembler* assembler) { | 1616 void InterpreterGenerator::DoInc(InterpreterAssembler* assembler) { |
| 1836 typedef CodeStubAssembler::Label Label; | 1617 typedef CodeStubAssembler::Label Label; |
| 1837 typedef compiler::Node Node; | 1618 typedef compiler::Node Node; |
| 1838 typedef CodeStubAssembler::Variable Variable; | 1619 typedef CodeStubAssembler::Variable Variable; |
| 1839 | 1620 |
| 1840 Node* value = __ GetAccumulator(); | 1621 Node* value = __ GetAccumulator(); |
| 1841 Node* context = __ GetContext(); | 1622 Node* context = __ GetContext(); |
| 1842 Node* slot_index = __ BytecodeOperandIdx(0); | 1623 Node* slot_index = __ BytecodeOperandIdx(0); |
| 1843 Node* feedback_vector = __ LoadFeedbackVector(); | 1624 Node* feedback_vector = __ LoadFeedbackVector(); |
| 1844 | 1625 |
| 1845 // Shared entry for floating point increment. | 1626 // Shared entry for floating point increment. |
| (...skipping 117 matching lines...) |
| 1963 assembler->UpdateFeedback(var_type_feedback.value(), feedback_vector, | 1744 assembler->UpdateFeedback(var_type_feedback.value(), feedback_vector, |
| 1964 slot_index); | 1745 slot_index); |
| 1965 | 1746 |
| 1966 __ SetAccumulator(result_var.value()); | 1747 __ SetAccumulator(result_var.value()); |
| 1967 __ Dispatch(); | 1748 __ Dispatch(); |
| 1968 } | 1749 } |
| 1969 | 1750 |
| 1970 // Dec | 1751 // Dec |
| 1971 // | 1752 // |
| 1972 // Decrements value in the accumulator by one. | 1753 // Decrements value in the accumulator by one. |
| 1973 void Interpreter::DoDec(InterpreterAssembler* assembler) { | 1754 void InterpreterGenerator::DoDec(InterpreterAssembler* assembler) { |
| 1974 typedef CodeStubAssembler::Label Label; | 1755 typedef CodeStubAssembler::Label Label; |
| 1975 typedef compiler::Node Node; | 1756 typedef compiler::Node Node; |
| 1976 typedef CodeStubAssembler::Variable Variable; | 1757 typedef CodeStubAssembler::Variable Variable; |
| 1977 | 1758 |
| 1978 Node* value = __ GetAccumulator(); | 1759 Node* value = __ GetAccumulator(); |
| 1979 Node* context = __ GetContext(); | 1760 Node* context = __ GetContext(); |
| 1980 Node* slot_index = __ BytecodeOperandIdx(0); | 1761 Node* slot_index = __ BytecodeOperandIdx(0); |
| 1981 Node* feedback_vector = __ LoadFeedbackVector(); | 1762 Node* feedback_vector = __ LoadFeedbackVector(); |
| 1982 | 1763 |
| 1983 // Shared entry for floating point decrement. | 1764 // Shared entry for floating point decrement. |
| (...skipping 119 matching lines...) |
| 2103 | 1884 |
| 2104 __ SetAccumulator(result_var.value()); | 1885 __ SetAccumulator(result_var.value()); |
| 2105 __ Dispatch(); | 1886 __ Dispatch(); |
| 2106 } | 1887 } |
| 2107 | 1888 |
| 2108 // LogicalNot | 1889 // LogicalNot |
| 2109 // | 1890 // |
| 2110 // Perform logical-not on the accumulator, first casting the | 1891 // Perform logical-not on the accumulator, first casting the |
| 2111 // accumulator to a boolean value if required. | 1892 // accumulator to a boolean value if required. |
| 2112 // ToBooleanLogicalNot | 1893 // ToBooleanLogicalNot |
| 2113 void Interpreter::DoToBooleanLogicalNot(InterpreterAssembler* assembler) { | 1894 void InterpreterGenerator::DoToBooleanLogicalNot( |
| 1895 InterpreterAssembler* assembler) { |
| 2114 Node* value = __ GetAccumulator(); | 1896 Node* value = __ GetAccumulator(); |
| 2115 Variable result(assembler, MachineRepresentation::kTagged); | 1897 Variable result(assembler, MachineRepresentation::kTagged); |
| 2116 Label if_true(assembler), if_false(assembler), end(assembler); | 1898 Label if_true(assembler), if_false(assembler), end(assembler); |
| 2117 Node* true_value = __ BooleanConstant(true); | 1899 Node* true_value = __ BooleanConstant(true); |
| 2118 Node* false_value = __ BooleanConstant(false); | 1900 Node* false_value = __ BooleanConstant(false); |
| 2119 __ BranchIfToBooleanIsTrue(value, &if_true, &if_false); | 1901 __ BranchIfToBooleanIsTrue(value, &if_true, &if_false); |
| 2120 __ Bind(&if_true); | 1902 __ Bind(&if_true); |
| 2121 { | 1903 { |
| 2122 result.Bind(false_value); | 1904 result.Bind(false_value); |
| 2123 __ Goto(&end); | 1905 __ Goto(&end); |
| 2124 } | 1906 } |
| 2125 __ Bind(&if_false); | 1907 __ Bind(&if_false); |
| 2126 { | 1908 { |
| 2127 result.Bind(true_value); | 1909 result.Bind(true_value); |
| 2128 __ Goto(&end); | 1910 __ Goto(&end); |
| 2129 } | 1911 } |
| 2130 __ Bind(&end); | 1912 __ Bind(&end); |
| 2131 __ SetAccumulator(result.value()); | 1913 __ SetAccumulator(result.value()); |
| 2132 __ Dispatch(); | 1914 __ Dispatch(); |
| 2133 } | 1915 } |
| 2134 | 1916 |
| 2135 // LogicalNot | 1917 // LogicalNot |
| 2136 // | 1918 // |
| 2137 // Perform logical-not on the accumulator, which must already be a boolean | 1919 // Perform logical-not on the accumulator, which must already be a boolean |
| 2138 // value. | 1920 // value. |
| 2139 void Interpreter::DoLogicalNot(InterpreterAssembler* assembler) { | 1921 void InterpreterGenerator::DoLogicalNot(InterpreterAssembler* assembler) { |
| 2140 Node* value = __ GetAccumulator(); | 1922 Node* value = __ GetAccumulator(); |
| 2141 Variable result(assembler, MachineRepresentation::kTagged); | 1923 Variable result(assembler, MachineRepresentation::kTagged); |
| 2142 Label if_true(assembler), if_false(assembler), end(assembler); | 1924 Label if_true(assembler), if_false(assembler), end(assembler); |
| 2143 Node* true_value = __ BooleanConstant(true); | 1925 Node* true_value = __ BooleanConstant(true); |
| 2144 Node* false_value = __ BooleanConstant(false); | 1926 Node* false_value = __ BooleanConstant(false); |
| 2145 __ Branch(__ WordEqual(value, true_value), &if_true, &if_false); | 1927 __ Branch(__ WordEqual(value, true_value), &if_true, &if_false); |
| 2146 __ Bind(&if_true); | 1928 __ Bind(&if_true); |
| 2147 { | 1929 { |
| 2148 result.Bind(false_value); | 1930 result.Bind(false_value); |
| 2149 __ Goto(&end); | 1931 __ Goto(&end); |
| 2150 } | 1932 } |
| 2151 __ Bind(&if_false); | 1933 __ Bind(&if_false); |
| 2152 { | 1934 { |
| 2153 if (FLAG_debug_code) { | 1935 if (FLAG_debug_code) { |
| 2154 __ AbortIfWordNotEqual(value, false_value, | 1936 __ AbortIfWordNotEqual(value, false_value, |
| 2155 BailoutReason::kExpectedBooleanValue); | 1937 BailoutReason::kExpectedBooleanValue); |
| 2156 } | 1938 } |
| 2157 result.Bind(true_value); | 1939 result.Bind(true_value); |
| 2158 __ Goto(&end); | 1940 __ Goto(&end); |
| 2159 } | 1941 } |
| 2160 __ Bind(&end); | 1942 __ Bind(&end); |
| 2161 __ SetAccumulator(result.value()); | 1943 __ SetAccumulator(result.value()); |
| 2162 __ Dispatch(); | 1944 __ Dispatch(); |
| 2163 } | 1945 } |
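
The two handlers differ only in coercion: ToBooleanLogicalNot runs the ToBoolean rules on the accumulator first, whereas LogicalNot requires an actual boolean and, in debug builds, aborts otherwise. A small sketch of that distinction with an invented tagged value type (the ToBoolean stand-in is deliberately incomplete: it ignores objects, null/undefined and NaN details):

#include <cassert>
#include <string>
#include <variant>

using ToyValue = std::variant<bool, double, std::string>;

// Rough, partial stand-in for the ToBoolean coercion described above.
bool ToBoolean(const ToyValue& v) {
  if (auto b = std::get_if<bool>(&v)) return *b;
  if (auto d = std::get_if<double>(&v)) return *d != 0.0;
  return !std::get<std::string>(v).empty();
}

// Mirrors ToBooleanLogicalNot: coerce first, then negate.
bool ToBooleanLogicalNot(const ToyValue& v) { return !ToBoolean(v); }

// Mirrors LogicalNot: the operand must already be a boolean.
bool LogicalNot(const ToyValue& v) {
  assert(std::holds_alternative<bool>(v));  // like the kExpectedBooleanValue check
  return !std::get<bool>(v);
}

int main() {
  assert(ToBooleanLogicalNot(ToyValue{std::string{}}) == true);  // "" is falsy
  assert(LogicalNot(ToyValue{true}) == false);
  return 0;
}
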
| 2164 | 1946 |
| 2165 // TypeOf | 1947 // TypeOf |
| 2166 // | 1948 // |
| 2167 // Load the accumulator with the string representing the type of the | 1949 // Load the accumulator with the string representing the type of the |
| 2168 // object in the accumulator. | 1950 // object in the accumulator. |
| 2169 void Interpreter::DoTypeOf(InterpreterAssembler* assembler) { | 1951 void InterpreterGenerator::DoTypeOf(InterpreterAssembler* assembler) { |
| 2170 Node* value = __ GetAccumulator(); | 1952 Node* value = __ GetAccumulator(); |
| 2171 Node* result = assembler->Typeof(value); | 1953 Node* result = assembler->Typeof(value); |
| 2172 __ SetAccumulator(result); | 1954 __ SetAccumulator(result); |
| 2173 __ Dispatch(); | 1955 __ Dispatch(); |
| 2174 } | 1956 } |
| 2175 | 1957 |
| 2176 void Interpreter::DoDelete(Runtime::FunctionId function_id, | 1958 void InterpreterGenerator::DoDelete(Runtime::FunctionId function_id, |
| 2177 InterpreterAssembler* assembler) { | 1959 InterpreterAssembler* assembler) { |
| 2178 Node* reg_index = __ BytecodeOperandReg(0); | 1960 Node* reg_index = __ BytecodeOperandReg(0); |
| 2179 Node* object = __ LoadRegister(reg_index); | 1961 Node* object = __ LoadRegister(reg_index); |
| 2180 Node* key = __ GetAccumulator(); | 1962 Node* key = __ GetAccumulator(); |
| 2181 Node* context = __ GetContext(); | 1963 Node* context = __ GetContext(); |
| 2182 Node* result = __ CallRuntime(function_id, context, object, key); | 1964 Node* result = __ CallRuntime(function_id, context, object, key); |
| 2183 __ SetAccumulator(result); | 1965 __ SetAccumulator(result); |
| 2184 __ Dispatch(); | 1966 __ Dispatch(); |
| 2185 } | 1967 } |
| 2186 | 1968 |
| 2187 // DeletePropertyStrict | 1969 // DeletePropertyStrict |
| 2188 // | 1970 // |
| 2189 // Delete the property specified in the accumulator from the object | 1971 // Delete the property specified in the accumulator from the object |
| 2190 // referenced by the register operand following strict mode semantics. | 1972 // referenced by the register operand following strict mode semantics. |
| 2191 void Interpreter::DoDeletePropertyStrict(InterpreterAssembler* assembler) { | 1973 void InterpreterGenerator::DoDeletePropertyStrict( |
| 1974 InterpreterAssembler* assembler) { |
| 2192 DoDelete(Runtime::kDeleteProperty_Strict, assembler); | 1975 DoDelete(Runtime::kDeleteProperty_Strict, assembler); |
| 2193 } | 1976 } |
| 2194 | 1977 |
| 2195 // DeletePropertySloppy | 1978 // DeletePropertySloppy |
| 2196 // | 1979 // |
| 2197 // Delete the property specified in the accumulator from the object | 1980 // Delete the property specified in the accumulator from the object |
| 2198 // referenced by the register operand following sloppy mode semantics. | 1981 // referenced by the register operand following sloppy mode semantics. |
| 2199 void Interpreter::DoDeletePropertySloppy(InterpreterAssembler* assembler) { | 1982 void InterpreterGenerator::DoDeletePropertySloppy( |
| 1983 InterpreterAssembler* assembler) { |
| 2200 DoDelete(Runtime::kDeleteProperty_Sloppy, assembler); | 1984 DoDelete(Runtime::kDeleteProperty_Sloppy, assembler); |
| 2201 } | 1985 } |
| 2202 | 1986 |
| 2203 // GetSuperConstructor | 1987 // GetSuperConstructor |
| 2204 // | 1988 // |
| 2205 // Get the super constructor from the object referenced by the accumulator. | 1989 // Get the super constructor from the object referenced by the accumulator. |
| 2206 // The result is stored in register |reg|. | 1990 // The result is stored in register |reg|. |
| 2207 void Interpreter::DoGetSuperConstructor(InterpreterAssembler* assembler) { | 1991 void InterpreterGenerator::DoGetSuperConstructor( |
| 1992 InterpreterAssembler* assembler) { |
| 2208 Node* active_function = __ GetAccumulator(); | 1993 Node* active_function = __ GetAccumulator(); |
| 2209 Node* context = __ GetContext(); | 1994 Node* context = __ GetContext(); |
| 2210 Node* result = __ GetSuperConstructor(active_function, context); | 1995 Node* result = __ GetSuperConstructor(active_function, context); |
| 2211 Node* reg = __ BytecodeOperandReg(0); | 1996 Node* reg = __ BytecodeOperandReg(0); |
| 2212 __ StoreRegister(result, reg); | 1997 __ StoreRegister(result, reg); |
| 2213 __ Dispatch(); | 1998 __ Dispatch(); |
| 2214 } | 1999 } |
| 2215 | 2000 |
| 2216 void Interpreter::DoJSCall(InterpreterAssembler* assembler, | 2001 void InterpreterGenerator::DoJSCall(InterpreterAssembler* assembler, |
| 2217 TailCallMode tail_call_mode) { | 2002 TailCallMode tail_call_mode) { |
| 2218 Node* function_reg = __ BytecodeOperandReg(0); | 2003 Node* function_reg = __ BytecodeOperandReg(0); |
| 2219 Node* function = __ LoadRegister(function_reg); | 2004 Node* function = __ LoadRegister(function_reg); |
| 2220 Node* receiver_reg = __ BytecodeOperandReg(1); | 2005 Node* receiver_reg = __ BytecodeOperandReg(1); |
| 2221 Node* receiver_arg = __ RegisterLocation(receiver_reg); | 2006 Node* receiver_arg = __ RegisterLocation(receiver_reg); |
| 2222 Node* receiver_args_count = __ BytecodeOperandCount(2); | 2007 Node* receiver_args_count = __ BytecodeOperandCount(2); |
| 2223 Node* receiver_count = __ Int32Constant(1); | 2008 Node* receiver_count = __ Int32Constant(1); |
| 2224 Node* args_count = __ Int32Sub(receiver_args_count, receiver_count); | 2009 Node* args_count = __ Int32Sub(receiver_args_count, receiver_count); |
| 2225 Node* slot_id = __ BytecodeOperandIdx(3); | 2010 Node* slot_id = __ BytecodeOperandIdx(3); |
| 2226 Node* feedback_vector = __ LoadFeedbackVector(); | 2011 Node* feedback_vector = __ LoadFeedbackVector(); |
| 2227 Node* context = __ GetContext(); | 2012 Node* context = __ GetContext(); |
| 2228 Node* result = | 2013 Node* result = |
| 2229 __ CallJSWithFeedback(function, context, receiver_arg, args_count, | 2014 __ CallJSWithFeedback(function, context, receiver_arg, args_count, |
| 2230 slot_id, feedback_vector, tail_call_mode); | 2015 slot_id, feedback_vector, tail_call_mode); |
| 2231 __ SetAccumulator(result); | 2016 __ SetAccumulator(result); |
| 2232 __ Dispatch(); | 2017 __ Dispatch(); |
| 2233 } | 2018 } |
| 2234 | 2019 |
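One detail worth calling out in DoJSCall: the count operand covers the receiver plus the arguments, so the handler subtracts one before handing the count to CallJSWithFeedback. A tiny worked example with made-up numbers (not V8 code):

```cpp
#include <cstdint>
#include <iostream>

int main() {
  // Hypothetical bytecode: Call r0, r1-r3, [slot] -- r1 is the receiver,
  // r2 and r3 are the actual arguments.
  const int32_t receiver_args_count = 3;  // value of the count operand
  const int32_t receiver_count = 1;
  const int32_t args_count = receiver_args_count - receiver_count;
  std::cout << "argument count passed on to the call builtin: " << args_count
            << "\n";  // prints 2
  return 0;
}
```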
| 2235 void Interpreter::DoJSCallN(InterpreterAssembler* assembler, int arg_count) { | 2020 void InterpreterGenerator::DoJSCallN(InterpreterAssembler* assembler, |
| 2021 int arg_count) { |
| 2236 const int kReceiverOperandIndex = 1; | 2022 const int kReceiverOperandIndex = 1; |
| 2237 const int kReceiverOperandCount = 1; | 2023 const int kReceiverOperandCount = 1; |
| 2238 const int kSlotOperandIndex = | 2024 const int kSlotOperandIndex = |
| 2239 kReceiverOperandIndex + kReceiverOperandCount + arg_count; | 2025 kReceiverOperandIndex + kReceiverOperandCount + arg_count; |
| 2240 const int kBoilerplatParameterCount = 7; | 2026 const int kBoilerplatParameterCount = 7; |
| 2241 const int kReceiverParameterIndex = 5; | 2027 const int kReceiverParameterIndex = 5; |
| 2242 | 2028 |
| 2243 Node* function_reg = __ BytecodeOperandReg(0); | 2029 Node* function_reg = __ BytecodeOperandReg(0); |
| 2244 Node* function = __ LoadRegister(function_reg); | 2030 Node* function = __ LoadRegister(function_reg); |
| 2245 std::array<Node*, Bytecodes::kMaxOperands + kBoilerplatParameterCount> temp; | 2031 std::array<Node*, Bytecodes::kMaxOperands + kBoilerplatParameterCount> temp; |
| (...skipping 13 matching lines...) |
| 2259 arg_count + kBoilerplatParameterCount, &temp[0]); | 2045 arg_count + kBoilerplatParameterCount, &temp[0]); |
| 2260 __ SetAccumulator(result); | 2046 __ SetAccumulator(result); |
| 2261 __ Dispatch(); | 2047 __ Dispatch(); |
| 2262 } | 2048 } |
| 2263 | 2049 |
| 2264 // Call <callable> <receiver> <arg_count> <feedback_slot_id> | 2050 // Call <callable> <receiver> <arg_count> <feedback_slot_id> |
| 2265 // | 2051 // |
| 2266 // Call a JSFunction or Callable in |callable| with the |receiver| and | 2052 // Call a JSFunction or Callable in |callable| with the |receiver| and |
| 2267 // |arg_count| arguments in subsequent registers. Collect type feedback | 2053 // |arg_count| arguments in subsequent registers. Collect type feedback |
| 2268 // into |feedback_slot_id| | 2054 // into |feedback_slot_id| |
| 2269 void Interpreter::DoCall(InterpreterAssembler* assembler) { | 2055 void InterpreterGenerator::DoCall(InterpreterAssembler* assembler) { |
| 2270 DoJSCall(assembler, TailCallMode::kDisallow); | 2056 DoJSCall(assembler, TailCallMode::kDisallow); |
| 2271 } | 2057 } |
| 2272 | 2058 |
| 2273 void Interpreter::DoCall0(InterpreterAssembler* assembler) { | 2059 void InterpreterGenerator::DoCall0(InterpreterAssembler* assembler) { |
| 2274 DoJSCallN(assembler, 0); | 2060 DoJSCallN(assembler, 0); |
| 2275 } | 2061 } |
| 2276 | 2062 |
| 2277 void Interpreter::DoCall1(InterpreterAssembler* assembler) { | 2063 void InterpreterGenerator::DoCall1(InterpreterAssembler* assembler) { |
| 2278 DoJSCallN(assembler, 1); | 2064 DoJSCallN(assembler, 1); |
| 2279 } | 2065 } |
| 2280 | 2066 |
| 2281 void Interpreter::DoCall2(InterpreterAssembler* assembler) { | 2067 void InterpreterGenerator::DoCall2(InterpreterAssembler* assembler) { |
| 2282 DoJSCallN(assembler, 2); | 2068 DoJSCallN(assembler, 2); |
| 2283 } | 2069 } |
| 2284 | 2070 |
| 2285 void Interpreter::DoCallProperty(InterpreterAssembler* assembler) { | 2071 void InterpreterGenerator::DoCallProperty(InterpreterAssembler* assembler) { |
| 2286 // Same as Call | 2072 // Same as Call |
| 2287 UNREACHABLE(); | 2073 UNREACHABLE(); |
| 2288 } | 2074 } |
| 2289 | 2075 |
| 2290 void Interpreter::DoCallProperty0(InterpreterAssembler* assembler) { | 2076 void InterpreterGenerator::DoCallProperty0(InterpreterAssembler* assembler) { |
| 2291 // Same as Call0 | 2077 // Same as Call0 |
| 2292 UNREACHABLE(); | 2078 UNREACHABLE(); |
| 2293 } | 2079 } |
| 2294 | 2080 |
| 2295 void Interpreter::DoCallProperty1(InterpreterAssembler* assembler) { | 2081 void InterpreterGenerator::DoCallProperty1(InterpreterAssembler* assembler) { |
| 2296 // Same as Call1 | 2082 // Same as Call1 |
| 2297 UNREACHABLE(); | 2083 UNREACHABLE(); |
| 2298 } | 2084 } |
| 2299 | 2085 |
| 2300 void Interpreter::DoCallProperty2(InterpreterAssembler* assembler) { | 2086 void InterpreterGenerator::DoCallProperty2(InterpreterAssembler* assembler) { |
| 2301 // Same as Call2 | 2087 // Same as Call2 |
| 2302 UNREACHABLE(); | 2088 UNREACHABLE(); |
| 2303 } | 2089 } |
| 2304 | 2090 |
| 2305 // TailCall <callable> <receiver> <arg_count> <feedback_slot_id> | 2091 // TailCall <callable> <receiver> <arg_count> <feedback_slot_id> |
| 2306 // | 2092 // |
| 2307 // Tail call a JSFunction or Callable in |callable| with the |receiver| and | 2093 // Tail call a JSFunction or Callable in |callable| with the |receiver| and |
| 2308 // |arg_count| arguments in subsequent registers. Collect type feedback | 2094 // |arg_count| arguments in subsequent registers. Collect type feedback |
| 2309 // into |feedback_slot_id| | 2095 // into |feedback_slot_id| |
| 2310 void Interpreter::DoTailCall(InterpreterAssembler* assembler) { | 2096 void InterpreterGenerator::DoTailCall(InterpreterAssembler* assembler) { |
| 2311 DoJSCall(assembler, TailCallMode::kAllow); | 2097 DoJSCall(assembler, TailCallMode::kAllow); |
| 2312 } | 2098 } |
| 2313 | 2099 |
| 2314 // CallRuntime <function_id> <first_arg> <arg_count> | 2100 // CallRuntime <function_id> <first_arg> <arg_count> |
| 2315 // | 2101 // |
| 2316 // Call the runtime function |function_id| with the first argument in | 2102 // Call the runtime function |function_id| with the first argument in |
| 2317 // register |first_arg| and |arg_count| arguments in subsequent | 2103 // register |first_arg| and |arg_count| arguments in subsequent |
| 2318 // registers. | 2104 // registers. |
| 2319 void Interpreter::DoCallRuntime(InterpreterAssembler* assembler) { | 2105 void InterpreterGenerator::DoCallRuntime(InterpreterAssembler* assembler) { |
| 2320 Node* function_id = __ BytecodeOperandRuntimeId(0); | 2106 Node* function_id = __ BytecodeOperandRuntimeId(0); |
| 2321 Node* first_arg_reg = __ BytecodeOperandReg(1); | 2107 Node* first_arg_reg = __ BytecodeOperandReg(1); |
| 2322 Node* first_arg = __ RegisterLocation(first_arg_reg); | 2108 Node* first_arg = __ RegisterLocation(first_arg_reg); |
| 2323 Node* args_count = __ BytecodeOperandCount(2); | 2109 Node* args_count = __ BytecodeOperandCount(2); |
| 2324 Node* context = __ GetContext(); | 2110 Node* context = __ GetContext(); |
| 2325 Node* result = __ CallRuntimeN(function_id, context, first_arg, args_count); | 2111 Node* result = __ CallRuntimeN(function_id, context, first_arg, args_count); |
| 2326 __ SetAccumulator(result); | 2112 __ SetAccumulator(result); |
| 2327 __ Dispatch(); | 2113 __ Dispatch(); |
| 2328 } | 2114 } |
| 2329 | 2115 |
| 2330 // InvokeIntrinsic <function_id> <first_arg> <arg_count> | 2116 // InvokeIntrinsic <function_id> <first_arg> <arg_count> |
| 2331 // | 2117 // |
| 2332 // Implements the semantic equivalent of calling the runtime function | 2118 // Implements the semantic equivalent of calling the runtime function |
| 2333 // |function_id| with the first argument in |first_arg| and |arg_count| | 2119 // |function_id| with the first argument in |first_arg| and |arg_count| |
| 2334 // arguments in subsequent registers. | 2120 // arguments in subsequent registers. |
| 2335 void Interpreter::DoInvokeIntrinsic(InterpreterAssembler* assembler) { | 2121 void InterpreterGenerator::DoInvokeIntrinsic(InterpreterAssembler* assembler) { |
| 2336 Node* function_id = __ BytecodeOperandIntrinsicId(0); | 2122 Node* function_id = __ BytecodeOperandIntrinsicId(0); |
| 2337 Node* first_arg_reg = __ BytecodeOperandReg(1); | 2123 Node* first_arg_reg = __ BytecodeOperandReg(1); |
| 2338 Node* arg_count = __ BytecodeOperandCount(2); | 2124 Node* arg_count = __ BytecodeOperandCount(2); |
| 2339 Node* context = __ GetContext(); | 2125 Node* context = __ GetContext(); |
| 2340 IntrinsicsHelper helper(assembler); | 2126 IntrinsicsHelper helper(assembler); |
| 2341 Node* result = | 2127 Node* result = |
| 2342 helper.InvokeIntrinsic(function_id, context, first_arg_reg, arg_count); | 2128 helper.InvokeIntrinsic(function_id, context, first_arg_reg, arg_count); |
| 2343 __ SetAccumulator(result); | 2129 __ SetAccumulator(result); |
| 2344 __ Dispatch(); | 2130 __ Dispatch(); |
| 2345 } | 2131 } |
| 2346 | 2132 |
| 2347 // CallRuntimeForPair <function_id> <first_arg> <arg_count> <first_return> | 2133 // CallRuntimeForPair <function_id> <first_arg> <arg_count> <first_return> |
| 2348 // | 2134 // |
| 2349 // Call the runtime function |function_id| which returns a pair, with the | 2135 // Call the runtime function |function_id| which returns a pair, with the |
| 2350 // first argument in register |first_arg| and |arg_count| arguments in | 2136 // first argument in register |first_arg| and |arg_count| arguments in |
| 2351 // subsequent registers. Returns the result in <first_return> and | 2137 // subsequent registers. Returns the result in <first_return> and |
| 2352 // <first_return + 1> | 2138 // <first_return + 1> |
| 2353 void Interpreter::DoCallRuntimeForPair(InterpreterAssembler* assembler) { | 2139 void InterpreterGenerator::DoCallRuntimeForPair( |
| 2140 InterpreterAssembler* assembler) { |
| 2354 // Call the runtime function. | 2141 // Call the runtime function. |
| 2355 Node* function_id = __ BytecodeOperandRuntimeId(0); | 2142 Node* function_id = __ BytecodeOperandRuntimeId(0); |
| 2356 Node* first_arg_reg = __ BytecodeOperandReg(1); | 2143 Node* first_arg_reg = __ BytecodeOperandReg(1); |
| 2357 Node* first_arg = __ RegisterLocation(first_arg_reg); | 2144 Node* first_arg = __ RegisterLocation(first_arg_reg); |
| 2358 Node* args_count = __ BytecodeOperandCount(2); | 2145 Node* args_count = __ BytecodeOperandCount(2); |
| 2359 Node* context = __ GetContext(); | 2146 Node* context = __ GetContext(); |
| 2360 Node* result_pair = | 2147 Node* result_pair = |
| 2361 __ CallRuntimeN(function_id, context, first_arg, args_count, 2); | 2148 __ CallRuntimeN(function_id, context, first_arg, args_count, 2); |
| 2362 // Store the results in <first_return> and <first_return + 1> | 2149 // Store the results in <first_return> and <first_return + 1> |
| 2363 Node* first_return_reg = __ BytecodeOperandReg(3); | 2150 Node* first_return_reg = __ BytecodeOperandReg(3); |
| 2364 Node* second_return_reg = __ NextRegister(first_return_reg); | 2151 Node* second_return_reg = __ NextRegister(first_return_reg); |
| 2365 Node* result0 = __ Projection(0, result_pair); | 2152 Node* result0 = __ Projection(0, result_pair); |
| 2366 Node* result1 = __ Projection(1, result_pair); | 2153 Node* result1 = __ Projection(1, result_pair); |
| 2367 __ StoreRegister(result0, first_return_reg); | 2154 __ StoreRegister(result0, first_return_reg); |
| 2368 __ StoreRegister(result1, second_return_reg); | 2155 __ StoreRegister(result1, second_return_reg); |
| 2369 __ Dispatch(); | 2156 __ Dispatch(); |
| 2370 } | 2157 } |
| 2371 | 2158 |
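The pair-returning case boils down to writing the two projections of the runtime result into consecutive registers, <first_return> and <first_return + 1>. A toy sketch of that bookkeeping (hypothetical register-file type, not V8 code):

```cpp
#include <array>
#include <utility>

// Toy register file standing in for the interpreter's register space.
using RegisterFile = std::array<int, 8>;

// Models a runtime function that returns two values.
std::pair<int, int> RuntimeReturningPair() { return {42, 7}; }

// Caller must ensure first_return_reg + 1 is still inside the register file.
void CallRuntimeForPair(RegisterFile& regs, int first_return_reg) {
  std::pair<int, int> result_pair = RuntimeReturningPair();
  regs[first_return_reg] = result_pair.first;       // Projection(0, ...)
  regs[first_return_reg + 1] = result_pair.second;  // Projection(1, ...)
}
```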
| 2372 // CallJSRuntime <context_index> <receiver> <arg_count> | 2159 // CallJSRuntime <context_index> <receiver> <arg_count> |
| 2373 // | 2160 // |
| 2374 // Call the JS runtime function that has the |context_index| with the receiver | 2161 // Call the JS runtime function that has the |context_index| with the receiver |
| 2375 // in register |receiver| and |arg_count| arguments in subsequent registers. | 2162 // in register |receiver| and |arg_count| arguments in subsequent registers. |
| 2376 void Interpreter::DoCallJSRuntime(InterpreterAssembler* assembler) { | 2163 void InterpreterGenerator::DoCallJSRuntime(InterpreterAssembler* assembler) { |
| 2377 Node* context_index = __ BytecodeOperandIdx(0); | 2164 Node* context_index = __ BytecodeOperandIdx(0); |
| 2378 Node* receiver_reg = __ BytecodeOperandReg(1); | 2165 Node* receiver_reg = __ BytecodeOperandReg(1); |
| 2379 Node* first_arg = __ RegisterLocation(receiver_reg); | 2166 Node* first_arg = __ RegisterLocation(receiver_reg); |
| 2380 Node* receiver_args_count = __ BytecodeOperandCount(2); | 2167 Node* receiver_args_count = __ BytecodeOperandCount(2); |
| 2381 Node* receiver_count = __ Int32Constant(1); | 2168 Node* receiver_count = __ Int32Constant(1); |
| 2382 Node* args_count = __ Int32Sub(receiver_args_count, receiver_count); | 2169 Node* args_count = __ Int32Sub(receiver_args_count, receiver_count); |
| 2383 | 2170 |
| 2384 // Get the function to call from the native context. | 2171 // Get the function to call from the native context. |
| 2385 Node* context = __ GetContext(); | 2172 Node* context = __ GetContext(); |
| 2386 Node* native_context = __ LoadNativeContext(context); | 2173 Node* native_context = __ LoadNativeContext(context); |
| 2387 Node* function = __ LoadContextElement(native_context, context_index); | 2174 Node* function = __ LoadContextElement(native_context, context_index); |
| 2388 | 2175 |
| 2389 // Call the function. | 2176 // Call the function. |
| 2390 Node* result = __ CallJS(function, context, first_arg, args_count, | 2177 Node* result = __ CallJS(function, context, first_arg, args_count, |
| 2391 TailCallMode::kDisallow); | 2178 TailCallMode::kDisallow); |
| 2392 __ SetAccumulator(result); | 2179 __ SetAccumulator(result); |
| 2393 __ Dispatch(); | 2180 __ Dispatch(); |
| 2394 } | 2181 } |
| 2395 | 2182 |
| 2396 // CallWithSpread <callable> <first_arg> <arg_count> | 2183 // CallWithSpread <callable> <first_arg> <arg_count> |
| 2397 // | 2184 // |
| 2398 // Call a JSFunction or Callable in |callable| with the receiver in | 2185 // Call a JSFunction or Callable in |callable| with the receiver in |
| 2399 // |first_arg| and |arg_count - 1| arguments in subsequent registers. The | 2186 // |first_arg| and |arg_count - 1| arguments in subsequent registers. The |
| 2400 // final argument is always a spread. | 2187 // final argument is always a spread. |
| 2401 // | 2188 // |
| 2402 void Interpreter::DoCallWithSpread(InterpreterAssembler* assembler) { | 2189 void InterpreterGenerator::DoCallWithSpread(InterpreterAssembler* assembler) { |
| 2403 Node* callable_reg = __ BytecodeOperandReg(0); | 2190 Node* callable_reg = __ BytecodeOperandReg(0); |
| 2404 Node* callable = __ LoadRegister(callable_reg); | 2191 Node* callable = __ LoadRegister(callable_reg); |
| 2405 Node* receiver_reg = __ BytecodeOperandReg(1); | 2192 Node* receiver_reg = __ BytecodeOperandReg(1); |
| 2406 Node* receiver_arg = __ RegisterLocation(receiver_reg); | 2193 Node* receiver_arg = __ RegisterLocation(receiver_reg); |
| 2407 Node* receiver_args_count = __ BytecodeOperandCount(2); | 2194 Node* receiver_args_count = __ BytecodeOperandCount(2); |
| 2408 Node* receiver_count = __ Int32Constant(1); | 2195 Node* receiver_count = __ Int32Constant(1); |
| 2409 Node* args_count = __ Int32Sub(receiver_args_count, receiver_count); | 2196 Node* args_count = __ Int32Sub(receiver_args_count, receiver_count); |
| 2410 Node* context = __ GetContext(); | 2197 Node* context = __ GetContext(); |
| 2411 | 2198 |
| 2412 // Call into Runtime function CallWithSpread which does everything. | 2199 // Call into Runtime function CallWithSpread which does everything. |
| 2413 Node* result = | 2200 Node* result = |
| 2414 __ CallJSWithSpread(callable, context, receiver_arg, args_count); | 2201 __ CallJSWithSpread(callable, context, receiver_arg, args_count); |
| 2415 __ SetAccumulator(result); | 2202 __ SetAccumulator(result); |
| 2416 __ Dispatch(); | 2203 __ Dispatch(); |
| 2417 } | 2204 } |
| 2418 | 2205 |
| 2419 // ConstructWithSpread <first_arg> <arg_count> | 2206 // ConstructWithSpread <first_arg> <arg_count> |
| 2420 // | 2207 // |
| 2421 // Call the constructor in |constructor| with the first argument in register | 2208 // Call the constructor in |constructor| with the first argument in register |
| 2422 // |first_arg| and |arg_count| arguments in subsequent registers. The final | 2209 // |first_arg| and |arg_count| arguments in subsequent registers. The final |
| 2423 // argument is always a spread. The new.target is in the accumulator. | 2210 // argument is always a spread. The new.target is in the accumulator. |
| 2424 // | 2211 // |
| 2425 void Interpreter::DoConstructWithSpread(InterpreterAssembler* assembler) { | 2212 void InterpreterGenerator::DoConstructWithSpread( |
| 2213 InterpreterAssembler* assembler) { |
| 2426 Node* new_target = __ GetAccumulator(); | 2214 Node* new_target = __ GetAccumulator(); |
| 2427 Node* constructor_reg = __ BytecodeOperandReg(0); | 2215 Node* constructor_reg = __ BytecodeOperandReg(0); |
| 2428 Node* constructor = __ LoadRegister(constructor_reg); | 2216 Node* constructor = __ LoadRegister(constructor_reg); |
| 2429 Node* first_arg_reg = __ BytecodeOperandReg(1); | 2217 Node* first_arg_reg = __ BytecodeOperandReg(1); |
| 2430 Node* first_arg = __ RegisterLocation(first_arg_reg); | 2218 Node* first_arg = __ RegisterLocation(first_arg_reg); |
| 2431 Node* args_count = __ BytecodeOperandCount(2); | 2219 Node* args_count = __ BytecodeOperandCount(2); |
| 2432 Node* context = __ GetContext(); | 2220 Node* context = __ GetContext(); |
| 2433 Node* result = __ ConstructWithSpread(constructor, context, new_target, | 2221 Node* result = __ ConstructWithSpread(constructor, context, new_target, |
| 2434 first_arg, args_count); | 2222 first_arg, args_count); |
| 2435 __ SetAccumulator(result); | 2223 __ SetAccumulator(result); |
| 2436 __ Dispatch(); | 2224 __ Dispatch(); |
| 2437 } | 2225 } |
| 2438 | 2226 |
| 2439 // Construct <constructor> <first_arg> <arg_count> | 2227 // Construct <constructor> <first_arg> <arg_count> |
| 2440 // | 2228 // |
| 2441 // Call operator construct with |constructor| and the first argument in | 2229 // Call operator construct with |constructor| and the first argument in |
| 2442 // register |first_arg| and |arg_count| arguments in subsequent | 2230 // register |first_arg| and |arg_count| arguments in subsequent |
| 2443 // registers. The new.target is in the accumulator. | 2231 // registers. The new.target is in the accumulator. |
| 2444 // | 2232 // |
| 2445 void Interpreter::DoConstruct(InterpreterAssembler* assembler) { | 2233 void InterpreterGenerator::DoConstruct(InterpreterAssembler* assembler) { |
| 2446 Node* new_target = __ GetAccumulator(); | 2234 Node* new_target = __ GetAccumulator(); |
| 2447 Node* constructor_reg = __ BytecodeOperandReg(0); | 2235 Node* constructor_reg = __ BytecodeOperandReg(0); |
| 2448 Node* constructor = __ LoadRegister(constructor_reg); | 2236 Node* constructor = __ LoadRegister(constructor_reg); |
| 2449 Node* first_arg_reg = __ BytecodeOperandReg(1); | 2237 Node* first_arg_reg = __ BytecodeOperandReg(1); |
| 2450 Node* first_arg = __ RegisterLocation(first_arg_reg); | 2238 Node* first_arg = __ RegisterLocation(first_arg_reg); |
| 2451 Node* args_count = __ BytecodeOperandCount(2); | 2239 Node* args_count = __ BytecodeOperandCount(2); |
| 2452 Node* slot_id = __ BytecodeOperandIdx(3); | 2240 Node* slot_id = __ BytecodeOperandIdx(3); |
| 2453 Node* feedback_vector = __ LoadFeedbackVector(); | 2241 Node* feedback_vector = __ LoadFeedbackVector(); |
| 2454 Node* context = __ GetContext(); | 2242 Node* context = __ GetContext(); |
| 2455 Node* result = __ Construct(constructor, context, new_target, first_arg, | 2243 Node* result = __ Construct(constructor, context, new_target, first_arg, |
| 2456 args_count, slot_id, feedback_vector); | 2244 args_count, slot_id, feedback_vector); |
| 2457 __ SetAccumulator(result); | 2245 __ SetAccumulator(result); |
| 2458 __ Dispatch(); | 2246 __ Dispatch(); |
| 2459 } | 2247 } |
| 2460 | 2248 |
| 2461 // TestEqual <src> | 2249 // TestEqual <src> |
| 2462 // | 2250 // |
| 2463 // Test if the value in the <src> register equals the accumulator. | 2251 // Test if the value in the <src> register equals the accumulator. |
| 2464 void Interpreter::DoTestEqual(InterpreterAssembler* assembler) { | 2252 void InterpreterGenerator::DoTestEqual(InterpreterAssembler* assembler) { |
| 2465 DoCompareOpWithFeedback(Token::Value::EQ, assembler); | 2253 DoCompareOpWithFeedback(Token::Value::EQ, assembler); |
| 2466 } | 2254 } |
| 2467 | 2255 |
| 2468 // TestEqualStrict <src> | 2256 // TestEqualStrict <src> |
| 2469 // | 2257 // |
| 2470 // Test if the value in the <src> register is strictly equal to the accumulator. | 2258 // Test if the value in the <src> register is strictly equal to the accumulator. |
| 2471 void Interpreter::DoTestEqualStrict(InterpreterAssembler* assembler) { | 2259 void InterpreterGenerator::DoTestEqualStrict(InterpreterAssembler* assembler) { |
| 2472 DoCompareOpWithFeedback(Token::Value::EQ_STRICT, assembler); | 2260 DoCompareOpWithFeedback(Token::Value::EQ_STRICT, assembler); |
| 2473 } | 2261 } |
| 2474 | 2262 |
| 2475 // TestLessThan <src> | 2263 // TestLessThan <src> |
| 2476 // | 2264 // |
| 2477 // Test if the value in the <src> register is less than the accumulator. | 2265 // Test if the value in the <src> register is less than the accumulator. |
| 2478 void Interpreter::DoTestLessThan(InterpreterAssembler* assembler) { | 2266 void InterpreterGenerator::DoTestLessThan(InterpreterAssembler* assembler) { |
| 2479 DoCompareOpWithFeedback(Token::Value::LT, assembler); | 2267 DoCompareOpWithFeedback(Token::Value::LT, assembler); |
| 2480 } | 2268 } |
| 2481 | 2269 |
| 2482 // TestGreaterThan <src> | 2270 // TestGreaterThan <src> |
| 2483 // | 2271 // |
| 2484 // Test if the value in the <src> register is greater than the accumulator. | 2272 // Test if the value in the <src> register is greater than the accumulator. |
| 2485 void Interpreter::DoTestGreaterThan(InterpreterAssembler* assembler) { | 2273 void InterpreterGenerator::DoTestGreaterThan(InterpreterAssembler* assembler) { |
| 2486 DoCompareOpWithFeedback(Token::Value::GT, assembler); | 2274 DoCompareOpWithFeedback(Token::Value::GT, assembler); |
| 2487 } | 2275 } |
| 2488 | 2276 |
| 2489 // TestLessThanOrEqual <src> | 2277 // TestLessThanOrEqual <src> |
| 2490 // | 2278 // |
| 2491 // Test if the value in the <src> register is less than or equal to the | 2279 // Test if the value in the <src> register is less than or equal to the |
| 2492 // accumulator. | 2280 // accumulator. |
| 2493 void Interpreter::DoTestLessThanOrEqual(InterpreterAssembler* assembler) { | 2281 void InterpreterGenerator::DoTestLessThanOrEqual( |
| 2282 InterpreterAssembler* assembler) { |
| 2494 DoCompareOpWithFeedback(Token::Value::LTE, assembler); | 2283 DoCompareOpWithFeedback(Token::Value::LTE, assembler); |
| 2495 } | 2284 } |
| 2496 | 2285 |
| 2497 // TestGreaterThanOrEqual <src> | 2286 // TestGreaterThanOrEqual <src> |
| 2498 // | 2287 // |
| 2499 // Test if the value in the <src> register is greater than or equal to the | 2288 // Test if the value in the <src> register is greater than or equal to the |
| 2500 // accumulator. | 2289 // accumulator. |
| 2501 void Interpreter::DoTestGreaterThanOrEqual(InterpreterAssembler* assembler) { | 2290 void InterpreterGenerator::DoTestGreaterThanOrEqual( |
| 2291 InterpreterAssembler* assembler) { |
| 2502 DoCompareOpWithFeedback(Token::Value::GTE, assembler); | 2292 DoCompareOpWithFeedback(Token::Value::GTE, assembler); |
| 2503 } | 2293 } |
| 2504 | 2294 |
| 2505 // TestIn <src> | 2295 // TestIn <src> |
| 2506 // | 2296 // |
| 2507 // Test if the object referenced by the register operand is a property of the | 2297 // Test if the object referenced by the register operand is a property of the |
| 2508 // object referenced by the accumulator. | 2298 // object referenced by the accumulator. |
| 2509 void Interpreter::DoTestIn(InterpreterAssembler* assembler) { | 2299 void InterpreterGenerator::DoTestIn(InterpreterAssembler* assembler) { |
| 2510 DoCompareOp(Token::IN, assembler); | 2300 DoCompareOp(Token::IN, assembler); |
| 2511 } | 2301 } |
| 2512 | 2302 |
| 2513 // TestInstanceOf <src> | 2303 // TestInstanceOf <src> |
| 2514 // | 2304 // |
| 2515 // Test if the object referenced by the <src> register is an instance of the type | 2305 // Test if the object referenced by the <src> register is an instance of the type |
| 2516 // referenced by the accumulator. | 2306 // referenced by the accumulator. |
| 2517 void Interpreter::DoTestInstanceOf(InterpreterAssembler* assembler) { | 2307 void InterpreterGenerator::DoTestInstanceOf(InterpreterAssembler* assembler) { |
| 2518 DoCompareOp(Token::INSTANCEOF, assembler); | 2308 DoCompareOp(Token::INSTANCEOF, assembler); |
| 2519 } | 2309 } |
| 2520 | 2310 |
| 2521 // TestUndetectable <src> | 2311 // TestUndetectable <src> |
| 2522 // | 2312 // |
| 2523 // Test if the value in the <src> register is equal to null/undefined. This is | 2313 // Test if the value in the <src> register is equal to null/undefined. This is |
| 2524 // done by checking the undetectable bit on the map of the object. | 2314 // done by checking the undetectable bit on the map of the object. |
| 2525 void Interpreter::DoTestUndetectable(InterpreterAssembler* assembler) { | 2315 void InterpreterGenerator::DoTestUndetectable(InterpreterAssembler* assembler) { |
| 2526 Node* reg_index = __ BytecodeOperandReg(0); | 2316 Node* reg_index = __ BytecodeOperandReg(0); |
| 2527 Node* object = __ LoadRegister(reg_index); | 2317 Node* object = __ LoadRegister(reg_index); |
| 2528 | 2318 |
| 2529 Label not_equal(assembler), end(assembler); | 2319 Label not_equal(assembler), end(assembler); |
| 2530 // If the object is an Smi then return false. | 2320 // If the object is an Smi then return false. |
| 2531 __ GotoIf(__ TaggedIsSmi(object), ¬_equal); | 2321 __ GotoIf(__ TaggedIsSmi(object), ¬_equal); |
| 2532 | 2322 |
| 2533 // If it is a HeapObject, load the map and check for undetectable bit. | 2323 // If it is a HeapObject, load the map and check for undetectable bit. |
| 2534 Node* map = __ LoadMap(object); | 2324 Node* map = __ LoadMap(object); |
| 2535 Node* map_bitfield = __ LoadMapBitField(map); | 2325 Node* map_bitfield = __ LoadMapBitField(map); |
| (...skipping 10 matching lines...) |
| 2546 __ Goto(&end); | 2336 __ Goto(&end); |
| 2547 } | 2337 } |
| 2548 | 2338 |
| 2549 __ Bind(&end); | 2339 __ Bind(&end); |
| 2550 __ Dispatch(); | 2340 __ Dispatch(); |
| 2551 } | 2341 } |
| 2552 | 2342 |
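The elided middle of DoTestUndetectable tests a single bit of the bit field loaded via LoadMapBitField; conceptually the whole check is: a Smi is never undetectable, otherwise inspect the undetectable bit. A sketch with a made-up bit position (the real one comes from V8's Map bit-field definitions):

```cpp
#include <cstdint>

// Hypothetical bit position, for illustration only.
constexpr uint8_t kIsUndetectableBit = 1 << 4;

// Smis are tagged with a clear low bit, so they carry no map at all.
bool IsSmi(uintptr_t tagged_value) { return (tagged_value & 1) == 0; }

// In the real handler the bit field is loaded from the object's map; here it
// is simply passed in so the check stays self-contained.
bool TestUndetectable(uintptr_t tagged_value, uint8_t map_bitfield) {
  if (IsSmi(tagged_value)) return false;  // Smi => not undetectable.
  return (map_bitfield & kIsUndetectableBit) != 0;
}
```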
| 2553 // TestNull <src> | 2343 // TestNull <src> |
| 2554 // | 2344 // |
| 2555 // Test if the value in the <src> register is strictly equal to null. | 2345 // Test if the value in the <src> register is strictly equal to null. |
| 2556 void Interpreter::DoTestNull(InterpreterAssembler* assembler) { | 2346 void InterpreterGenerator::DoTestNull(InterpreterAssembler* assembler) { |
| 2557 Node* reg_index = __ BytecodeOperandReg(0); | 2347 Node* reg_index = __ BytecodeOperandReg(0); |
| 2558 Node* object = __ LoadRegister(reg_index); | 2348 Node* object = __ LoadRegister(reg_index); |
| 2559 Node* null_value = __ HeapConstant(isolate_->factory()->null_value()); | 2349 Node* null_value = __ HeapConstant(isolate_->factory()->null_value()); |
| 2560 | 2350 |
| 2561 Label equal(assembler), end(assembler); | 2351 Label equal(assembler), end(assembler); |
| 2562 __ GotoIf(__ WordEqual(object, null_value), &equal); | 2352 __ GotoIf(__ WordEqual(object, null_value), &equal); |
| 2563 __ SetAccumulator(__ BooleanConstant(false)); | 2353 __ SetAccumulator(__ BooleanConstant(false)); |
| 2564 __ Goto(&end); | 2354 __ Goto(&end); |
| 2565 | 2355 |
| 2566 __ Bind(&equal); | 2356 __ Bind(&equal); |
| 2567 { | 2357 { |
| 2568 __ SetAccumulator(__ BooleanConstant(true)); | 2358 __ SetAccumulator(__ BooleanConstant(true)); |
| 2569 __ Goto(&end); | 2359 __ Goto(&end); |
| 2570 } | 2360 } |
| 2571 | 2361 |
| 2572 __ Bind(&end); | 2362 __ Bind(&end); |
| 2573 __ Dispatch(); | 2363 __ Dispatch(); |
| 2574 } | 2364 } |
| 2575 | 2365 |
| 2576 // TestUndefined <src> | 2366 // TestUndefined <src> |
| 2577 // | 2367 // |
| 2578 // Test if the value in the <src> register is strictly equal to undefined. | 2368 // Test if the value in the <src> register is strictly equal to undefined. |
| 2579 void Interpreter::DoTestUndefined(InterpreterAssembler* assembler) { | 2369 void InterpreterGenerator::DoTestUndefined(InterpreterAssembler* assembler) { |
| 2580 Node* reg_index = __ BytecodeOperandReg(0); | 2370 Node* reg_index = __ BytecodeOperandReg(0); |
| 2581 Node* object = __ LoadRegister(reg_index); | 2371 Node* object = __ LoadRegister(reg_index); |
| 2582 Node* undefined_value = | 2372 Node* undefined_value = |
| 2583 __ HeapConstant(isolate_->factory()->undefined_value()); | 2373 __ HeapConstant(isolate_->factory()->undefined_value()); |
| 2584 | 2374 |
| 2585 Label equal(assembler), end(assembler); | 2375 Label equal(assembler), end(assembler); |
| 2586 __ GotoIf(__ WordEqual(object, undefined_value), &equal); | 2376 __ GotoIf(__ WordEqual(object, undefined_value), &equal); |
| 2587 __ SetAccumulator(__ BooleanConstant(false)); | 2377 __ SetAccumulator(__ BooleanConstant(false)); |
| 2588 __ Goto(&end); | 2378 __ Goto(&end); |
| 2589 | 2379 |
| 2590 __ Bind(&equal); | 2380 __ Bind(&equal); |
| 2591 { | 2381 { |
| 2592 __ SetAccumulator(__ BooleanConstant(true)); | 2382 __ SetAccumulator(__ BooleanConstant(true)); |
| 2593 __ Goto(&end); | 2383 __ Goto(&end); |
| 2594 } | 2384 } |
| 2595 | 2385 |
| 2596 __ Bind(&end); | 2386 __ Bind(&end); |
| 2597 __ Dispatch(); | 2387 __ Dispatch(); |
| 2598 } | 2388 } |
| 2599 | 2389 |
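TestNull and TestUndefined need no coercion at all: each is an identity comparison against a canonical singleton obtained via HeapConstant(isolate_->factory()->...). As a sketch (hypothetical Object type, not V8 handles):

```cpp
// Hypothetical stand-ins for the canonical null and undefined heap objects;
// the real handlers compare against HeapConstant(factory->null_value()) and
// HeapConstant(factory->undefined_value()).
struct Object {};
static const Object kNullValue{};
static const Object kUndefinedValue{};

// The strict-equality tests reduce to identity against the singletons.
bool TestNull(const Object* value) { return value == &kNullValue; }
bool TestUndefined(const Object* value) { return value == &kUndefinedValue; }
```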
| 2600 // TestTypeOf <literal_flag> | 2390 // TestTypeOf <literal_flag> |
| 2601 // | 2391 // |
| 2602 // Tests if the object in the <accumulator> is typeof the literal represented | 2392 // Tests if the object in the <accumulator> is typeof the literal represented |
| 2603 // by |literal_flag|. | 2393 // by |literal_flag|. |
| 2604 void Interpreter::DoTestTypeOf(InterpreterAssembler* assembler) { | 2394 void InterpreterGenerator::DoTestTypeOf(InterpreterAssembler* assembler) { |
| 2605 Node* object = __ GetAccumulator(); | 2395 Node* object = __ GetAccumulator(); |
| 2606 Node* literal_flag = __ BytecodeOperandFlag(0); | 2396 Node* literal_flag = __ BytecodeOperandFlag(0); |
| 2607 | 2397 |
| 2608 #define MAKE_LABEL(name, lower_case) Label if_##lower_case(assembler); | 2398 #define MAKE_LABEL(name, lower_case) Label if_##lower_case(assembler); |
| 2609 TYPEOF_LITERAL_LIST(MAKE_LABEL) | 2399 TYPEOF_LITERAL_LIST(MAKE_LABEL) |
| 2610 #undef MAKE_LABEL | 2400 #undef MAKE_LABEL |
| 2611 | 2401 |
| 2612 #define LABEL_POINTER(name, lower_case) &if_##lower_case, | 2402 #define LABEL_POINTER(name, lower_case) &if_##lower_case, |
| 2613 Label* labels[] = {TYPEOF_LITERAL_LIST(LABEL_POINTER)}; | 2403 Label* labels[] = {TYPEOF_LITERAL_LIST(LABEL_POINTER)}; |
| 2614 #undef LABEL_POINTER | 2404 #undef LABEL_POINTER |
| (...skipping 98 matching lines...) |
| 2713 __ SetAccumulator(__ BooleanConstant(true)); | 2503 __ SetAccumulator(__ BooleanConstant(true)); |
| 2714 __ Goto(&end); | 2504 __ Goto(&end); |
| 2715 } | 2505 } |
| 2716 __ Bind(&end); | 2506 __ Bind(&end); |
| 2717 __ Dispatch(); | 2507 __ Dispatch(); |
| 2718 } | 2508 } |
| 2719 | 2509 |
| 2720 // Jump <imm> | 2510 // Jump <imm> |
| 2721 // | 2511 // |
| 2722 // Jump by number of bytes represented by the immediate operand |imm|. | 2512 // Jump by number of bytes represented by the immediate operand |imm|. |
| 2723 void Interpreter::DoJump(InterpreterAssembler* assembler) { | 2513 void InterpreterGenerator::DoJump(InterpreterAssembler* assembler) { |
| 2724 Node* relative_jump = __ BytecodeOperandUImmWord(0); | 2514 Node* relative_jump = __ BytecodeOperandUImmWord(0); |
| 2725 __ Jump(relative_jump); | 2515 __ Jump(relative_jump); |
| 2726 } | 2516 } |
| 2727 | 2517 |
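For orientation: a jump handler never falls through to Dispatch(); it re-enters dispatch at the target offset, which is why DoJump ends in Jump(relative_jump) alone. A toy dispatch loop (purely illustrative, nothing like the real threaded dispatch) that shows this shape:

```cpp
#include <cstdint>
#include <vector>

// Toy bytecodes, for illustration only.
enum class ToyBytecode : uint8_t { kNop = 0, kJump = 1, kReturn = 2 };

void RunToyDispatchLoop(const std::vector<uint8_t>& bytecodes) {
  size_t offset = 0;
  while (offset < bytecodes.size()) {
    switch (static_cast<ToyBytecode>(bytecodes[offset])) {
      case ToyBytecode::kNop:
        offset += 1;  // "Dispatch()": advance past the bytecode and go on.
        break;
      case ToyBytecode::kJump:
        // "Jump(relative_jump)": the operand is a relative byte offset.
        offset += bytecodes[offset + 1];
        break;
      case ToyBytecode::kReturn:
        return;
    }
  }
}

// Example: RunToyDispatchLoop({0, 1, 3, 0, 2}) executes kNop, then the jump
// skips over the kNop at index 3 straight to kReturn.
```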
| 2728 // JumpConstant <idx> | 2518 // JumpConstant <idx> |
| 2729 // | 2519 // |
| 2730 // Jump by number of bytes in the Smi in the |idx| entry in the constant pool. | 2520 // Jump by number of bytes in the Smi in the |idx| entry in the constant pool. |
| 2731 void Interpreter::DoJumpConstant(InterpreterAssembler* assembler) { | 2521 void InterpreterGenerator::DoJumpConstant(InterpreterAssembler* assembler) { |
| 2732 Node* index = __ BytecodeOperandIdx(0); | 2522 Node* index = __ BytecodeOperandIdx(0); |
| 2733 Node* relative_jump = __ LoadAndUntagConstantPoolEntry(index); | 2523 Node* relative_jump = __ LoadAndUntagConstantPoolEntry(index); |
| 2734 __ Jump(relative_jump); | 2524 __ Jump(relative_jump); |
| 2735 } | 2525 } |
| 2736 | 2526 |
| 2737 // JumpIfTrue <imm> | 2527 // JumpIfTrue <imm> |
| 2738 // | 2528 // |
| 2739 // Jump by number of bytes represented by an immediate operand if the | 2529 // Jump by number of bytes represented by an immediate operand if the |
| 2740 // accumulator contains true. This only works for boolean inputs, and | 2530 // accumulator contains true. This only works for boolean inputs, and |
| 2741 // will misbehave if passed arbitrary input values. | 2531 // will misbehave if passed arbitrary input values. |
| 2742 void Interpreter::DoJumpIfTrue(InterpreterAssembler* assembler) { | 2532 void InterpreterGenerator::DoJumpIfTrue(InterpreterAssembler* assembler) { |
| 2743 Node* accumulator = __ GetAccumulator(); | 2533 Node* accumulator = __ GetAccumulator(); |
| 2744 Node* relative_jump = __ BytecodeOperandUImmWord(0); | 2534 Node* relative_jump = __ BytecodeOperandUImmWord(0); |
| 2745 Node* true_value = __ BooleanConstant(true); | 2535 Node* true_value = __ BooleanConstant(true); |
| 2746 CSA_ASSERT(assembler, assembler->TaggedIsNotSmi(accumulator)); | 2536 CSA_ASSERT(assembler, assembler->TaggedIsNotSmi(accumulator)); |
| 2747 CSA_ASSERT(assembler, assembler->IsBoolean(accumulator)); | 2537 CSA_ASSERT(assembler, assembler->IsBoolean(accumulator)); |
| 2748 __ JumpIfWordEqual(accumulator, true_value, relative_jump); | 2538 __ JumpIfWordEqual(accumulator, true_value, relative_jump); |
| 2749 } | 2539 } |
| 2750 | 2540 |
| 2751 // JumpIfTrueConstant <idx> | 2541 // JumpIfTrueConstant <idx> |
| 2752 // | 2542 // |
| 2753 // Jump by number of bytes in the Smi in the |idx| entry in the constant pool | 2543 // Jump by number of bytes in the Smi in the |idx| entry in the constant pool |
| 2754 // if the accumulator contains true. This only works for boolean inputs, and | 2544 // if the accumulator contains true. This only works for boolean inputs, and |
| 2755 // will misbehave if passed arbitrary input values. | 2545 // will misbehave if passed arbitrary input values. |
| 2756 void Interpreter::DoJumpIfTrueConstant(InterpreterAssembler* assembler) { | 2546 void InterpreterGenerator::DoJumpIfTrueConstant( |
| 2547 InterpreterAssembler* assembler) { |
| 2757 Node* accumulator = __ GetAccumulator(); | 2548 Node* accumulator = __ GetAccumulator(); |
| 2758 Node* index = __ BytecodeOperandIdx(0); | 2549 Node* index = __ BytecodeOperandIdx(0); |
| 2759 Node* relative_jump = __ LoadAndUntagConstantPoolEntry(index); | 2550 Node* relative_jump = __ LoadAndUntagConstantPoolEntry(index); |
| 2760 Node* true_value = __ BooleanConstant(true); | 2551 Node* true_value = __ BooleanConstant(true); |
| 2761 CSA_ASSERT(assembler, assembler->TaggedIsNotSmi(accumulator)); | 2552 CSA_ASSERT(assembler, assembler->TaggedIsNotSmi(accumulator)); |
| 2762 CSA_ASSERT(assembler, assembler->IsBoolean(accumulator)); | 2553 CSA_ASSERT(assembler, assembler->IsBoolean(accumulator)); |
| 2763 __ JumpIfWordEqual(accumulator, true_value, relative_jump); | 2554 __ JumpIfWordEqual(accumulator, true_value, relative_jump); |
| 2764 } | 2555 } |
| 2765 | 2556 |
| 2766 // JumpIfFalse <imm> | 2557 // JumpIfFalse <imm> |
| 2767 // | 2558 // |
| 2768 // Jump by number of bytes represented by an immediate operand if the | 2559 // Jump by number of bytes represented by an immediate operand if the |
| 2769 // accumulator contains false. This only works for boolean inputs, and | 2560 // accumulator contains false. This only works for boolean inputs, and |
| 2770 // will misbehave if passed arbitrary input values. | 2561 // will misbehave if passed arbitrary input values. |
| 2771 void Interpreter::DoJumpIfFalse(InterpreterAssembler* assembler) { | 2562 void InterpreterGenerator::DoJumpIfFalse(InterpreterAssembler* assembler) { |
| 2772 Node* accumulator = __ GetAccumulator(); | 2563 Node* accumulator = __ GetAccumulator(); |
| 2773 Node* relative_jump = __ BytecodeOperandUImmWord(0); | 2564 Node* relative_jump = __ BytecodeOperandUImmWord(0); |
| 2774 Node* false_value = __ BooleanConstant(false); | 2565 Node* false_value = __ BooleanConstant(false); |
| 2775 CSA_ASSERT(assembler, assembler->TaggedIsNotSmi(accumulator)); | 2566 CSA_ASSERT(assembler, assembler->TaggedIsNotSmi(accumulator)); |
| 2776 CSA_ASSERT(assembler, assembler->IsBoolean(accumulator)); | 2567 CSA_ASSERT(assembler, assembler->IsBoolean(accumulator)); |
| 2777 __ JumpIfWordEqual(accumulator, false_value, relative_jump); | 2568 __ JumpIfWordEqual(accumulator, false_value, relative_jump); |
| 2778 } | 2569 } |
| 2779 | 2570 |
| 2780 // JumpIfFalseConstant <idx> | 2571 // JumpIfFalseConstant <idx> |
| 2781 // | 2572 // |
| 2782 // Jump by number of bytes in the Smi in the |idx| entry in the constant pool | 2573 // Jump by number of bytes in the Smi in the |idx| entry in the constant pool |
| 2783 // if the accumulator contains false. This only works for boolean inputs, and | 2574 // if the accumulator contains false. This only works for boolean inputs, and |
| 2784 // will misbehave if passed arbitrary input values. | 2575 // will misbehave if passed arbitrary input values. |
| 2785 void Interpreter::DoJumpIfFalseConstant(InterpreterAssembler* assembler) { | 2576 void InterpreterGenerator::DoJumpIfFalseConstant( |
| 2577 InterpreterAssembler* assembler) { |
| 2786 Node* accumulator = __ GetAccumulator(); | 2578 Node* accumulator = __ GetAccumulator(); |
| 2787 Node* index = __ BytecodeOperandIdx(0); | 2579 Node* index = __ BytecodeOperandIdx(0); |
| 2788 Node* relative_jump = __ LoadAndUntagConstantPoolEntry(index); | 2580 Node* relative_jump = __ LoadAndUntagConstantPoolEntry(index); |
| 2789 Node* false_value = __ BooleanConstant(false); | 2581 Node* false_value = __ BooleanConstant(false); |
| 2790 CSA_ASSERT(assembler, assembler->TaggedIsNotSmi(accumulator)); | 2582 CSA_ASSERT(assembler, assembler->TaggedIsNotSmi(accumulator)); |
| 2791 CSA_ASSERT(assembler, assembler->IsBoolean(accumulator)); | 2583 CSA_ASSERT(assembler, assembler->IsBoolean(accumulator)); |
| 2792 __ JumpIfWordEqual(accumulator, false_value, relative_jump); | 2584 __ JumpIfWordEqual(accumulator, false_value, relative_jump); |
| 2793 } | 2585 } |
| 2794 | 2586 |
| 2795 // JumpIfToBooleanTrue <imm> | 2587 // JumpIfToBooleanTrue <imm> |
| 2796 // | 2588 // |
| 2797 // Jump by number of bytes represented by an immediate operand if the object | 2589 // Jump by number of bytes represented by an immediate operand if the object |
| 2798 // referenced by the accumulator is true when the object is cast to boolean. | 2590 // referenced by the accumulator is true when the object is cast to boolean. |
| 2799 void Interpreter::DoJumpIfToBooleanTrue(InterpreterAssembler* assembler) { | 2591 void InterpreterGenerator::DoJumpIfToBooleanTrue( |
| 2592 InterpreterAssembler* assembler) { |
| 2800 Node* value = __ GetAccumulator(); | 2593 Node* value = __ GetAccumulator(); |
| 2801 Node* relative_jump = __ BytecodeOperandUImmWord(0); | 2594 Node* relative_jump = __ BytecodeOperandUImmWord(0); |
| 2802 Label if_true(assembler), if_false(assembler); | 2595 Label if_true(assembler), if_false(assembler); |
| 2803 __ BranchIfToBooleanIsTrue(value, &if_true, &if_false); | 2596 __ BranchIfToBooleanIsTrue(value, &if_true, &if_false); |
| 2804 __ Bind(&if_true); | 2597 __ Bind(&if_true); |
| 2805 __ Jump(relative_jump); | 2598 __ Jump(relative_jump); |
| 2806 __ Bind(&if_false); | 2599 __ Bind(&if_false); |
| 2807 __ Dispatch(); | 2600 __ Dispatch(); |
| 2808 } | 2601 } |
| 2809 | 2602 |
| 2810 // JumpIfToBooleanTrueConstant <idx> | 2603 // JumpIfToBooleanTrueConstant <idx> |
| 2811 // | 2604 // |
| 2812 // Jump by number of bytes in the Smi in the |idx| entry in the constant pool | 2605 // Jump by number of bytes in the Smi in the |idx| entry in the constant pool |
| 2813 // if the object referenced by the accumulator is true when the object is cast | 2606 // if the object referenced by the accumulator is true when the object is cast |
| 2814 // to boolean. | 2607 // to boolean. |
| 2815 void Interpreter::DoJumpIfToBooleanTrueConstant( | 2608 void InterpreterGenerator::DoJumpIfToBooleanTrueConstant( |
| 2816 InterpreterAssembler* assembler) { | 2609 InterpreterAssembler* assembler) { |
| 2817 Node* value = __ GetAccumulator(); | 2610 Node* value = __ GetAccumulator(); |
| 2818 Node* index = __ BytecodeOperandIdx(0); | 2611 Node* index = __ BytecodeOperandIdx(0); |
| 2819 Node* relative_jump = __ LoadAndUntagConstantPoolEntry(index); | 2612 Node* relative_jump = __ LoadAndUntagConstantPoolEntry(index); |
| 2820 Label if_true(assembler), if_false(assembler); | 2613 Label if_true(assembler), if_false(assembler); |
| 2821 __ BranchIfToBooleanIsTrue(value, &if_true, &if_false); | 2614 __ BranchIfToBooleanIsTrue(value, &if_true, &if_false); |
| 2822 __ Bind(&if_true); | 2615 __ Bind(&if_true); |
| 2823 __ Jump(relative_jump); | 2616 __ Jump(relative_jump); |
| 2824 __ Bind(&if_false); | 2617 __ Bind(&if_false); |
| 2825 __ Dispatch(); | 2618 __ Dispatch(); |
| 2826 } | 2619 } |
| 2827 | 2620 |
| 2828 // JumpIfToBooleanFalse <imm> | 2621 // JumpIfToBooleanFalse <imm> |
| 2829 // | 2622 // |
| 2830 // Jump by number of bytes represented by an immediate operand if the object | 2623 // Jump by number of bytes represented by an immediate operand if the object |
| 2831 // referenced by the accumulator is false when the object is cast to boolean. | 2624 // referenced by the accumulator is false when the object is cast to boolean. |
| 2832 void Interpreter::DoJumpIfToBooleanFalse(InterpreterAssembler* assembler) { | 2625 void InterpreterGenerator::DoJumpIfToBooleanFalse( |
| 2626 InterpreterAssembler* assembler) { |
| 2833 Node* value = __ GetAccumulator(); | 2627 Node* value = __ GetAccumulator(); |
| 2834 Node* relative_jump = __ BytecodeOperandUImmWord(0); | 2628 Node* relative_jump = __ BytecodeOperandUImmWord(0); |
| 2835 Label if_true(assembler), if_false(assembler); | 2629 Label if_true(assembler), if_false(assembler); |
| 2836 __ BranchIfToBooleanIsTrue(value, &if_true, &if_false); | 2630 __ BranchIfToBooleanIsTrue(value, &if_true, &if_false); |
| 2837 __ Bind(&if_true); | 2631 __ Bind(&if_true); |
| 2838 __ Dispatch(); | 2632 __ Dispatch(); |
| 2839 __ Bind(&if_false); | 2633 __ Bind(&if_false); |
| 2840 __ Jump(relative_jump); | 2634 __ Jump(relative_jump); |
| 2841 } | 2635 } |
| 2842 | 2636 |
| 2843 // JumpIfToBooleanFalseConstant <idx> | 2637 // JumpIfToBooleanFalseConstant <idx> |
| 2844 // | 2638 // |
| 2845 // Jump by number of bytes in the Smi in the |idx| entry in the constant pool | 2639 // Jump by number of bytes in the Smi in the |idx| entry in the constant pool |
| 2846 // if the object referenced by the accumulator is false when the object is cast | 2640 // if the object referenced by the accumulator is false when the object is cast |
| 2847 // to boolean. | 2641 // to boolean. |
| 2848 void Interpreter::DoJumpIfToBooleanFalseConstant( | 2642 void InterpreterGenerator::DoJumpIfToBooleanFalseConstant( |
| 2849 InterpreterAssembler* assembler) { | 2643 InterpreterAssembler* assembler) { |
| 2850 Node* value = __ GetAccumulator(); | 2644 Node* value = __ GetAccumulator(); |
| 2851 Node* index = __ BytecodeOperandIdx(0); | 2645 Node* index = __ BytecodeOperandIdx(0); |
| 2852 Node* relative_jump = __ LoadAndUntagConstantPoolEntry(index); | 2646 Node* relative_jump = __ LoadAndUntagConstantPoolEntry(index); |
| 2853 Label if_true(assembler), if_false(assembler); | 2647 Label if_true(assembler), if_false(assembler); |
| 2854 __ BranchIfToBooleanIsTrue(value, &if_true, &if_false); | 2648 __ BranchIfToBooleanIsTrue(value, &if_true, &if_false); |
| 2855 __ Bind(&if_true); | 2649 __ Bind(&if_true); |
| 2856 __ Dispatch(); | 2650 __ Dispatch(); |
| 2857 __ Bind(&if_false); | 2651 __ Bind(&if_false); |
| 2858 __ Jump(relative_jump); | 2652 __ Jump(relative_jump); |
| 2859 } | 2653 } |
| 2860 | 2654 |
| 2861 // JumpIfNull <imm> | 2655 // JumpIfNull <imm> |
| 2862 // | 2656 // |
| 2863 // Jump by number of bytes represented by an immediate operand if the object | 2657 // Jump by number of bytes represented by an immediate operand if the object |
| 2864 // referenced by the accumulator is the null constant. | 2658 // referenced by the accumulator is the null constant. |
| 2865 void Interpreter::DoJumpIfNull(InterpreterAssembler* assembler) { | 2659 void InterpreterGenerator::DoJumpIfNull(InterpreterAssembler* assembler) { |
| 2866 Node* accumulator = __ GetAccumulator(); | 2660 Node* accumulator = __ GetAccumulator(); |
| 2867 Node* null_value = __ HeapConstant(isolate_->factory()->null_value()); | 2661 Node* null_value = __ HeapConstant(isolate_->factory()->null_value()); |
| 2868 Node* relative_jump = __ BytecodeOperandUImmWord(0); | 2662 Node* relative_jump = __ BytecodeOperandUImmWord(0); |
| 2869 __ JumpIfWordEqual(accumulator, null_value, relative_jump); | 2663 __ JumpIfWordEqual(accumulator, null_value, relative_jump); |
| 2870 } | 2664 } |
| 2871 | 2665 |
| 2872 // JumpIfNullConstant <idx> | 2666 // JumpIfNullConstant <idx> |
| 2873 // | 2667 // |
| 2874 // Jump by number of bytes in the Smi in the |idx| entry in the constant pool | 2668 // Jump by number of bytes in the Smi in the |idx| entry in the constant pool |
| 2875 // if the object referenced by the accumulator is the null constant. | 2669 // if the object referenced by the accumulator is the null constant. |
| 2876 void Interpreter::DoJumpIfNullConstant(InterpreterAssembler* assembler) { | 2670 void InterpreterGenerator::DoJumpIfNullConstant( |
| 2671 InterpreterAssembler* assembler) { |
| 2877 Node* accumulator = __ GetAccumulator(); | 2672 Node* accumulator = __ GetAccumulator(); |
| 2878 Node* null_value = __ HeapConstant(isolate_->factory()->null_value()); | 2673 Node* null_value = __ HeapConstant(isolate_->factory()->null_value()); |
| 2879 Node* index = __ BytecodeOperandIdx(0); | 2674 Node* index = __ BytecodeOperandIdx(0); |
| 2880 Node* relative_jump = __ LoadAndUntagConstantPoolEntry(index); | 2675 Node* relative_jump = __ LoadAndUntagConstantPoolEntry(index); |
| 2881 __ JumpIfWordEqual(accumulator, null_value, relative_jump); | 2676 __ JumpIfWordEqual(accumulator, null_value, relative_jump); |
| 2882 } | 2677 } |
| 2883 | 2678 |
| 2884 // JumpIfUndefined <imm> | 2679 // JumpIfUndefined <imm> |
| 2885 // | 2680 // |
| 2886 // Jump by number of bytes represented by an immediate operand if the object | 2681 // Jump by number of bytes represented by an immediate operand if the object |
| 2887 // referenced by the accumulator is the undefined constant. | 2682 // referenced by the accumulator is the undefined constant. |
| 2888 void Interpreter::DoJumpIfUndefined(InterpreterAssembler* assembler) { | 2683 void InterpreterGenerator::DoJumpIfUndefined(InterpreterAssembler* assembler) { |
| 2889 Node* accumulator = __ GetAccumulator(); | 2684 Node* accumulator = __ GetAccumulator(); |
| 2890 Node* undefined_value = | 2685 Node* undefined_value = |
| 2891 __ HeapConstant(isolate_->factory()->undefined_value()); | 2686 __ HeapConstant(isolate_->factory()->undefined_value()); |
| 2892 Node* relative_jump = __ BytecodeOperandUImmWord(0); | 2687 Node* relative_jump = __ BytecodeOperandUImmWord(0); |
| 2893 __ JumpIfWordEqual(accumulator, undefined_value, relative_jump); | 2688 __ JumpIfWordEqual(accumulator, undefined_value, relative_jump); |
| 2894 } | 2689 } |
| 2895 | 2690 |
| 2896 // JumpIfUndefinedConstant <idx> | 2691 // JumpIfUndefinedConstant <idx> |
| 2897 // | 2692 // |
| 2898 // Jump by number of bytes in the Smi in the |idx| entry in the constant pool | 2693 // Jump by number of bytes in the Smi in the |idx| entry in the constant pool |
| 2899 // if the object referenced by the accumulator is the undefined constant. | 2694 // if the object referenced by the accumulator is the undefined constant. |
| 2900 void Interpreter::DoJumpIfUndefinedConstant(InterpreterAssembler* assembler) { | 2695 void InterpreterGenerator::DoJumpIfUndefinedConstant( |
| 2696 InterpreterAssembler* assembler) { |
| 2901 Node* accumulator = __ GetAccumulator(); | 2697 Node* accumulator = __ GetAccumulator(); |
| 2902 Node* undefined_value = | 2698 Node* undefined_value = |
| 2903 __ HeapConstant(isolate_->factory()->undefined_value()); | 2699 __ HeapConstant(isolate_->factory()->undefined_value()); |
| 2904 Node* index = __ BytecodeOperandIdx(0); | 2700 Node* index = __ BytecodeOperandIdx(0); |
| 2905 Node* relative_jump = __ LoadAndUntagConstantPoolEntry(index); | 2701 Node* relative_jump = __ LoadAndUntagConstantPoolEntry(index); |
| 2906 __ JumpIfWordEqual(accumulator, undefined_value, relative_jump); | 2702 __ JumpIfWordEqual(accumulator, undefined_value, relative_jump); |
| 2907 } | 2703 } |
| 2908 | 2704 |
| 2909 // JumpIfJSReceiver <imm> | 2705 // JumpIfJSReceiver <imm> |
| 2910 // | 2706 // |
| 2911 // Jump by number of bytes represented by an immediate operand if the object | 2707 // Jump by number of bytes represented by an immediate operand if the object |
| 2912 // referenced by the accumulator is a JSReceiver. | 2708 // referenced by the accumulator is a JSReceiver. |
| 2913 void Interpreter::DoJumpIfJSReceiver(InterpreterAssembler* assembler) { | 2709 void InterpreterGenerator::DoJumpIfJSReceiver(InterpreterAssembler* assembler) { |
| 2914 Node* accumulator = __ GetAccumulator(); | 2710 Node* accumulator = __ GetAccumulator(); |
| 2915 Node* relative_jump = __ BytecodeOperandUImmWord(0); | 2711 Node* relative_jump = __ BytecodeOperandUImmWord(0); |
| 2916 | 2712 |
| 2917 Label if_object(assembler), if_notobject(assembler, Label::kDeferred), | 2713 Label if_object(assembler), if_notobject(assembler, Label::kDeferred), |
| 2918 if_notsmi(assembler); | 2714 if_notsmi(assembler); |
| 2919 __ Branch(__ TaggedIsSmi(accumulator), &if_notobject, &if_notsmi); | 2715 __ Branch(__ TaggedIsSmi(accumulator), &if_notobject, &if_notsmi); |
| 2920 | 2716 |
| 2921 __ Bind(&if_notsmi); | 2717 __ Bind(&if_notsmi); |
| 2922 __ Branch(__ IsJSReceiver(accumulator), &if_object, &if_notobject); | 2718 __ Branch(__ IsJSReceiver(accumulator), &if_object, &if_notobject); |
| 2923 __ Bind(&if_object); | 2719 __ Bind(&if_object); |
| 2924 __ Jump(relative_jump); | 2720 __ Jump(relative_jump); |
| 2925 | 2721 |
| 2926 __ Bind(&if_notobject); | 2722 __ Bind(&if_notobject); |
| 2927 __ Dispatch(); | 2723 __ Dispatch(); |
| 2928 } | 2724 } |
| 2929 | 2725 |
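The receiver test in these two handlers is two-step: reject Smis first (they have no map), then let IsJSReceiver decide from the map. A sketch with a hypothetical instance-type threshold (assuming, as in V8's type ordering, that all JSReceiver instance types sit at or above one cutoff):

```cpp
#include <cstdint>

// Hypothetical numbering for illustration; the assumption is that every
// JSReceiver instance type sorts at or above a single threshold, which is
// what makes the check a simple range comparison.
constexpr uint16_t kFirstJSReceiverType = 1024;

bool IsSmiValue(uintptr_t tagged_value) { return (tagged_value & 1) == 0; }

bool IsJSReceiver(uintptr_t tagged_value, uint16_t map_instance_type) {
  if (IsSmiValue(tagged_value)) return false;  // Smi => not a JSReceiver.
  return map_instance_type >= kFirstJSReceiverType;
}
```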
| 2930 // JumpIfJSReceiverConstant <idx> | 2726 // JumpIfJSReceiverConstant <idx> |
| 2931 // | 2727 // |
| 2932 // Jump by number of bytes in the Smi in the |idx| entry in the constant pool if | 2728 // Jump by number of bytes in the Smi in the |idx| entry in the constant pool if |
| 2933 // the object referenced by the accumulator is a JSReceiver. | 2729 // the object referenced by the accumulator is a JSReceiver. |
| 2934 void Interpreter::DoJumpIfJSReceiverConstant(InterpreterAssembler* assembler) { | 2730 void InterpreterGenerator::DoJumpIfJSReceiverConstant( |
| 2731 InterpreterAssembler* assembler) { |
| 2935 Node* accumulator = __ GetAccumulator(); | 2732 Node* accumulator = __ GetAccumulator(); |
| 2936 Node* index = __ BytecodeOperandIdx(0); | 2733 Node* index = __ BytecodeOperandIdx(0); |
| 2937 Node* relative_jump = __ LoadAndUntagConstantPoolEntry(index); | 2734 Node* relative_jump = __ LoadAndUntagConstantPoolEntry(index); |
| 2938 | 2735 |
| 2939 Label if_object(assembler), if_notobject(assembler), if_notsmi(assembler); | 2736 Label if_object(assembler), if_notobject(assembler), if_notsmi(assembler); |
| 2940 __ Branch(__ TaggedIsSmi(accumulator), &if_notobject, &if_notsmi); | 2737 __ Branch(__ TaggedIsSmi(accumulator), &if_notobject, &if_notsmi); |
| 2941 | 2738 |
| 2942 __ Bind(&if_notsmi); | 2739 __ Bind(&if_notsmi); |
| 2943 __ Branch(__ IsJSReceiver(accumulator), &if_object, &if_notobject); | 2740 __ Branch(__ IsJSReceiver(accumulator), &if_object, &if_notobject); |
| 2944 | 2741 |
| 2945 __ Bind(&if_object); | 2742 __ Bind(&if_object); |
| 2946 __ Jump(relative_jump); | 2743 __ Jump(relative_jump); |
| 2947 | 2744 |
| 2948 __ Bind(&if_notobject); | 2745 __ Bind(&if_notobject); |
| 2949 __ Dispatch(); | 2746 __ Dispatch(); |
| 2950 } | 2747 } |
| 2951 | 2748 |
| 2952 // JumpIfNotHole <imm> | 2749 // JumpIfNotHole <imm> |
| 2953 // | 2750 // |
| 2954 // Jump by number of bytes represented by an immediate operand if the object | 2751 // Jump by number of bytes represented by an immediate operand if the object |
| 2955 // referenced by the accumulator is not the hole. | 2752 // referenced by the accumulator is not the hole. |
| 2956 void Interpreter::DoJumpIfNotHole(InterpreterAssembler* assembler) { | 2753 void InterpreterGenerator::DoJumpIfNotHole(InterpreterAssembler* assembler) { |
| 2957 Node* accumulator = __ GetAccumulator(); | 2754 Node* accumulator = __ GetAccumulator(); |
| 2958 Node* the_hole_value = __ HeapConstant(isolate_->factory()->the_hole_value()); | 2755 Node* the_hole_value = __ HeapConstant(isolate_->factory()->the_hole_value()); |
| 2959 Node* relative_jump = __ BytecodeOperandUImmWord(0); | 2756 Node* relative_jump = __ BytecodeOperandUImmWord(0); |
| 2960 __ JumpIfWordNotEqual(accumulator, the_hole_value, relative_jump); | 2757 __ JumpIfWordNotEqual(accumulator, the_hole_value, relative_jump); |
| 2961 } | 2758 } |
| 2962 | 2759 |
| 2963 // JumpIfNotHoleConstant <idx> | 2760 // JumpIfNotHoleConstant <idx> |
| 2964 // | 2761 // |
| 2965 // Jump by number of bytes in the Smi in the |idx| entry in the constant pool | 2762 // Jump by number of bytes in the Smi in the |idx| entry in the constant pool |
| 2966 // if the object referenced by the accumulator is not the hole constant. | 2763 // if the object referenced by the accumulator is not the hole constant. |
| 2967 void Interpreter::DoJumpIfNotHoleConstant(InterpreterAssembler* assembler) { | 2764 void InterpreterGenerator::DoJumpIfNotHoleConstant( |
| 2765 InterpreterAssembler* assembler) { |
| 2968 Node* accumulator = __ GetAccumulator(); | 2766 Node* accumulator = __ GetAccumulator(); |
| 2969 Node* the_hole_value = __ HeapConstant(isolate_->factory()->the_hole_value()); | 2767 Node* the_hole_value = __ HeapConstant(isolate_->factory()->the_hole_value()); |
| 2970 Node* index = __ BytecodeOperandIdx(0); | 2768 Node* index = __ BytecodeOperandIdx(0); |
| 2971 Node* relative_jump = __ LoadAndUntagConstantPoolEntry(index); | 2769 Node* relative_jump = __ LoadAndUntagConstantPoolEntry(index); |
| 2972 __ JumpIfWordNotEqual(accumulator, the_hole_value, relative_jump); | 2770 __ JumpIfWordNotEqual(accumulator, the_hole_value, relative_jump); |
| 2973 } | 2771 } |
| 2974 | 2772 |
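The jump handlers above come in pairs: the `<imm>` form encodes the relative jump distance directly in the bytecode stream, while the `*Constant <idx>` form only encodes a constant-pool index and stores the offset there as a Smi. The sketch below is a minimal, hedged illustration of that distinction in a toy dispatch loop; the bytecode names, operand layout, and `constant_pool` vector are invented for illustration and are not V8's actual encoding.

```cpp
// Toy illustration (not V8 code): conditional jumps whose target is either an
// immediate operand or an entry in a per-function constant pool.
#include <cstdint>
#include <iostream>
#include <vector>

enum ToyBytecode : uint8_t {
  kJumpIfZeroImm,       // operand: relative offset (immediate)
  kJumpIfZeroConstant,  // operand: index into the constant pool
  kLoadAccumulator,     // operand: value to place in the accumulator
  kReturn,
};

int Run(const std::vector<uint8_t>& code, const std::vector<int>& constant_pool) {
  int accumulator = 0;
  size_t pc = 0;
  while (true) {
    switch (code[pc]) {
      case kLoadAccumulator:
        accumulator = code[pc + 1];
        pc += 2;
        break;
      case kJumpIfZeroImm:
        // Offset is encoded directly in the bytecode stream.
        pc = (accumulator == 0) ? pc + code[pc + 1] : pc + 2;
        break;
      case kJumpIfZeroConstant:
        // Only the pool index is encoded; the offset lives in the pool, which
        // is how large offsets avoid widening the bytecode operand itself.
        pc = (accumulator == 0) ? pc + constant_pool[code[pc + 1]] : pc + 2;
        break;
      case kReturn:
        return accumulator;
    }
  }
}

int main() {
  // LoadAccumulator 0; JumpIfZeroConstant [0] -> skips LoadAccumulator 7; Return.
  std::vector<uint8_t> code = {kLoadAccumulator, 0, kJumpIfZeroConstant, 0,
                               kLoadAccumulator, 7, kReturn};
  std::vector<int> pool = {4};           // relative offset stored as a pool entry
  std::cout << Run(code, pool) << "\n";  // prints 0
}
```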
| 2975 // JumpLoop <imm> <loop_depth> | 2773 // JumpLoop <imm> <loop_depth> |
| 2976 // | 2774 // |
| 2977 // Jump by number of bytes represented by the immediate operand |imm|. Also | 2775 // Jump by number of bytes represented by the immediate operand |imm|. Also |
| 2978 // performs a loop nesting check and potentially triggers OSR in case the | 2776 // performs a loop nesting check and potentially triggers OSR in case the |
| 2979 // current OSR level matches (or exceeds) the specified |loop_depth|. | 2777 // current OSR level matches (or exceeds) the specified |loop_depth|. |
| 2980 void Interpreter::DoJumpLoop(InterpreterAssembler* assembler) { | 2778 void InterpreterGenerator::DoJumpLoop(InterpreterAssembler* assembler) { |
| 2981 Node* relative_jump = __ BytecodeOperandUImmWord(0); | 2779 Node* relative_jump = __ BytecodeOperandUImmWord(0); |
| 2982 Node* loop_depth = __ BytecodeOperandImm(1); | 2780 Node* loop_depth = __ BytecodeOperandImm(1); |
| 2983 Node* osr_level = __ LoadOSRNestingLevel(); | 2781 Node* osr_level = __ LoadOSRNestingLevel(); |
| 2984 | 2782 |
| 2985 // Check if OSR points at the given {loop_depth} are armed by comparing it to | 2783 // Check if OSR points at the given {loop_depth} are armed by comparing it to |
| 2986 // the current {osr_level} loaded from the header of the BytecodeArray. | 2784 // the current {osr_level} loaded from the header of the BytecodeArray. |
| 2987 Label ok(assembler), osr_armed(assembler, Label::kDeferred); | 2785 Label ok(assembler), osr_armed(assembler, Label::kDeferred); |
| 2988 Node* condition = __ Int32GreaterThanOrEqual(loop_depth, osr_level); | 2786 Node* condition = __ Int32GreaterThanOrEqual(loop_depth, osr_level); |
| 2989 __ Branch(condition, &ok, &osr_armed); | 2787 __ Branch(condition, &ok, &osr_armed); |
| 2990 | 2788 |
| 2991 __ Bind(&ok); | 2789 __ Bind(&ok); |
| 2992 __ JumpBackward(relative_jump); | 2790 __ JumpBackward(relative_jump); |
| 2993 | 2791 |
| 2994 __ Bind(&osr_armed); | 2792 __ Bind(&osr_armed); |
| 2995 { | 2793 { |
| 2996 Callable callable = CodeFactory::InterpreterOnStackReplacement(isolate_); | 2794 Callable callable = CodeFactory::InterpreterOnStackReplacement(isolate_); |
| 2997 Node* target = __ HeapConstant(callable.code()); | 2795 Node* target = __ HeapConstant(callable.code()); |
| 2998 Node* context = __ GetContext(); | 2796 Node* context = __ GetContext(); |
| 2999 __ CallStub(callable.descriptor(), target, context); | 2797 __ CallStub(callable.descriptor(), target, context); |
| 3000 __ JumpBackward(relative_jump); | 2798 __ JumpBackward(relative_jump); |
| 3001 } | 2799 } |
| 3002 } | 2800 } |
| 3003 | 2801 |
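DoJumpLoop only diverts to on-stack replacement when the armed OSR level loaded from the BytecodeArray header exceeds the loop's static nesting depth; otherwise it simply takes the backward jump. A rough sketch of that check follows, with `osr_nesting_level` standing in (as an assumption) for the header field the handler loads and the OSR call reduced to a print.

```cpp
// Toy illustration (not V8 code): choosing between a plain backward jump and
// an OSR attempt, mirroring the DoJumpLoop check
// (loop_depth >= osr_level means "not armed for this loop").
#include <iostream>

struct ToyBytecodeArray {
  // The runtime bumps this to arm OSR; 0 means no loop is armed.
  // Field name is illustrative.
  int osr_nesting_level = 0;
};

void HandleJumpLoop(const ToyBytecodeArray& array, int loop_depth, int* pc,
                    int relative_jump) {
  if (loop_depth >= array.osr_nesting_level) {
    // Common case: just take the backward jump to the loop header.
    *pc -= relative_jump;
  } else {
    // Deferred case: the OSR machinery gets a chance to replace this frame
    // with optimized code before the jump is taken.
    std::cout << "attempting on-stack replacement at depth " << loop_depth << "\n";
    *pc -= relative_jump;
  }
}

int main() {
  ToyBytecodeArray array;
  int pc = 100;
  HandleJumpLoop(array, /*loop_depth=*/0, &pc, /*relative_jump=*/40);  // plain jump
  array.osr_nesting_level = 2;  // runtime arms shallow loops
  HandleJumpLoop(array, /*loop_depth=*/0, &pc, /*relative_jump=*/40);  // OSR path
  std::cout << "pc = " << pc << "\n";  // pc = 20
}
```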
| 3004 // CreateRegExpLiteral <pattern_idx> <literal_idx> <flags> | 2802 // CreateRegExpLiteral <pattern_idx> <literal_idx> <flags> |
| 3005 // | 2803 // |
| 3006 // Creates a regular expression literal for literal index <literal_idx> with | 2804 // Creates a regular expression literal for literal index <literal_idx> with |
| 3007 // <flags> and the pattern in <pattern_idx>. | 2805 // <flags> and the pattern in <pattern_idx>. |
| 3008 void Interpreter::DoCreateRegExpLiteral(InterpreterAssembler* assembler) { | 2806 void InterpreterGenerator::DoCreateRegExpLiteral( |
| 2807 InterpreterAssembler* assembler) { |
| 3009 Node* index = __ BytecodeOperandIdx(0); | 2808 Node* index = __ BytecodeOperandIdx(0); |
| 3010 Node* pattern = __ LoadConstantPoolEntry(index); | 2809 Node* pattern = __ LoadConstantPoolEntry(index); |
| 3011 Node* literal_index = __ BytecodeOperandIdxSmi(1); | 2810 Node* literal_index = __ BytecodeOperandIdxSmi(1); |
| 3012 Node* flags = __ SmiFromWord32(__ BytecodeOperandFlag(2)); | 2811 Node* flags = __ SmiFromWord32(__ BytecodeOperandFlag(2)); |
| 3013 Node* closure = __ LoadRegister(Register::function_closure()); | 2812 Node* closure = __ LoadRegister(Register::function_closure()); |
| 3014 Node* context = __ GetContext(); | 2813 Node* context = __ GetContext(); |
| 3015 ConstructorBuiltinsAssembler constructor_assembler(assembler->state()); | 2814 ConstructorBuiltinsAssembler constructor_assembler(assembler->state()); |
| 3016 Node* result = constructor_assembler.EmitFastCloneRegExp( | 2815 Node* result = constructor_assembler.EmitFastCloneRegExp( |
| 3017 closure, literal_index, pattern, flags, context); | 2816 closure, literal_index, pattern, flags, context); |
| 3018 __ SetAccumulator(result); | 2817 __ SetAccumulator(result); |
| 3019 __ Dispatch(); | 2818 __ Dispatch(); |
| 3020 } | 2819 } |
| 3021 | 2820 |
| 3022 // CreateArrayLiteral <element_idx> <literal_idx> <flags> | 2821 // CreateArrayLiteral <element_idx> <literal_idx> <flags> |
| 3023 // | 2822 // |
| 3024 // Creates an array literal for literal index <literal_idx> with | 2823 // Creates an array literal for literal index <literal_idx> with |
| 3025 // CreateArrayLiteral flags <flags> and constant elements in <element_idx>. | 2824 // CreateArrayLiteral flags <flags> and constant elements in <element_idx>. |
| 3026 void Interpreter::DoCreateArrayLiteral(InterpreterAssembler* assembler) { | 2825 void InterpreterGenerator::DoCreateArrayLiteral( |
| 2826 InterpreterAssembler* assembler) { |
| 3027 Node* literal_index = __ BytecodeOperandIdxSmi(1); | 2827 Node* literal_index = __ BytecodeOperandIdxSmi(1); |
| 3028 Node* closure = __ LoadRegister(Register::function_closure()); | 2828 Node* closure = __ LoadRegister(Register::function_closure()); |
| 3029 Node* context = __ GetContext(); | 2829 Node* context = __ GetContext(); |
| 3030 Node* bytecode_flags = __ BytecodeOperandFlag(2); | 2830 Node* bytecode_flags = __ BytecodeOperandFlag(2); |
| 3031 | 2831 |
| 3032 Label fast_shallow_clone(assembler), | 2832 Label fast_shallow_clone(assembler), |
| 3033 call_runtime(assembler, Label::kDeferred); | 2833 call_runtime(assembler, Label::kDeferred); |
| 3034 __ Branch(__ IsSetWord32<CreateArrayLiteralFlags::FastShallowCloneBit>( | 2834 __ Branch(__ IsSetWord32<CreateArrayLiteralFlags::FastShallowCloneBit>( |
| 3035 bytecode_flags), | 2835 bytecode_flags), |
| 3036 &fast_shallow_clone, &call_runtime); | 2836 &fast_shallow_clone, &call_runtime); |
| (...skipping 20 matching lines...) |
| 3057 literal_index, constant_elements, flags); | 2857 literal_index, constant_elements, flags); |
| 3058 __ SetAccumulator(result); | 2858 __ SetAccumulator(result); |
| 3059 __ Dispatch(); | 2859 __ Dispatch(); |
| 3060 } | 2860 } |
| 3061 } | 2861 } |
| 3062 | 2862 |
| 3063 // CreateObjectLiteral <element_idx> <literal_idx> <flags> | 2863 // CreateObjectLiteral <element_idx> <literal_idx> <flags> |
| 3064 // | 2864 // |
| 3065 // Creates an object literal for literal index <literal_idx> with | 2865 // Creates an object literal for literal index <literal_idx> with |
| 3066 // CreateObjectLiteralFlags <flags> and constant elements in <element_idx>. | 2866 // CreateObjectLiteralFlags <flags> and constant elements in <element_idx>. |
| 3067 void Interpreter::DoCreateObjectLiteral(InterpreterAssembler* assembler) { | 2867 void InterpreterGenerator::DoCreateObjectLiteral( |
| 2868 InterpreterAssembler* assembler) { |
| 3068 Node* literal_index = __ BytecodeOperandIdxSmi(1); | 2869 Node* literal_index = __ BytecodeOperandIdxSmi(1); |
| 3069 Node* bytecode_flags = __ BytecodeOperandFlag(2); | 2870 Node* bytecode_flags = __ BytecodeOperandFlag(2); |
| 3070 Node* closure = __ LoadRegister(Register::function_closure()); | 2871 Node* closure = __ LoadRegister(Register::function_closure()); |
| 3071 | 2872 |
| 3072 // Check if we can do a fast clone or have to call the runtime. | 2873 // Check if we can do a fast clone or have to call the runtime. |
| 3073 Label if_fast_clone(assembler), | 2874 Label if_fast_clone(assembler), |
| 3074 if_not_fast_clone(assembler, Label::kDeferred); | 2875 if_not_fast_clone(assembler, Label::kDeferred); |
| 3075 Node* fast_clone_properties_count = __ DecodeWordFromWord32< | 2876 Node* fast_clone_properties_count = __ DecodeWordFromWord32< |
| 3076 CreateObjectLiteralFlags::FastClonePropertiesCountBits>(bytecode_flags); | 2877 CreateObjectLiteralFlags::FastClonePropertiesCountBits>(bytecode_flags); |
| 3077 __ Branch(__ WordNotEqual(fast_clone_properties_count, __ IntPtrConstant(0)), | 2878 __ Branch(__ WordNotEqual(fast_clone_properties_count, __ IntPtrConstant(0)), |
| (...skipping 28 matching lines...) |
| 3106 __ StoreRegister(result, __ BytecodeOperandReg(3)); | 2907 __ StoreRegister(result, __ BytecodeOperandReg(3)); |
| 3107 // TODO(klaasb) build a single dispatch once the call is inlined | 2908 // TODO(klaasb) build a single dispatch once the call is inlined |
| 3108 __ Dispatch(); | 2909 __ Dispatch(); |
| 3109 } | 2910 } |
| 3110 } | 2911 } |
| 3111 | 2912 |
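Both literal handlers branch on bit fields packed into the single `<flags>` operand (e.g. `CreateArrayLiteralFlags::FastShallowCloneBit`, `CreateObjectLiteralFlags::FastClonePropertiesCountBits`); that is what the `IsSetWord32` / `DecodeWordFromWord32` calls decode. Below is a small stand-alone sketch of that packing scheme; the field layout and names are assumptions made up for illustration.

```cpp
// Toy illustration (not V8 code): packing a boolean and a small counter into
// one flags operand and decoding them the way the literal handlers do with
// IsSetWord32 / DecodeWordFromWord32 over their *Flags bit fields.
#include <cstdint>
#include <iostream>

// Hypothetical layout: bit 0 = fast shallow clone, bits 1..4 = properties count.
struct FastShallowCloneBit {
  static constexpr uint32_t kShift = 0, kMask = 0x1;
};
struct FastClonePropertiesCountBits {
  static constexpr uint32_t kShift = 1, kMask = 0xF << 1;
};

template <class Field>
uint32_t Decode(uint32_t flags) {
  return (flags & Field::kMask) >> Field::kShift;
}

template <class Field>
bool IsSet(uint32_t flags) {
  return (flags & Field::kMask) != 0;
}

int main() {
  uint32_t flags = (1u << FastShallowCloneBit::kShift) |
                   (6u << FastClonePropertiesCountBits::kShift);
  if (IsSet<FastShallowCloneBit>(flags)) {
    std::cout << "fast shallow clone path\n";
  }
  std::cout << "fast-clone properties: "
            << Decode<FastClonePropertiesCountBits>(flags) << "\n";  // 6
}
```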
| 3112 // CreateClosure <index> <slot> <tenured> | 2913 // CreateClosure <index> <slot> <tenured> |
| 3113 // | 2914 // |
| 3114 // Creates a new closure for SharedFunctionInfo at position |index| in the | 2915 // Creates a new closure for SharedFunctionInfo at position |index| in the |
| 3115 // constant pool and with the PretenureFlag <tenured>. | 2916 // constant pool and with the PretenureFlag <tenured>. |
| 3116 void Interpreter::DoCreateClosure(InterpreterAssembler* assembler) { | 2917 void InterpreterGenerator::DoCreateClosure(InterpreterAssembler* assembler) { |
| 3117 Node* index = __ BytecodeOperandIdx(0); | 2918 Node* index = __ BytecodeOperandIdx(0); |
| 3118 Node* shared = __ LoadConstantPoolEntry(index); | 2919 Node* shared = __ LoadConstantPoolEntry(index); |
| 3119 Node* flags = __ BytecodeOperandFlag(2); | 2920 Node* flags = __ BytecodeOperandFlag(2); |
| 3120 Node* context = __ GetContext(); | 2921 Node* context = __ GetContext(); |
| 3121 | 2922 |
| 3122 Label call_runtime(assembler, Label::kDeferred); | 2923 Label call_runtime(assembler, Label::kDeferred); |
| 3123 __ GotoIfNot(__ IsSetWord32<CreateClosureFlags::FastNewClosureBit>(flags), | 2924 __ GotoIfNot(__ IsSetWord32<CreateClosureFlags::FastNewClosureBit>(flags), |
| 3124 &call_runtime); | 2925 &call_runtime); |
| 3125 ConstructorBuiltinsAssembler constructor_assembler(assembler->state()); | 2926 ConstructorBuiltinsAssembler constructor_assembler(assembler->state()); |
| 3126 Node* vector_index = __ BytecodeOperandIdx(1); | 2927 Node* vector_index = __ BytecodeOperandIdx(1); |
| (...skipping 16 matching lines...) |
| 3143 feedback_vector, vector_index, tenured); | 2944 feedback_vector, vector_index, tenured); |
| 3144 __ SetAccumulator(result); | 2945 __ SetAccumulator(result); |
| 3145 __ Dispatch(); | 2946 __ Dispatch(); |
| 3146 } | 2947 } |
| 3147 } | 2948 } |
| 3148 | 2949 |
| 3149 // CreateBlockContext <index> | 2950 // CreateBlockContext <index> |
| 3150 // | 2951 // |
| 3151 // Creates a new block context with the scope info constant at |index| and the | 2952 // Creates a new block context with the scope info constant at |index| and the |
| 3152 // closure in the accumulator. | 2953 // closure in the accumulator. |
| 3153 void Interpreter::DoCreateBlockContext(InterpreterAssembler* assembler) { | 2954 void InterpreterGenerator::DoCreateBlockContext( |
| 2955 InterpreterAssembler* assembler) { |
| 3154 Node* index = __ BytecodeOperandIdx(0); | 2956 Node* index = __ BytecodeOperandIdx(0); |
| 3155 Node* scope_info = __ LoadConstantPoolEntry(index); | 2957 Node* scope_info = __ LoadConstantPoolEntry(index); |
| 3156 Node* closure = __ GetAccumulator(); | 2958 Node* closure = __ GetAccumulator(); |
| 3157 Node* context = __ GetContext(); | 2959 Node* context = __ GetContext(); |
| 3158 __ SetAccumulator( | 2960 __ SetAccumulator( |
| 3159 __ CallRuntime(Runtime::kPushBlockContext, context, scope_info, closure)); | 2961 __ CallRuntime(Runtime::kPushBlockContext, context, scope_info, closure)); |
| 3160 __ Dispatch(); | 2962 __ Dispatch(); |
| 3161 } | 2963 } |
| 3162 | 2964 |
| 3163 // CreateCatchContext <exception> <name_idx> <scope_info_idx> | 2965 // CreateCatchContext <exception> <name_idx> <scope_info_idx> |
| 3164 // | 2966 // |
| 3165 // Creates a new context for a catch block with the |exception| in a register, | 2967 // Creates a new context for a catch block with the |exception| in a register, |
| 3166 // the variable name at |name_idx|, the ScopeInfo at |scope_info_idx|, and the | 2968 // the variable name at |name_idx|, the ScopeInfo at |scope_info_idx|, and the |
| 3167 // closure in the accumulator. | 2969 // closure in the accumulator. |
| 3168 void Interpreter::DoCreateCatchContext(InterpreterAssembler* assembler) { | 2970 void InterpreterGenerator::DoCreateCatchContext( |
| 2971 InterpreterAssembler* assembler) { |
| 3169 Node* exception_reg = __ BytecodeOperandReg(0); | 2972 Node* exception_reg = __ BytecodeOperandReg(0); |
| 3170 Node* exception = __ LoadRegister(exception_reg); | 2973 Node* exception = __ LoadRegister(exception_reg); |
| 3171 Node* name_idx = __ BytecodeOperandIdx(1); | 2974 Node* name_idx = __ BytecodeOperandIdx(1); |
| 3172 Node* name = __ LoadConstantPoolEntry(name_idx); | 2975 Node* name = __ LoadConstantPoolEntry(name_idx); |
| 3173 Node* scope_info_idx = __ BytecodeOperandIdx(2); | 2976 Node* scope_info_idx = __ BytecodeOperandIdx(2); |
| 3174 Node* scope_info = __ LoadConstantPoolEntry(scope_info_idx); | 2977 Node* scope_info = __ LoadConstantPoolEntry(scope_info_idx); |
| 3175 Node* closure = __ GetAccumulator(); | 2978 Node* closure = __ GetAccumulator(); |
| 3176 Node* context = __ GetContext(); | 2979 Node* context = __ GetContext(); |
| 3177 __ SetAccumulator(__ CallRuntime(Runtime::kPushCatchContext, context, name, | 2980 __ SetAccumulator(__ CallRuntime(Runtime::kPushCatchContext, context, name, |
| 3178 exception, scope_info, closure)); | 2981 exception, scope_info, closure)); |
| 3179 __ Dispatch(); | 2982 __ Dispatch(); |
| 3180 } | 2983 } |
| 3181 | 2984 |
| 3182 // CreateFunctionContext <slots> | 2985 // CreateFunctionContext <slots> |
| 3183 // | 2986 // |
| 3184 // Creates a new context with |slots| slots for the function closure. | 2987 // Creates a new context with |slots| slots for the function closure. |
| 3185 void Interpreter::DoCreateFunctionContext(InterpreterAssembler* assembler) { | 2988 void InterpreterGenerator::DoCreateFunctionContext( |
| 2989 InterpreterAssembler* assembler) { |
| 3186 Node* closure = __ LoadRegister(Register::function_closure()); | 2990 Node* closure = __ LoadRegister(Register::function_closure()); |
| 3187 Node* slots = __ BytecodeOperandUImm(0); | 2991 Node* slots = __ BytecodeOperandUImm(0); |
| 3188 Node* context = __ GetContext(); | 2992 Node* context = __ GetContext(); |
| 3189 ConstructorBuiltinsAssembler constructor_assembler(assembler->state()); | 2993 ConstructorBuiltinsAssembler constructor_assembler(assembler->state()); |
| 3190 __ SetAccumulator(constructor_assembler.EmitFastNewFunctionContext( | 2994 __ SetAccumulator(constructor_assembler.EmitFastNewFunctionContext( |
| 3191 closure, slots, context, FUNCTION_SCOPE)); | 2995 closure, slots, context, FUNCTION_SCOPE)); |
| 3192 __ Dispatch(); | 2996 __ Dispatch(); |
| 3193 } | 2997 } |
| 3194 | 2998 |
| 3195 // CreateEvalContext <slots> | 2999 // CreateEvalContext <slots> |
| 3196 // | 3000 // |
| 3197 // Creates a new context with |slots| slots for an eval closure. | 3001 // Creates a new context with |slots| slots for an eval closure. |
| 3198 void Interpreter::DoCreateEvalContext(InterpreterAssembler* assembler) { | 3002 void InterpreterGenerator::DoCreateEvalContext( |
| 3003 InterpreterAssembler* assembler) { |
| 3199 Node* closure = __ LoadRegister(Register::function_closure()); | 3004 Node* closure = __ LoadRegister(Register::function_closure()); |
| 3200 Node* slots = __ BytecodeOperandUImm(0); | 3005 Node* slots = __ BytecodeOperandUImm(0); |
| 3201 Node* context = __ GetContext(); | 3006 Node* context = __ GetContext(); |
| 3202 ConstructorBuiltinsAssembler constructor_assembler(assembler->state()); | 3007 ConstructorBuiltinsAssembler constructor_assembler(assembler->state()); |
| 3203 __ SetAccumulator(constructor_assembler.EmitFastNewFunctionContext( | 3008 __ SetAccumulator(constructor_assembler.EmitFastNewFunctionContext( |
| 3204 closure, slots, context, EVAL_SCOPE)); | 3009 closure, slots, context, EVAL_SCOPE)); |
| 3205 __ Dispatch(); | 3010 __ Dispatch(); |
| 3206 } | 3011 } |
| 3207 | 3012 |
| 3208 // CreateWithContext <register> <scope_info_idx> | 3013 // CreateWithContext <register> <scope_info_idx> |
| 3209 // | 3014 // |
| 3210 // Creates a new context with the ScopeInfo at |scope_info_idx| for a | 3015 // Creates a new context with the ScopeInfo at |scope_info_idx| for a |
| 3211 // with-statement with the object in |register| and the closure in the | 3016 // with-statement with the object in |register| and the closure in the |
| 3212 // accumulator. | 3017 // accumulator. |
| 3213 void Interpreter::DoCreateWithContext(InterpreterAssembler* assembler) { | 3018 void InterpreterGenerator::DoCreateWithContext( |
| 3019 InterpreterAssembler* assembler) { |
| 3214 Node* reg_index = __ BytecodeOperandReg(0); | 3020 Node* reg_index = __ BytecodeOperandReg(0); |
| 3215 Node* object = __ LoadRegister(reg_index); | 3021 Node* object = __ LoadRegister(reg_index); |
| 3216 Node* scope_info_idx = __ BytecodeOperandIdx(1); | 3022 Node* scope_info_idx = __ BytecodeOperandIdx(1); |
| 3217 Node* scope_info = __ LoadConstantPoolEntry(scope_info_idx); | 3023 Node* scope_info = __ LoadConstantPoolEntry(scope_info_idx); |
| 3218 Node* closure = __ GetAccumulator(); | 3024 Node* closure = __ GetAccumulator(); |
| 3219 Node* context = __ GetContext(); | 3025 Node* context = __ GetContext(); |
| 3220 __ SetAccumulator(__ CallRuntime(Runtime::kPushWithContext, context, object, | 3026 __ SetAccumulator(__ CallRuntime(Runtime::kPushWithContext, context, object, |
| 3221 scope_info, closure)); | 3027 scope_info, closure)); |
| 3222 __ Dispatch(); | 3028 __ Dispatch(); |
| 3223 } | 3029 } |
| 3224 | 3030 |
| 3225 // CreateMappedArguments | 3031 // CreateMappedArguments |
| 3226 // | 3032 // |
| 3227 // Creates a new mapped arguments object. | 3033 // Creates a new mapped arguments object. |
| 3228 void Interpreter::DoCreateMappedArguments(InterpreterAssembler* assembler) { | 3034 void InterpreterGenerator::DoCreateMappedArguments( |
| 3035 InterpreterAssembler* assembler) { |
| 3229 Node* closure = __ LoadRegister(Register::function_closure()); | 3036 Node* closure = __ LoadRegister(Register::function_closure()); |
| 3230 Node* context = __ GetContext(); | 3037 Node* context = __ GetContext(); |
| 3231 | 3038 |
| 3232 Label if_duplicate_parameters(assembler, Label::kDeferred); | 3039 Label if_duplicate_parameters(assembler, Label::kDeferred); |
| 3233 Label if_not_duplicate_parameters(assembler); | 3040 Label if_not_duplicate_parameters(assembler); |
| 3234 | 3041 |
| 3235 // Check if function has duplicate parameters. | 3042 // Check if function has duplicate parameters. |
| 3236 // TODO(rmcilroy): Remove this check when FastNewSloppyArgumentsStub supports | 3043 // TODO(rmcilroy): Remove this check when FastNewSloppyArgumentsStub supports |
| 3237 // duplicate parameters. | 3044 // duplicate parameters. |
| 3238 Node* shared_info = | 3045 Node* shared_info = |
| (...skipping 20 matching lines...) |
| 3259 Node* result = | 3066 Node* result = |
| 3260 __ CallRuntime(Runtime::kNewSloppyArguments_Generic, context, closure); | 3067 __ CallRuntime(Runtime::kNewSloppyArguments_Generic, context, closure); |
| 3261 __ SetAccumulator(result); | 3068 __ SetAccumulator(result); |
| 3262 __ Dispatch(); | 3069 __ Dispatch(); |
| 3263 } | 3070 } |
| 3264 } | 3071 } |
| 3265 | 3072 |
| 3266 // CreateUnmappedArguments | 3073 // CreateUnmappedArguments |
| 3267 // | 3074 // |
| 3268 // Creates a new unmapped arguments object. | 3075 // Creates a new unmapped arguments object. |
| 3269 void Interpreter::DoCreateUnmappedArguments(InterpreterAssembler* assembler) { | 3076 void InterpreterGenerator::DoCreateUnmappedArguments( |
| 3077 InterpreterAssembler* assembler) { |
| 3270 Node* context = __ GetContext(); | 3078 Node* context = __ GetContext(); |
| 3271 Node* closure = __ LoadRegister(Register::function_closure()); | 3079 Node* closure = __ LoadRegister(Register::function_closure()); |
| 3272 ArgumentsBuiltinsAssembler builtins_assembler(assembler->state()); | 3080 ArgumentsBuiltinsAssembler builtins_assembler(assembler->state()); |
| 3273 Node* result = | 3081 Node* result = |
| 3274 builtins_assembler.EmitFastNewStrictArguments(context, closure); | 3082 builtins_assembler.EmitFastNewStrictArguments(context, closure); |
| 3275 __ SetAccumulator(result); | 3083 __ SetAccumulator(result); |
| 3276 __ Dispatch(); | 3084 __ Dispatch(); |
| 3277 } | 3085 } |
| 3278 | 3086 |
| 3279 // CreateRestParameter | 3087 // CreateRestParameter |
| 3280 // | 3088 // |
| 3281 // Creates a new rest parameter array. | 3089 // Creates a new rest parameter array. |
| 3282 void Interpreter::DoCreateRestParameter(InterpreterAssembler* assembler) { | 3090 void InterpreterGenerator::DoCreateRestParameter( |
| 3091 InterpreterAssembler* assembler) { |
| 3283 Node* closure = __ LoadRegister(Register::function_closure()); | 3092 Node* closure = __ LoadRegister(Register::function_closure()); |
| 3284 Node* context = __ GetContext(); | 3093 Node* context = __ GetContext(); |
| 3285 ArgumentsBuiltinsAssembler builtins_assembler(assembler->state()); | 3094 ArgumentsBuiltinsAssembler builtins_assembler(assembler->state()); |
| 3286 Node* result = builtins_assembler.EmitFastNewRestParameter(context, closure); | 3095 Node* result = builtins_assembler.EmitFastNewRestParameter(context, closure); |
| 3287 __ SetAccumulator(result); | 3096 __ SetAccumulator(result); |
| 3288 __ Dispatch(); | 3097 __ Dispatch(); |
| 3289 } | 3098 } |
| 3290 | 3099 |
| 3291 // StackCheck | 3100 // StackCheck |
| 3292 // | 3101 // |
| 3293 // Performs a stack guard check. | 3102 // Performs a stack guard check. |
| 3294 void Interpreter::DoStackCheck(InterpreterAssembler* assembler) { | 3103 void InterpreterGenerator::DoStackCheck(InterpreterAssembler* assembler) { |
| 3295 Label ok(assembler), stack_check_interrupt(assembler, Label::kDeferred); | 3104 Label ok(assembler), stack_check_interrupt(assembler, Label::kDeferred); |
| 3296 | 3105 |
| 3297 Node* interrupt = __ StackCheckTriggeredInterrupt(); | 3106 Node* interrupt = __ StackCheckTriggeredInterrupt(); |
| 3298 __ Branch(interrupt, &stack_check_interrupt, &ok); | 3107 __ Branch(interrupt, &stack_check_interrupt, &ok); |
| 3299 | 3108 |
| 3300 __ Bind(&ok); | 3109 __ Bind(&ok); |
| 3301 __ Dispatch(); | 3110 __ Dispatch(); |
| 3302 | 3111 |
| 3303 __ Bind(&stack_check_interrupt); | 3112 __ Bind(&stack_check_interrupt); |
| 3304 { | 3113 { |
| 3305 Node* context = __ GetContext(); | 3114 Node* context = __ GetContext(); |
| 3306 __ CallRuntime(Runtime::kStackGuard, context); | 3115 __ CallRuntime(Runtime::kStackGuard, context); |
| 3307 __ Dispatch(); | 3116 __ Dispatch(); |
| 3308 } | 3117 } |
| 3309 } | 3118 } |
| 3310 | 3119 |
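DoStackCheck asks the assembler whether the stack guard has been triggered and only on the deferred path calls `Runtime::kStackGuard` before dispatching. Conceptually this is a comparison of the current stack position against a limit the runtime can move to request an interrupt; the sketch below illustrates that idea under that assumption, with an invented `ToyIsolate` rather than V8's real stack guard.

```cpp
// Toy illustration (not V8 code): a stack guard where the runtime moves the
// limit so the next StackCheck takes the interrupt path.
#include <cstdint>
#include <iostream>

struct ToyIsolate {
  uintptr_t real_stack_limit = 0x1000;  // genuine overflow threshold
  uintptr_t stack_limit = 0x1000;       // may be raised to signal an interrupt
};

void RequestInterrupt(ToyIsolate* isolate) {
  // Raising the limit above any plausible stack position makes every
  // subsequent check fail, without a separate flag to poll.
  isolate->stack_limit = UINTPTR_MAX;
}

void HandleStackCheck(ToyIsolate* isolate, uintptr_t current_sp) {
  if (current_sp >= isolate->stack_limit) {
    return;  // common case: fall through and dispatch the next bytecode
  }
  // Deferred case: let the runtime service whatever triggered the check.
  std::cout << "StackCheck: calling into the runtime\n";
  isolate->stack_limit = isolate->real_stack_limit;  // interrupt handled
}

int main() {
  ToyIsolate isolate;
  HandleStackCheck(&isolate, /*current_sp=*/0x8000);  // passes silently
  RequestInterrupt(&isolate);
  HandleStackCheck(&isolate, /*current_sp=*/0x8000);  // takes the runtime path
}
```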
| 3311 // SetPendingMessage | 3120 // SetPendingMessage |
| 3312 // | 3121 // |
| 3313 // Sets the pending message to the value in the accumulator, and returns the | 3122 // Sets the pending message to the value in the accumulator, and returns the |
| 3314 // previous pending message in the accumulator. | 3123 // previous pending message in the accumulator. |
| 3315 void Interpreter::DoSetPendingMessage(InterpreterAssembler* assembler) { | 3124 void InterpreterGenerator::DoSetPendingMessage( |
| 3125 InterpreterAssembler* assembler) { |
| 3316 Node* pending_message = __ ExternalConstant( | 3126 Node* pending_message = __ ExternalConstant( |
| 3317 ExternalReference::address_of_pending_message_obj(isolate_)); | 3127 ExternalReference::address_of_pending_message_obj(isolate_)); |
| 3318 Node* previous_message = | 3128 Node* previous_message = |
| 3319 __ Load(MachineType::TaggedPointer(), pending_message); | 3129 __ Load(MachineType::TaggedPointer(), pending_message); |
| 3320 Node* new_message = __ GetAccumulator(); | 3130 Node* new_message = __ GetAccumulator(); |
| 3321 __ StoreNoWriteBarrier(MachineRepresentation::kTaggedPointer, pending_message, | 3131 __ StoreNoWriteBarrier(MachineRepresentation::kTaggedPointer, pending_message, |
| 3322 new_message); | 3132 new_message); |
| 3323 __ SetAccumulator(previous_message); | 3133 __ SetAccumulator(previous_message); |
| 3324 __ Dispatch(); | 3134 __ Dispatch(); |
| 3325 } | 3135 } |
| 3326 | 3136 |
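SetPendingMessage is effectively a swap on a single external slot: it returns whatever message was pending and installs the accumulator value in its place, which is what lets try/finally code save and restore the pending message around a finally block. The equivalent in plain C++ is just `std::exchange`, sketched here with a hypothetical `pending_message` slot standing in for the isolate's.

```cpp
// Toy illustration (not V8 code): SetPendingMessage as a swap that yields the
// previous value, the pattern try/finally uses to save and restore it.
#include <iostream>
#include <string>
#include <utility>

std::string pending_message;  // stands in for the isolate's pending-message slot

std::string SetPendingMessage(std::string new_message) {
  return std::exchange(pending_message, std::move(new_message));
}

int main() {
  pending_message = "TypeError: x is not a function";
  // Entering a finally block: stash the current message and clear the slot.
  std::string saved = SetPendingMessage("");
  // ... finally body runs, possibly setting and clearing its own messages ...
  // Leaving the finally block: restore what was pending before.
  SetPendingMessage(saved);
  std::cout << pending_message << "\n";  // TypeError: x is not a function
}
```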
| 3327 // Throw | 3137 // Throw |
| 3328 // | 3138 // |
| 3329 // Throws the exception in the accumulator. | 3139 // Throws the exception in the accumulator. |
| 3330 void Interpreter::DoThrow(InterpreterAssembler* assembler) { | 3140 void InterpreterGenerator::DoThrow(InterpreterAssembler* assembler) { |
| 3331 Node* exception = __ GetAccumulator(); | 3141 Node* exception = __ GetAccumulator(); |
| 3332 Node* context = __ GetContext(); | 3142 Node* context = __ GetContext(); |
| 3333 __ CallRuntime(Runtime::kThrow, context, exception); | 3143 __ CallRuntime(Runtime::kThrow, context, exception); |
| 3334 // We shouldn't ever return from a throw. | 3144 // We shouldn't ever return from a throw. |
| 3335 __ Abort(kUnexpectedReturnFromThrow); | 3145 __ Abort(kUnexpectedReturnFromThrow); |
| 3336 } | 3146 } |
| 3337 | 3147 |
| 3338 // ReThrow | 3148 // ReThrow |
| 3339 // | 3149 // |
| 3340 // Re-throws the exception in the accumulator. | 3150 // Re-throws the exception in the accumulator. |
| 3341 void Interpreter::DoReThrow(InterpreterAssembler* assembler) { | 3151 void InterpreterGenerator::DoReThrow(InterpreterAssembler* assembler) { |
| 3342 Node* exception = __ GetAccumulator(); | 3152 Node* exception = __ GetAccumulator(); |
| 3343 Node* context = __ GetContext(); | 3153 Node* context = __ GetContext(); |
| 3344 __ CallRuntime(Runtime::kReThrow, context, exception); | 3154 __ CallRuntime(Runtime::kReThrow, context, exception); |
| 3345 // We shouldn't ever return from a throw. | 3155 // We shouldn't ever return from a throw. |
| 3346 __ Abort(kUnexpectedReturnFromThrow); | 3156 __ Abort(kUnexpectedReturnFromThrow); |
| 3347 } | 3157 } |
| 3348 | 3158 |
| 3349 // Return | 3159 // Return |
| 3350 // | 3160 // |
| 3351 // Return the value in the accumulator. | 3161 // Return the value in the accumulator. |
| 3352 void Interpreter::DoReturn(InterpreterAssembler* assembler) { | 3162 void InterpreterGenerator::DoReturn(InterpreterAssembler* assembler) { |
| 3353 __ UpdateInterruptBudgetOnReturn(); | 3163 __ UpdateInterruptBudgetOnReturn(); |
| 3354 Node* accumulator = __ GetAccumulator(); | 3164 Node* accumulator = __ GetAccumulator(); |
| 3355 __ Return(accumulator); | 3165 __ Return(accumulator); |
| 3356 } | 3166 } |
| 3357 | 3167 |
| 3358 // Debugger | 3168 // Debugger |
| 3359 // | 3169 // |
| 3360 // Call runtime to handle debugger statement. | 3170 // Call runtime to handle debugger statement. |
| 3361 void Interpreter::DoDebugger(InterpreterAssembler* assembler) { | 3171 void InterpreterGenerator::DoDebugger(InterpreterAssembler* assembler) { |
| 3362 Node* context = __ GetContext(); | 3172 Node* context = __ GetContext(); |
| 3363 __ CallStub(CodeFactory::HandleDebuggerStatement(isolate_), context); | 3173 __ CallStub(CodeFactory::HandleDebuggerStatement(isolate_), context); |
| 3364 __ Dispatch(); | 3174 __ Dispatch(); |
| 3365 } | 3175 } |
| 3366 | 3176 |
| 3367 // DebugBreak | 3177 // DebugBreak |
| 3368 // | 3178 // |
| 3369 // Call runtime to handle a debug break. | 3179 // Call runtime to handle a debug break. |
| 3370 #define DEBUG_BREAK(Name, ...) \ | 3180 #define DEBUG_BREAK(Name, ...) \ |
| 3371 void Interpreter::Do##Name(InterpreterAssembler* assembler) { \ | 3181 void InterpreterGenerator::Do##Name(InterpreterAssembler* assembler) { \ |
| 3372 Node* context = __ GetContext(); \ | 3182 Node* context = __ GetContext(); \ |
| 3373 Node* accumulator = __ GetAccumulator(); \ | 3183 Node* accumulator = __ GetAccumulator(); \ |
| 3374 Node* original_handler = \ | 3184 Node* original_handler = \ |
| 3375 __ CallRuntime(Runtime::kDebugBreakOnBytecode, context, accumulator); \ | 3185 __ CallRuntime(Runtime::kDebugBreakOnBytecode, context, accumulator); \ |
| 3376 __ MaybeDropFrames(context); \ | 3186 __ MaybeDropFrames(context); \ |
| 3377 __ DispatchToBytecodeHandler(original_handler); \ | 3187 __ DispatchToBytecodeHandler(original_handler); \ |
| 3378 } | 3188 } |
| 3379 DEBUG_BREAK_BYTECODE_LIST(DEBUG_BREAK); | 3189 DEBUG_BREAK_BYTECODE_LIST(DEBUG_BREAK); |
| 3380 #undef DEBUG_BREAK | 3190 #undef DEBUG_BREAK |
| 3381 | 3191 |
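The DEBUG_BREAK block uses the same X-macro trick as the class declaration's `BYTECODE_LIST` expansion: a list macro invokes a per-entry macro, so one handler body is stamped out for every debug-break bytecode. A minimal, self-contained version of the pattern is shown below; the list contents and handler body are made up for illustration.

```cpp
// Toy illustration (not V8 code): generating one handler per list entry with
// an X-macro, the same shape as DEBUG_BREAK_BYTECODE_LIST(DEBUG_BREAK).
#include <iostream>

#define TOY_BYTECODE_LIST(V) \
  V(DebugBreak0)             \
  V(DebugBreak1)             \
  V(DebugBreakWide)

struct ToyGenerator {
#define DECLARE_HANDLER(Name) void Do##Name();
  TOY_BYTECODE_LIST(DECLARE_HANDLER)
#undef DECLARE_HANDLER
};

// One definition is stamped out per list entry; the body is shared boilerplate.
#define DEFINE_HANDLER(Name)                           \
  void ToyGenerator::Do##Name() {                      \
    std::cout << "handler for " #Name " generated\n";  \
  }
TOY_BYTECODE_LIST(DEFINE_HANDLER)
#undef DEFINE_HANDLER

int main() {
  ToyGenerator gen;
  gen.DoDebugBreak0();
  gen.DoDebugBreakWide();
}
```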
| 3382 void Interpreter::BuildForInPrepareResult(Node* output_register, | 3192 void InterpreterGenerator::BuildForInPrepareResult( |
| 3383 Node* cache_type, Node* cache_array, | 3193 Node* output_register, Node* cache_type, Node* cache_array, |
| 3384 Node* cache_length, | 3194 Node* cache_length, InterpreterAssembler* assembler) { |
| 3385 InterpreterAssembler* assembler) { | |
| 3386 __ StoreRegister(cache_type, output_register); | 3195 __ StoreRegister(cache_type, output_register); |
| 3387 output_register = __ NextRegister(output_register); | 3196 output_register = __ NextRegister(output_register); |
| 3388 __ StoreRegister(cache_array, output_register); | 3197 __ StoreRegister(cache_array, output_register); |
| 3389 output_register = __ NextRegister(output_register); | 3198 output_register = __ NextRegister(output_register); |
| 3390 __ StoreRegister(cache_length, output_register); | 3199 __ StoreRegister(cache_length, output_register); |
| 3391 } | 3200 } |
| 3392 | 3201 |
| 3393 // ForInPrepare <receiver> <cache_info_triple> | 3202 // ForInPrepare <receiver> <cache_info_triple> |
| 3394 // | 3203 // |
| 3395 // Returns state for for..in loop execution based on the object in the register | 3204 // Returns state for for..in loop execution based on the object in the register |
| 3396 // |receiver|. The object must not be null or undefined and must have been | 3205 // |receiver|. The object must not be null or undefined and must have been |
| 3397 // converted to a receiver already. | 3206 // converted to a receiver already. |
| 3398 // The result is output in registers |cache_info_triple| to | 3207 // The result is output in registers |cache_info_triple| to |
| 3399 // |cache_info_triple + 2|, with the registers holding cache_type, cache_array, | 3208 // |cache_info_triple + 2|, with the registers holding cache_type, cache_array, |
| 3400 // and cache_length respectively. | 3209 // and cache_length respectively. |
| 3401 void Interpreter::DoForInPrepare(InterpreterAssembler* assembler) { | 3210 void InterpreterGenerator::DoForInPrepare(InterpreterAssembler* assembler) { |
| 3402 Node* object_register = __ BytecodeOperandReg(0); | 3211 Node* object_register = __ BytecodeOperandReg(0); |
| 3403 Node* output_register = __ BytecodeOperandReg(1); | 3212 Node* output_register = __ BytecodeOperandReg(1); |
| 3404 Node* receiver = __ LoadRegister(object_register); | 3213 Node* receiver = __ LoadRegister(object_register); |
| 3405 Node* context = __ GetContext(); | 3214 Node* context = __ GetContext(); |
| 3406 | 3215 |
| 3407 Node* cache_type; | 3216 Node* cache_type; |
| 3408 Node* cache_array; | 3217 Node* cache_array; |
| 3409 Node* cache_length; | 3218 Node* cache_length; |
| 3410 Label call_runtime(assembler, Label::kDeferred), | 3219 Label call_runtime(assembler, Label::kDeferred), |
| 3411 nothing_to_iterate(assembler, Label::kDeferred); | 3220 nothing_to_iterate(assembler, Label::kDeferred); |
| (...skipping 23 matching lines...) |
| 3435 // Receiver is null or undefined or descriptors are zero length. | 3244 // Receiver is null or undefined or descriptors are zero length. |
| 3436 Node* zero = __ SmiConstant(0); | 3245 Node* zero = __ SmiConstant(0); |
| 3437 BuildForInPrepareResult(output_register, zero, zero, zero, assembler); | 3246 BuildForInPrepareResult(output_register, zero, zero, zero, assembler); |
| 3438 __ Dispatch(); | 3247 __ Dispatch(); |
| 3439 } | 3248 } |
| 3440 } | 3249 } |
| 3441 | 3250 |
| 3442 // ForInNext <receiver> <index> <cache_info_pair> | 3251 // ForInNext <receiver> <index> <cache_info_pair> |
| 3443 // | 3252 // |
| 3444 // Returns the next enumerable property in the accumulator. | 3253 // Returns the next enumerable property in the accumulator. |
| 3445 void Interpreter::DoForInNext(InterpreterAssembler* assembler) { | 3254 void InterpreterGenerator::DoForInNext(InterpreterAssembler* assembler) { |
| 3446 Node* receiver_reg = __ BytecodeOperandReg(0); | 3255 Node* receiver_reg = __ BytecodeOperandReg(0); |
| 3447 Node* receiver = __ LoadRegister(receiver_reg); | 3256 Node* receiver = __ LoadRegister(receiver_reg); |
| 3448 Node* index_reg = __ BytecodeOperandReg(1); | 3257 Node* index_reg = __ BytecodeOperandReg(1); |
| 3449 Node* index = __ LoadRegister(index_reg); | 3258 Node* index = __ LoadRegister(index_reg); |
| 3450 Node* cache_type_reg = __ BytecodeOperandReg(2); | 3259 Node* cache_type_reg = __ BytecodeOperandReg(2); |
| 3451 Node* cache_type = __ LoadRegister(cache_type_reg); | 3260 Node* cache_type = __ LoadRegister(cache_type_reg); |
| 3452 Node* cache_array_reg = __ NextRegister(cache_type_reg); | 3261 Node* cache_array_reg = __ NextRegister(cache_type_reg); |
| 3453 Node* cache_array = __ LoadRegister(cache_array_reg); | 3262 Node* cache_array = __ LoadRegister(cache_array_reg); |
| 3454 | 3263 |
| 3455 // Load the next key from the enumeration array. | 3264 // Load the next key from the enumeration array. |
| (...skipping 25 matching lines...) |
| 3481 Callable callable = CodeFactory::ForInFilter(assembler->isolate()); | 3290 Callable callable = CodeFactory::ForInFilter(assembler->isolate()); |
| 3482 Node* result = __ CallStub(callable, context, key, receiver); | 3291 Node* result = __ CallStub(callable, context, key, receiver); |
| 3483 __ SetAccumulator(result); | 3292 __ SetAccumulator(result); |
| 3484 __ Dispatch(); | 3293 __ Dispatch(); |
| 3485 } | 3294 } |
| 3486 } | 3295 } |
| 3487 | 3296 |
| 3488 // ForInContinue <index> <cache_length> | 3297 // ForInContinue <index> <cache_length> |
| 3489 // | 3298 // |
| 3490 // Returns false if the end of the enumerable properties has been reached. | 3299 // Returns false if the end of the enumerable properties has been reached. |
| 3491 void Interpreter::DoForInContinue(InterpreterAssembler* assembler) { | 3300 void InterpreterGenerator::DoForInContinue(InterpreterAssembler* assembler) { |
| 3492 Node* index_reg = __ BytecodeOperandReg(0); | 3301 Node* index_reg = __ BytecodeOperandReg(0); |
| 3493 Node* index = __ LoadRegister(index_reg); | 3302 Node* index = __ LoadRegister(index_reg); |
| 3494 Node* cache_length_reg = __ BytecodeOperandReg(1); | 3303 Node* cache_length_reg = __ BytecodeOperandReg(1); |
| 3495 Node* cache_length = __ LoadRegister(cache_length_reg); | 3304 Node* cache_length = __ LoadRegister(cache_length_reg); |
| 3496 | 3305 |
| 3497 // Check if {index} is at {cache_length} already. | 3306 // Check if {index} is at {cache_length} already. |
| 3498 Label if_true(assembler), if_false(assembler), end(assembler); | 3307 Label if_true(assembler), if_false(assembler), end(assembler); |
| 3499 __ Branch(__ WordEqual(index, cache_length), &if_true, &if_false); | 3308 __ Branch(__ WordEqual(index, cache_length), &if_true, &if_false); |
| 3500 __ Bind(&if_true); | 3309 __ Bind(&if_true); |
| 3501 { | 3310 { |
| 3502 __ SetAccumulator(__ BooleanConstant(false)); | 3311 __ SetAccumulator(__ BooleanConstant(false)); |
| 3503 __ Goto(&end); | 3312 __ Goto(&end); |
| 3504 } | 3313 } |
| 3505 __ Bind(&if_false); | 3314 __ Bind(&if_false); |
| 3506 { | 3315 { |
| 3507 __ SetAccumulator(__ BooleanConstant(true)); | 3316 __ SetAccumulator(__ BooleanConstant(true)); |
| 3508 __ Goto(&end); | 3317 __ Goto(&end); |
| 3509 } | 3318 } |
| 3510 __ Bind(&end); | 3319 __ Bind(&end); |
| 3511 __ Dispatch(); | 3320 __ Dispatch(); |
| 3512 } | 3321 } |
| 3513 | 3322 |
| 3514 // ForInStep <index> | 3323 // ForInStep <index> |
| 3515 // | 3324 // |
| 3516 // Increments the loop counter in register |index| and stores the result | 3325 // Increments the loop counter in register |index| and stores the result |
| 3517 // in the accumulator. | 3326 // in the accumulator. |
| 3518 void Interpreter::DoForInStep(InterpreterAssembler* assembler) { | 3327 void InterpreterGenerator::DoForInStep(InterpreterAssembler* assembler) { |
| 3519 Node* index_reg = __ BytecodeOperandReg(0); | 3328 Node* index_reg = __ BytecodeOperandReg(0); |
| 3520 Node* index = __ LoadRegister(index_reg); | 3329 Node* index = __ LoadRegister(index_reg); |
| 3521 Node* one = __ SmiConstant(Smi::FromInt(1)); | 3330 Node* one = __ SmiConstant(Smi::FromInt(1)); |
| 3522 Node* result = __ SmiAdd(index, one); | 3331 Node* result = __ SmiAdd(index, one); |
| 3523 __ SetAccumulator(result); | 3332 __ SetAccumulator(result); |
| 3524 __ Dispatch(); | 3333 __ Dispatch(); |
| 3525 } | 3334 } |
| 3526 | 3335 |
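Together, ForInPrepare, ForInContinue, ForInNext, and ForInStep implement the loop protocol the bytecode generator emits for `for (key in obj)`: prepare fills the register triple (cache_type, cache_array, cache_length), continue tests the index against the length, next loads the key (filtering it if it is no longer a valid enumerable property), and step increments the index. The sketch below restates that protocol as plain functions over a toy receiver; the filtering step is reduced to "key still present", and cache_type is omitted, so this is only a rough model under those assumptions.

```cpp
// Toy illustration (not V8 code): the for-in register protocol as plain
// functions over a snapshot of the receiver's keys.
#include <cstddef>
#include <iostream>
#include <map>
#include <string>
#include <vector>

using ToyReceiver = std::map<std::string, int>;

struct ForInState {  // stands in for the cache_info_triple plus the index register
  std::vector<std::string> cache_array;  // enumeration snapshot
  size_t cache_length = 0;
  size_t index = 0;
};

ForInState ForInPrepare(const ToyReceiver& receiver) {
  ForInState state;
  for (const auto& entry : receiver) state.cache_array.push_back(entry.first);
  state.cache_length = state.cache_array.size();
  return state;
}

bool ForInContinue(const ForInState& state) {
  return state.index != state.cache_length;
}

// Returns an empty string for keys deleted since ForInPrepare (the "filter").
std::string ForInNext(const ToyReceiver& receiver, const ForInState& state) {
  const std::string& key = state.cache_array[state.index];
  return receiver.count(key) ? key : std::string();
}

void ForInStep(ForInState* state) { ++state->index; }

int main() {
  ToyReceiver obj = {{"a", 1}, {"b", 2}, {"c", 3}};
  ForInState state = ForInPrepare(obj);
  while (ForInContinue(state)) {
    std::string key = ForInNext(obj, state);
    if (!key.empty()) std::cout << key << "\n";  // a, b, c
    ForInStep(&state);
  }
}
```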
| 3527 // Wide | 3336 // Wide |
| 3528 // | 3337 // |
| 3529 // Prefix bytecode indicating next bytecode has wide (16-bit) operands. | 3338 // Prefix bytecode indicating next bytecode has wide (16-bit) operands. |
| 3530 void Interpreter::DoWide(InterpreterAssembler* assembler) { | 3339 void InterpreterGenerator::DoWide(InterpreterAssembler* assembler) { |
| 3531 __ DispatchWide(OperandScale::kDouble); | 3340 __ DispatchWide(OperandScale::kDouble); |
| 3532 } | 3341 } |
| 3533 | 3342 |
| 3534 // ExtraWide | 3343 // ExtraWide |
| 3535 // | 3344 // |
| 3536 // Prefix bytecode indicating next bytecode has extra-wide (32-bit) operands. | 3345 // Prefix bytecode indicating next bytecode has extra-wide (32-bit) operands. |
| 3537 void Interpreter::DoExtraWide(InterpreterAssembler* assembler) { | 3346 void InterpreterGenerator::DoExtraWide(InterpreterAssembler* assembler) { |
| 3538 __ DispatchWide(OperandScale::kQuadruple); | 3347 __ DispatchWide(OperandScale::kQuadruple); |
| 3539 } | 3348 } |
| 3540 | 3349 |
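Wide and ExtraWide are prefix bytecodes: their only job is to re-dispatch the following bytecode with its operands read at 2x or 4x the normal width, which is what `DispatchWide` does with the operand scale. The sketch below illustrates operand decoding under such a scale factor; the little-endian read helper, opcodes, and encoding are invented for illustration.

```cpp
// Toy illustration (not V8 code): a Wide/ExtraWide prefix that only changes
// the width at which the following bytecode's operand is decoded.
#include <cstdint>
#include <iostream>
#include <vector>

enum ToyBytecode : uint8_t { kWide = 0xFE, kExtraWide = 0xFF, kLdaSmi = 0x01 };

uint32_t ReadOperand(const std::vector<uint8_t>& code, size_t pos, int scale) {
  uint32_t value = 0;
  for (int i = 0; i < scale; ++i) value |= uint32_t{code[pos + i]} << (8 * i);
  return value;  // little-endian read of `scale` bytes
}

void Run(const std::vector<uint8_t>& code) {
  size_t pc = 0;
  int scale = 1;  // reset to 1 after every non-prefix bytecode
  while (pc < code.size()) {
    uint8_t bytecode = code[pc++];
    if (bytecode == kWide) { scale = 2; continue; }
    if (bytecode == kExtraWide) { scale = 4; continue; }
    if (bytecode == kLdaSmi) {
      std::cout << "LdaSmi " << ReadOperand(code, pc, scale)
                << " (operand width " << scale << ")\n";
      pc += scale;
    }
    scale = 1;
  }
}

int main() {
  // LdaSmi 5; Wide LdaSmi 0x1234; ExtraWide LdaSmi 0x12345678.
  Run({kLdaSmi, 0x05,
       kWide, kLdaSmi, 0x34, 0x12,
       kExtraWide, kLdaSmi, 0x78, 0x56, 0x34, 0x12});
}
```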
| 3541 // Illegal | 3350 // Illegal |
| 3542 // | 3351 // |
| 3543 // An invalid bytecode aborting execution if dispatched. | 3352 // An invalid bytecode aborting execution if dispatched. |
| 3544 void Interpreter::DoIllegal(InterpreterAssembler* assembler) { | 3353 void InterpreterGenerator::DoIllegal(InterpreterAssembler* assembler) { |
| 3545 __ Abort(kInvalidBytecode); | 3354 __ Abort(kInvalidBytecode); |
| 3546 } | 3355 } |
| 3547 | 3356 |
| 3548 // Nop | 3357 // Nop |
| 3549 // | 3358 // |
| 3550 // No operation. | 3359 // No operation. |
| 3551 void Interpreter::DoNop(InterpreterAssembler* assembler) { __ Dispatch(); } | 3360 void InterpreterGenerator::DoNop(InterpreterAssembler* assembler) { |
| 3361 __ Dispatch(); |
| 3362 } |
| 3552 | 3363 |
| 3553 // SuspendGenerator <generator> | 3364 // SuspendGenerator <generator> |
| 3554 // | 3365 // |
| 3555 // Exports the register file and stores it into the generator. Also stores the | 3366 // Exports the register file and stores it into the generator. Also stores the |
| 3556 // current context, the state given in the accumulator, and the current bytecode | 3367 // current context, the state given in the accumulator, and the current bytecode |
| 3557 // offset (for debugging purposes) into the generator. | 3368 // offset (for debugging purposes) into the generator. |
| 3558 void Interpreter::DoSuspendGenerator(InterpreterAssembler* assembler) { | 3369 void InterpreterGenerator::DoSuspendGenerator(InterpreterAssembler* assembler) { |
| 3559 Node* generator_reg = __ BytecodeOperandReg(0); | 3370 Node* generator_reg = __ BytecodeOperandReg(0); |
| 3560 Node* generator = __ LoadRegister(generator_reg); | 3371 Node* generator = __ LoadRegister(generator_reg); |
| 3561 | 3372 |
| 3562 Label if_stepping(assembler, Label::kDeferred), ok(assembler); | 3373 Label if_stepping(assembler, Label::kDeferred), ok(assembler); |
| 3563 Node* step_action_address = __ ExternalConstant( | 3374 Node* step_action_address = __ ExternalConstant( |
| 3564 ExternalReference::debug_last_step_action_address(isolate_)); | 3375 ExternalReference::debug_last_step_action_address(isolate_)); |
| 3565 Node* step_action = __ Load(MachineType::Int8(), step_action_address); | 3376 Node* step_action = __ Load(MachineType::Int8(), step_action_address); |
| 3566 STATIC_ASSERT(StepIn > StepNext); | 3377 STATIC_ASSERT(StepIn > StepNext); |
| 3567 STATIC_ASSERT(LastStepAction == StepIn); | 3378 STATIC_ASSERT(LastStepAction == StepIn); |
| 3568 Node* step_next = __ Int32Constant(StepNext); | 3379 Node* step_next = __ Int32Constant(StepNext); |
| (...skipping 21 matching lines...) |

| 3590 __ CallRuntime(Runtime::kDebugRecordGenerator, context, generator); | 3401 __ CallRuntime(Runtime::kDebugRecordGenerator, context, generator); |
| 3591 __ Goto(&ok); | 3402 __ Goto(&ok); |
| 3592 } | 3403 } |
| 3593 } | 3404 } |
| 3594 | 3405 |
| 3595 // ResumeGenerator <generator> | 3406 // ResumeGenerator <generator> |
| 3596 // | 3407 // |
| 3597 // Imports the register file stored in the generator. Also loads the | 3408 // Imports the register file stored in the generator. Also loads the |
| 3598 // generator's state and stores it in the accumulator, before overwriting it | 3409 // generator's state and stores it in the accumulator, before overwriting it |
| 3599 // with kGeneratorExecuting. | 3410 // with kGeneratorExecuting. |
| 3600 void Interpreter::DoResumeGenerator(InterpreterAssembler* assembler) { | 3411 void InterpreterGenerator::DoResumeGenerator(InterpreterAssembler* assembler) { |
| 3601 Node* generator_reg = __ BytecodeOperandReg(0); | 3412 Node* generator_reg = __ BytecodeOperandReg(0); |
| 3602 Node* generator = __ LoadRegister(generator_reg); | 3413 Node* generator = __ LoadRegister(generator_reg); |
| 3603 | 3414 |
| 3604 __ ImportRegisterFile( | 3415 __ ImportRegisterFile( |
| 3605 __ LoadObjectField(generator, JSGeneratorObject::kRegisterFileOffset)); | 3416 __ LoadObjectField(generator, JSGeneratorObject::kRegisterFileOffset)); |
| 3606 | 3417 |
| 3607 Node* old_state = | 3418 Node* old_state = |
| 3608 __ LoadObjectField(generator, JSGeneratorObject::kContinuationOffset); | 3419 __ LoadObjectField(generator, JSGeneratorObject::kContinuationOffset); |
| 3609 Node* new_state = __ Int32Constant(JSGeneratorObject::kGeneratorExecuting); | 3420 Node* new_state = __ Int32Constant(JSGeneratorObject::kGeneratorExecuting); |
| 3610 __ StoreObjectField(generator, JSGeneratorObject::kContinuationOffset, | 3421 __ StoreObjectField(generator, JSGeneratorObject::kContinuationOffset, |
| 3611 __ SmiTag(new_state)); | 3422 __ SmiTag(new_state)); |
| 3612 __ SetAccumulator(old_state); | 3423 __ SetAccumulator(old_state); |
| 3613 | 3424 |
| 3614 __ Dispatch(); | 3425 __ Dispatch(); |
| 3615 } | 3426 } |
| 3616 | 3427 |
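The suspend/resume pair round-trips interpreter state through the generator object: SuspendGenerator copies the live registers, context, state, and bytecode offset into the object, and ResumeGenerator copies the registers back, returns the stored state in the accumulator, and marks the generator as executing. A toy version of that register-file export/import is sketched below; the field names and the `kGeneratorExecuting` sentinel value are invented, and the debugger-stepping hooks are omitted.

```cpp
// Toy illustration (not V8 code): exporting and importing a register file
// through a generator object, as SuspendGenerator/ResumeGenerator do.
#include <iostream>
#include <vector>

constexpr int kGeneratorExecuting = -2;  // invented sentinel for "running"

struct ToyGenerator {
  std::vector<int> register_file;  // saved interpreter registers
  int continuation = 0;            // suspend id / resume state
  int bytecode_offset = 0;         // for debugging only
};

struct ToyFrame {
  std::vector<int> registers;
  int bytecode_offset = 0;
};

void SuspendGenerator(ToyGenerator* gen, const ToyFrame& frame, int state) {
  gen->register_file = frame.registers;  // export the register file
  gen->continuation = state;             // state comes from the accumulator
  gen->bytecode_offset = frame.bytecode_offset;
}

int ResumeGenerator(ToyGenerator* gen, ToyFrame* frame) {
  frame->registers = gen->register_file;  // import the register file
  int old_state = gen->continuation;      // ends up in the accumulator
  gen->continuation = kGeneratorExecuting;
  return old_state;
}

int main() {
  ToyFrame frame{{10, 20, 30}, /*bytecode_offset=*/48};
  ToyGenerator gen;
  SuspendGenerator(&gen, frame, /*state=*/1);
  ToyFrame resumed;
  std::cout << "resumed at state " << ResumeGenerator(&gen, &resumed)
            << ", r1 = " << resumed.registers[1] << "\n";  // state 1, r1 = 20
}
```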
| 3617 } // namespace interpreter | 3428 } // namespace interpreter |
| 3618 } // namespace internal | 3429 } // namespace internal |
| 3619 } // namespace v8 | 3430 } // namespace v8 |