Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(36)

Side by Side Diff: src/interpreter/interpreter.cc

Issue 1783483002: [interpreter] Add support for scalable operands. (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Re-generate golden files. Created 4 years, 9 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
« no previous file with comments | « src/interpreter/interpreter.h ('k') | src/interpreter/interpreter-assembler.h » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2015 the V8 project authors. All rights reserved. 1 // Copyright 2015 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "src/interpreter/interpreter.h" 5 #include "src/interpreter/interpreter.h"
6 6
7 #include "src/ast/prettyprinter.h" 7 #include "src/ast/prettyprinter.h"
8 #include "src/code-factory.h" 8 #include "src/code-factory.h"
9 #include "src/compiler.h" 9 #include "src/compiler.h"
10 #include "src/factory.h" 10 #include "src/factory.h"
11 #include "src/interpreter/bytecode-generator.h" 11 #include "src/interpreter/bytecode-generator.h"
12 #include "src/interpreter/bytecodes.h" 12 #include "src/interpreter/bytecodes.h"
13 #include "src/interpreter/interpreter-assembler.h" 13 #include "src/interpreter/interpreter-assembler.h"
14 #include "src/log.h" 14 #include "src/log.h"
15 #include "src/zone.h" 15 #include "src/zone.h"
16 16
17 namespace v8 { 17 namespace v8 {
18 namespace internal { 18 namespace internal {
19 namespace interpreter { 19 namespace interpreter {
20 20
21 using compiler::Node; 21 using compiler::Node;
22 22
23 #define __ assembler-> 23 #define __ assembler->
24 24
Interpreter::Interpreter(Isolate* isolate) : isolate_(isolate) {
  // Zero the dispatch table; Initialize() fills it with handler code objects
  // and treats nullptr entries as "no handler generated yet".
  memset(dispatch_table_, 0, sizeof(dispatch_table_));
}
28 28
void Interpreter::Initialize() {
  DCHECK(FLAG_ignition);
  if (IsDispatchTableInitialized()) return;
  Zone zone;
  HandleScope scope(isolate_);

  // Generate bytecode handlers for all bytecodes and scales. Each operand
  // scale gets its own bank of dispatch-table entries; bytecodes without
  // scalable operands only get a handler at OperandScale::kSingle.
#define GENERATE_CODE(Name, ...)                                              \
  {                                                                           \
    if (BytecodeHasHandler(Bytecode::k##Name, operand_scale)) {               \
      InterpreterAssembler assembler(isolate_, &zone, Bytecode::k##Name,      \
                                     operand_scale);                          \
      Do##Name(&assembler);                                                   \
      Handle<Code> code = assembler.GenerateCode();                           \
      size_t index = GetDispatchTableIndex(Bytecode::k##Name, operand_scale); \
      dispatch_table_[index] = *code;                                         \
      TraceCodegen(code);                                                     \
      LOG_CODE_EVENT(isolate_,                                                \
                     CodeCreateEvent(Logger::BYTECODE_HANDLER_TAG,            \
                                     AbstractCode::cast(*code), #Name));      \
    }                                                                         \
  }
  for (OperandScale operand_scale = OperandScale::kSingle;
       operand_scale <= OperandScale::kMaxValid;
       operand_scale = Bytecodes::NextOperandScale(operand_scale)) {
    BYTECODE_LIST(GENERATE_CODE)
  }
#undef GENERATE_CODE

  // Fill unused entries with the illegal bytecode handler so any stray
  // dispatch lands on a well-defined trap instead of a null code object.
  size_t illegal_index =
      GetDispatchTableIndex(Bytecode::kIllegal, OperandScale::kSingle);
  for (size_t index = 0; index < arraysize(dispatch_table_); ++index) {
    if (dispatch_table_[index] == nullptr) {
      dispatch_table_[index] = dispatch_table_[illegal_index];
    }
  }
}
49 67
// Returns the handler code object for |bytecode| at |operand_scale|.
// Requires Initialize() to have run and the pair to actually have a handler.
Code* Interpreter::GetBytecodeHandler(Bytecode bytecode,
                                      OperandScale operand_scale) {
  DCHECK(IsDispatchTableInitialized());
  DCHECK(BytecodeHasHandler(bytecode, operand_scale));
  size_t index = GetDispatchTableIndex(bytecode, operand_scale);
  return dispatch_table_[index];
}
75
76 // static
77 size_t Interpreter::GetDispatchTableIndex(Bytecode bytecode,
78 OperandScale operand_scale) {
79 static const size_t kEntriesPerOperandScale = 1u << kBitsPerByte;
80 size_t index = static_cast<size_t>(bytecode);
81 OperandScale current_scale = OperandScale::kSingle;
82 while (current_scale != operand_scale) {
83 index += kEntriesPerOperandScale;
84 current_scale = Bytecodes::NextOperandScale(current_scale);
85 }
86 return index;
87 }
88
89 // static
90 bool Interpreter::BytecodeHasHandler(Bytecode bytecode,
91 OperandScale operand_scale) {
92 return operand_scale == OperandScale::kSingle ||
93 Bytecodes::IsBytecodeWithScalableOperands(bytecode);
53 } 94 }
54 95
// Visits every dispatch-table entry as a heap pointer so the GC can mark
// and (if moving) relocate the handler code objects.
void Interpreter::IterateDispatchTable(ObjectVisitor* v) {
  v->VisitPointers(
      reinterpret_cast<Object**>(&dispatch_table_[0]),
      reinterpret_cast<Object**>(&dispatch_table_[0] + kDispatchTableSize));
}
60 101
61 // static 102 // static
62 int Interpreter::InterruptBudget() { 103 int Interpreter::InterruptBudget() {
(...skipping 79 matching lines...) Expand 10 before | Expand all | Expand 10 after
142 183
// LdaZero
//
// Load literal '0' into the accumulator.
void Interpreter::DoLdaZero(InterpreterAssembler* assembler) {
  Node* zero_value = __ NumberConstant(0.0);
  __ SetAccumulator(zero_value);
  __ Dispatch();
}
151 192
// LdaSmi <imm>
//
// Load an integer literal into the accumulator as a Smi. The operand is
// scalable, so the immediate width depends on the current operand scale.
void Interpreter::DoLdaSmi(InterpreterAssembler* assembler) {
  Node* raw_int = __ BytecodeOperandImm(0);
  Node* smi_int = __ SmiTag(raw_int);
  __ SetAccumulator(smi_int);
  __ Dispatch();
}
162 202
// Shared helper: load the constant-pool entry selected by operand 0 into
// the accumulator and dispatch to the next bytecode.
void Interpreter::DoLoadConstant(InterpreterAssembler* assembler) {
  Node* index = __ BytecodeOperandIdx(0);
  Node* constant = __ LoadConstantPoolEntry(index);
  __ SetAccumulator(constant);
  __ Dispatch();
}
169 209
170 210
// LdaConstant <idx>
//
// Load constant literal at |idx| in the constant pool into the accumulator.
void Interpreter::DoLdaConstant(InterpreterAssembler* assembler) {
  DoLoadConstant(assembler);
}
177 217
178
// LdaConstantWide <idx>
//
// Load constant literal at |idx| in the constant pool into the accumulator.
// (Pre-scaling "Wide" variant removed by the scalable-operands patch.)
void Interpreter::DoLdaConstantWide(InterpreterAssembler* assembler) {
  DoLoadConstant(assembler);
}
185
186
// LdaUndefined
//
// Load Undefined into the accumulator.
void Interpreter::DoLdaUndefined(InterpreterAssembler* assembler) {
  Node* undefined_value =
      __ HeapConstant(isolate_->factory()->undefined_value());
  __ SetAccumulator(undefined_value);
  __ Dispatch();
}
196 227
(...skipping 65 matching lines...) Expand 10 before | Expand all | Expand 10 after
// Stores the value of register <src> to register <dst>.
void Interpreter::DoMov(InterpreterAssembler* assembler) {
  Node* src_index = __ BytecodeOperandReg(0);
  Node* src_value = __ LoadRegister(src_index);
  Node* dst_index = __ BytecodeOperandReg(1);
  __ StoreRegister(src_value, dst_index);
  __ Dispatch();
}
270 301
271 302
// MovWide <src> <dst>
//
// Stores the value of register <src> to register <dst>.
// (Pre-scaling "Wide" variant removed by the scalable-operands patch.)
void Interpreter::DoMovWide(InterpreterAssembler* assembler) {
  DoMov(assembler);
}
278
279 void Interpreter::DoLoadGlobal(Callable ic, InterpreterAssembler* assembler) { 303 void Interpreter::DoLoadGlobal(Callable ic, InterpreterAssembler* assembler) {
280 // Get the global object. 304 // Get the global object.
281 Node* context = __ GetContext(); 305 Node* context = __ GetContext();
282 Node* native_context = 306 Node* native_context =
283 __ LoadContextSlot(context, Context::NATIVE_CONTEXT_INDEX); 307 __ LoadContextSlot(context, Context::NATIVE_CONTEXT_INDEX);
284 Node* global = __ LoadContextSlot(native_context, Context::EXTENSION_INDEX); 308 Node* global = __ LoadContextSlot(native_context, Context::EXTENSION_INDEX);
285 309
286 // Load the global via the LoadIC. 310 // Load the global via the LoadIC.
287 Node* code_target = __ HeapConstant(ic.code()); 311 Node* code_target = __ HeapConstant(ic.code());
288 Node* constant_index = __ BytecodeOperandIdx(0); 312 Node* constant_index = __ BytecodeOperandIdx(0);
(...skipping 20 matching lines...) Expand all
// LdaGlobalInsideTypeof <name_index> <slot>
//
// Load the global with name in constant pool entry <name_index> into the
// accumulator using feedback vector slot <slot> inside of a typeof.
void Interpreter::DoLdaGlobalInsideTypeof(InterpreterAssembler* assembler) {
  Callable ic = CodeFactory::LoadICInOptimizedCode(isolate_, INSIDE_TYPEOF,
                                                   UNINITIALIZED);
  DoLoadGlobal(ic, assembler);
}
318 342
// LdaGlobalWide <name_index> <slot>
//
// Load the global with name in constant pool entry <name_index> into the
// accumulator using feedback vector slot <slot> outside of a typeof.
// (Pre-scaling "Wide" variant removed by the scalable-operands patch.)
void Interpreter::DoLdaGlobalWide(InterpreterAssembler* assembler) {
  Callable ic = CodeFactory::LoadICInOptimizedCode(isolate_, NOT_INSIDE_TYPEOF,
                                                   UNINITIALIZED);
  DoLoadGlobal(ic, assembler);
}
328
// LdaGlobalInsideTypeofWide <name_index> <slot>
//
// Load the global with name in constant pool entry <name_index> into the
// accumulator using feedback vector slot <slot> inside of a typeof.
// (Pre-scaling "Wide" variant removed by the scalable-operands patch.)
void Interpreter::DoLdaGlobalInsideTypeofWide(InterpreterAssembler* assembler) {
  Callable ic = CodeFactory::LoadICInOptimizedCode(isolate_, INSIDE_TYPEOF,
                                                   UNINITIALIZED);
  DoLoadGlobal(ic, assembler);
}
338
339
// Shared helper: store the accumulator into a global via the given StoreIC,
// using operand 0 as the name's constant-pool index and operand 1 as the
// feedback vector slot.
void Interpreter::DoStoreGlobal(Callable ic, InterpreterAssembler* assembler) {
  // Get the global object.
  Node* context = __ GetContext();
  Node* native_context =
      __ LoadContextSlot(context, Context::NATIVE_CONTEXT_INDEX);
  Node* global = __ LoadContextSlot(native_context, Context::EXTENSION_INDEX);

  // Store the global via the StoreIC.
  Node* code_target = __ HeapConstant(ic.code());
  Node* constant_index = __ BytecodeOperandIdx(0);
  Node* name = __ LoadConstantPoolEntry(constant_index);
  Node* value = __ GetAccumulator();
  Node* raw_slot = __ BytecodeOperandIdx(1);
  Node* smi_slot = __ SmiTag(raw_slot);
  Node* type_feedback_vector = __ LoadTypeFeedbackVector();
  __ CallStub(ic.descriptor(), code_target, context, global, name, value,
              smi_slot, type_feedback_vector);
  __ Dispatch();
}
360 362
361 363
// StaGlobalSloppy <name_index> <slot>
//
// Store the value in the accumulator into the global with name in constant pool
// entry <name_index> using feedback vector slot <slot> in sloppy mode.
void Interpreter::DoStaGlobalSloppy(InterpreterAssembler* assembler) {
  Callable ic =
      CodeFactory::StoreICInOptimizedCode(isolate_, SLOPPY, UNINITIALIZED);
  DoStoreGlobal(ic, assembler);
}
371 373
372 374
// StaGlobalStrict <name_index> <slot>
//
// Store the value in the accumulator into the global with name in constant pool
// entry <name_index> using feedback vector slot <slot> in strict mode.
void Interpreter::DoStaGlobalStrict(InterpreterAssembler* assembler) {
  Callable ic =
      CodeFactory::StoreICInOptimizedCode(isolate_, STRICT, UNINITIALIZED);
  DoStoreGlobal(ic, assembler);
}
382 384
383
// StaGlobalSloppyWide <name_index> <slot>
//
// Store the value in the accumulator into the global with name in constant pool
// entry <name_index> using feedback vector slot <slot> in sloppy mode.
// (Pre-scaling "Wide" variant removed by the scalable-operands patch.)
void Interpreter::DoStaGlobalSloppyWide(InterpreterAssembler* assembler) {
  Callable ic =
      CodeFactory::StoreICInOptimizedCode(isolate_, SLOPPY, UNINITIALIZED);
  DoStoreGlobal(ic, assembler);
}
393
394
// StaGlobalStrictWide <name_index> <slot>
//
// Store the value in the accumulator into the global with name in constant pool
// entry <name_index> using feedback vector slot <slot> in strict mode.
// (Pre-scaling "Wide" variant removed by the scalable-operands patch.)
void Interpreter::DoStaGlobalStrictWide(InterpreterAssembler* assembler) {
  Callable ic =
      CodeFactory::StoreICInOptimizedCode(isolate_, STRICT, UNINITIALIZED);
  DoStoreGlobal(ic, assembler);
}
404
405
// LdaContextSlot <context> <slot_index>
//
// Load the object in |slot_index| of |context| into the accumulator.
void Interpreter::DoLdaContextSlot(InterpreterAssembler* assembler) {
  Node* reg_index = __ BytecodeOperandReg(0);
  Node* context = __ LoadRegister(reg_index);
  Node* slot_index = __ BytecodeOperandIdx(1);
  Node* result = __ LoadContextSlot(context, slot_index);
  __ SetAccumulator(result);
  __ Dispatch();
}
417 396
418
// LdaContextSlotWide <context> <slot_index>
//
// Load the object in |slot_index| of |context| into the accumulator.
// (Pre-scaling "Wide" variant removed by the scalable-operands patch.)
void Interpreter::DoLdaContextSlotWide(InterpreterAssembler* assembler) {
  DoLdaContextSlot(assembler);
}
425
426
// StaContextSlot <context> <slot_index>
//
// Stores the object in the accumulator into |slot_index| of |context|.
void Interpreter::DoStaContextSlot(InterpreterAssembler* assembler) {
  Node* value = __ GetAccumulator();
  Node* reg_index = __ BytecodeOperandReg(0);
  Node* context = __ LoadRegister(reg_index);
  Node* slot_index = __ BytecodeOperandIdx(1);
  __ StoreContextSlot(context, slot_index, value);
  __ Dispatch();
}
438 408
439
// StaContextSlotWide <context> <slot_index>
//
// Stores the object in the accumulator into |slot_index| of |context|.
// (Pre-scaling "Wide" variant removed by the scalable-operands patch.)
void Interpreter::DoStaContextSlotWide(InterpreterAssembler* assembler) {
  DoStaContextSlot(assembler);
}
446
// Shared helper: dynamically look up the name at constant-pool operand 0 by
// calling |function_id| and place the result in the accumulator.
void Interpreter::DoLoadLookupSlot(Runtime::FunctionId function_id,
                                   InterpreterAssembler* assembler) {
  Node* index = __ BytecodeOperandIdx(0);
  Node* name = __ LoadConstantPoolEntry(index);
  Node* context = __ GetContext();
  Node* result = __ CallRuntime(function_id, context, name);
  __ SetAccumulator(result);
  __ Dispatch();
}
456 418
457
// LdaLookupSlot <name_index>
//
// Lookup the object with the name in constant pool entry |name_index|
// dynamically.
void Interpreter::DoLdaLookupSlot(InterpreterAssembler* assembler) {
  DoLoadLookupSlot(Runtime::kLoadLookupSlot, assembler);
}
465 426
466
// LdaLookupSlotInsideTypeof <name_index>
//
// Lookup the object with the name in constant pool entry |name_index|
// dynamically without causing a NoReferenceError.
void Interpreter::DoLdaLookupSlotInsideTypeof(InterpreterAssembler* assembler) {
  DoLoadLookupSlot(Runtime::kLoadLookupSlotInsideTypeof, assembler);
}
474 434
475
// LdaLookupSlotWide <name_index>
//
// Lookup the object with the name in constant pool entry |name_index|
// dynamically.
// (Pre-scaling "Wide" variant removed by the scalable-operands patch.)
void Interpreter::DoLdaLookupSlotWide(InterpreterAssembler* assembler) {
  DoLdaLookupSlot(assembler);
}
483
484
// LdaLookupSlotInsideTypeofWide <name_index>
//
// Lookup the object with the name in constant pool entry |name_index|
// dynamically without causing a NoReferenceError.
// (Pre-scaling "Wide" variant removed by the scalable-operands patch.)
void Interpreter::DoLdaLookupSlotInsideTypeofWide(
    InterpreterAssembler* assembler) {
  DoLdaLookupSlotInsideTypeof(assembler);
}
493
// Shared helper: store the accumulator into the dynamically looked-up slot
// named by constant-pool operand 0, choosing the strict or sloppy runtime
// function based on |language_mode|.
void Interpreter::DoStoreLookupSlot(LanguageMode language_mode,
                                    InterpreterAssembler* assembler) {
  Node* value = __ GetAccumulator();
  Node* index = __ BytecodeOperandIdx(0);
  Node* name = __ LoadConstantPoolEntry(index);
  Node* context = __ GetContext();
  Node* result = __ CallRuntime(is_strict(language_mode)
                                    ? Runtime::kStoreLookupSlot_Strict
                                    : Runtime::kStoreLookupSlot_Sloppy,
                                context, name, value);
  __ SetAccumulator(result);
  __ Dispatch();
}
507 448
508
// StaLookupSlotSloppy <name_index>
//
// Store the object in accumulator to the object with the name in constant
// pool entry |name_index| in sloppy mode.
void Interpreter::DoStaLookupSlotSloppy(InterpreterAssembler* assembler) {
  DoStoreLookupSlot(LanguageMode::SLOPPY, assembler);
}
516 456
517 457
// StaLookupSlotStrict <name_index>
//
// Store the object in accumulator to the object with the name in constant
// pool entry |name_index| in strict mode.
void Interpreter::DoStaLookupSlotStrict(InterpreterAssembler* assembler) {
  DoStoreLookupSlot(LanguageMode::STRICT, assembler);
}
525 465
526
// StaLookupSlotSloppyWide <name_index>
//
// Store the object in accumulator to the object with the name in constant
// pool entry |name_index| in sloppy mode.
// (Pre-scaling "Wide" variant removed by the scalable-operands patch.)
void Interpreter::DoStaLookupSlotSloppyWide(InterpreterAssembler* assembler) {
  DoStaLookupSlotSloppy(assembler);
}
534
535
// StaLookupSlotStrictWide <name_index>
//
// Store the object in accumulator to the object with the name in constant
// pool entry |name_index| in strict mode.
// (Pre-scaling "Wide" variant removed by the scalable-operands patch.)
void Interpreter::DoStaLookupSlotStrictWide(InterpreterAssembler* assembler) {
  DoStaLookupSlotStrict(assembler);
}
543
// Shared helper: perform a named-property load through |ic| using operand 0
// as the receiver register, operand 1 as the name's constant-pool index and
// operand 2 as the feedback vector slot; result goes to the accumulator.
void Interpreter::DoLoadIC(Callable ic, InterpreterAssembler* assembler) {
  Node* code_target = __ HeapConstant(ic.code());
  Node* register_index = __ BytecodeOperandReg(0);
  Node* object = __ LoadRegister(register_index);
  Node* constant_index = __ BytecodeOperandIdx(1);
  Node* name = __ LoadConstantPoolEntry(constant_index);
  Node* raw_slot = __ BytecodeOperandIdx(2);
  Node* smi_slot = __ SmiTag(raw_slot);
  Node* type_feedback_vector = __ LoadTypeFeedbackVector();
  Node* context = __ GetContext();
  Node* result = __ CallStub(ic.descriptor(), code_target, context, object,
                             name, smi_slot, type_feedback_vector);
  __ SetAccumulator(result);
  __ Dispatch();
}
559 481
// LoadIC <object> <name_index> <slot>
//
// Calls the LoadIC at feedback vector slot <slot> for <object> and the name at
// constant pool entry <name_index>.
void Interpreter::DoLoadIC(InterpreterAssembler* assembler) {
  Callable ic = CodeFactory::LoadICInOptimizedCode(isolate_, NOT_INSIDE_TYPEOF,
                                                   UNINITIALIZED);
  DoLoadIC(ic, assembler);
}
569 491
// LoadICWide <object> <name_index> <slot>
//
// Calls the LoadIC at feedback vector slot <slot> for <object> and the name at
// constant pool entry <name_index>.
// (Pre-scaling "Wide" variant removed by the scalable-operands patch.)
void Interpreter::DoLoadICWide(InterpreterAssembler* assembler) {
  Callable ic = CodeFactory::LoadICInOptimizedCode(isolate_, NOT_INSIDE_TYPEOF,
                                                   UNINITIALIZED);
  DoLoadIC(ic, assembler);
}
579
580
// Shared helper: perform a keyed-property load through |ic| using operand 0
// as the receiver register, the accumulator as the key and operand 1 as the
// feedback vector slot; result goes to the accumulator.
void Interpreter::DoKeyedLoadIC(Callable ic, InterpreterAssembler* assembler) {
  Node* code_target = __ HeapConstant(ic.code());
  Node* reg_index = __ BytecodeOperandReg(0);
  Node* object = __ LoadRegister(reg_index);
  Node* name = __ GetAccumulator();
  Node* raw_slot = __ BytecodeOperandIdx(1);
  Node* smi_slot = __ SmiTag(raw_slot);
  Node* type_feedback_vector = __ LoadTypeFeedbackVector();
  Node* context = __ GetContext();
  Node* result = __ CallStub(ic.descriptor(), code_target, context, object,
                             name, smi_slot, type_feedback_vector);
  __ SetAccumulator(result);
  __ Dispatch();
}
595 506
// KeyedLoadIC <object> <slot>
//
// Calls the KeyedLoadIC at feedback vector slot <slot> for <object> and the key
// in the accumulator.
void Interpreter::DoKeyedLoadIC(InterpreterAssembler* assembler) {
  Callable ic =
      CodeFactory::KeyedLoadICInOptimizedCode(isolate_, UNINITIALIZED);
  DoKeyedLoadIC(ic, assembler);
}
605 516
// KeyedLoadICWide <object> <slot>
//
// Calls the KeyedLoadIC at feedback vector slot <slot> for <object> and the key
// in the accumulator.
// (Pre-scaling "Wide" variant removed by the scalable-operands patch.)
void Interpreter::DoKeyedLoadICWide(InterpreterAssembler* assembler) {
  Callable ic =
      CodeFactory::KeyedLoadICInOptimizedCode(isolate_, UNINITIALIZED);
  DoKeyedLoadIC(ic, assembler);
}
615
616
617 void Interpreter::DoStoreIC(Callable ic, InterpreterAssembler* assembler) { 517 void Interpreter::DoStoreIC(Callable ic, InterpreterAssembler* assembler) {
618 Node* code_target = __ HeapConstant(ic.code()); 518 Node* code_target = __ HeapConstant(ic.code());
619 Node* object_reg_index = __ BytecodeOperandReg(0); 519 Node* object_reg_index = __ BytecodeOperandReg(0);
620 Node* object = __ LoadRegister(object_reg_index); 520 Node* object = __ LoadRegister(object_reg_index);
621 Node* constant_index = __ BytecodeOperandIdx(1); 521 Node* constant_index = __ BytecodeOperandIdx(1);
622 Node* name = __ LoadConstantPoolEntry(constant_index); 522 Node* name = __ LoadConstantPoolEntry(constant_index);
623 Node* value = __ GetAccumulator(); 523 Node* value = __ GetAccumulator();
624 Node* raw_slot = __ BytecodeOperandIdx(2); 524 Node* raw_slot = __ BytecodeOperandIdx(2);
625 Node* smi_slot = __ SmiTag(raw_slot); 525 Node* smi_slot = __ SmiTag(raw_slot);
626 Node* type_feedback_vector = __ LoadTypeFeedbackVector(); 526 Node* type_feedback_vector = __ LoadTypeFeedbackVector();
(...skipping 20 matching lines...) Expand all
// StoreICStrict <object> <name_index> <slot>
//
// Calls the strict mode StoreIC at feedback vector slot <slot> for <object> and
// the name in constant pool entry <name_index> with the value in the
// accumulator.
void Interpreter::DoStoreICStrict(InterpreterAssembler* assembler) {
  Callable ic =
      CodeFactory::StoreICInOptimizedCode(isolate_, STRICT, UNINITIALIZED);
  DoStoreIC(ic, assembler);
}
656 556
657
// StoreICSloppyWide <object> <name_index> <slot>
//
// Calls the sloppy mode StoreIC at feedback vector slot <slot> for <object> and
// the name in constant pool entry <name_index> with the value in the
// accumulator.
// (Pre-scaling "Wide" variant removed by the scalable-operands patch.)
void Interpreter::DoStoreICSloppyWide(InterpreterAssembler* assembler) {
  Callable ic =
      CodeFactory::StoreICInOptimizedCode(isolate_, SLOPPY, UNINITIALIZED);
  DoStoreIC(ic, assembler);
}
668
669
670 // StoreICStrictWide <object> <name_index> <slot>
671 //
672 // Calls the strict mode StoreIC at FeedBackVector slot <slot> for <object> and
673 // the name in constant pool entry <name_index> with the value in the
674 // accumulator.
675 void Interpreter::DoStoreICStrictWide(InterpreterAssembler* assembler) {
676 Callable ic =
677 CodeFactory::StoreICInOptimizedCode(isolate_, STRICT, UNINITIALIZED);
678 DoStoreIC(ic, assembler);
679 }
680
681 void Interpreter::DoKeyedStoreIC(Callable ic, InterpreterAssembler* assembler) { 557 void Interpreter::DoKeyedStoreIC(Callable ic, InterpreterAssembler* assembler) {
682 Node* code_target = __ HeapConstant(ic.code()); 558 Node* code_target = __ HeapConstant(ic.code());
683 Node* object_reg_index = __ BytecodeOperandReg(0); 559 Node* object_reg_index = __ BytecodeOperandReg(0);
684 Node* object = __ LoadRegister(object_reg_index); 560 Node* object = __ LoadRegister(object_reg_index);
685 Node* name_reg_index = __ BytecodeOperandReg(1); 561 Node* name_reg_index = __ BytecodeOperandReg(1);
686 Node* name = __ LoadRegister(name_reg_index); 562 Node* name = __ LoadRegister(name_reg_index);
687 Node* value = __ GetAccumulator(); 563 Node* value = __ GetAccumulator();
688 Node* raw_slot = __ BytecodeOperandIdx(2); 564 Node* raw_slot = __ BytecodeOperandIdx(2);
689 Node* smi_slot = __ SmiTag(raw_slot); 565 Node* smi_slot = __ SmiTag(raw_slot);
690 Node* type_feedback_vector = __ LoadTypeFeedbackVector(); 566 Node* type_feedback_vector = __ LoadTypeFeedbackVector();
(...skipping 18 matching lines...) Expand all
709 // KeyedStoreICStore <object> <key> <slot> 585 // KeyedStoreICStore <object> <key> <slot>
710 // 586 //
711 // Calls the strict mode KeyStoreIC at FeedBackVector slot <slot> for <object> 587 // Calls the strict mode KeyStoreIC at FeedBackVector slot <slot> for <object>
712 // and the key <key> with the value in the accumulator. 588 // and the key <key> with the value in the accumulator.
713 void Interpreter::DoKeyedStoreICStrict(InterpreterAssembler* assembler) { 589 void Interpreter::DoKeyedStoreICStrict(InterpreterAssembler* assembler) {
714 Callable ic = 590 Callable ic =
715 CodeFactory::KeyedStoreICInOptimizedCode(isolate_, STRICT, UNINITIALIZED); 591 CodeFactory::KeyedStoreICInOptimizedCode(isolate_, STRICT, UNINITIALIZED);
716 DoKeyedStoreIC(ic, assembler); 592 DoKeyedStoreIC(ic, assembler);
717 } 593 }
718 594
719
720 // KeyedStoreICSloppyWide <object> <key> <slot>
721 //
722 // Calls the sloppy mode KeyStoreIC at FeedBackVector slot <slot> for <object>
723 // and the key <key> with the value in the accumulator.
724 void Interpreter::DoKeyedStoreICSloppyWide(InterpreterAssembler* assembler) {
725 Callable ic =
726 CodeFactory::KeyedStoreICInOptimizedCode(isolate_, SLOPPY, UNINITIALIZED);
727 DoKeyedStoreIC(ic, assembler);
728 }
729
730
731 // KeyedStoreICStoreWide <object> <key> <slot>
732 //
733 // Calls the strict mode KeyStoreIC at FeedBackVector slot <slot> for <object>
734 // and the key <key> with the value in the accumulator.
735 void Interpreter::DoKeyedStoreICStrictWide(InterpreterAssembler* assembler) {
736 Callable ic =
737 CodeFactory::KeyedStoreICInOptimizedCode(isolate_, STRICT, UNINITIALIZED);
738 DoKeyedStoreIC(ic, assembler);
739 }
740
741 // PushContext <context> 595 // PushContext <context>
742 // 596 //
743 // Saves the current context in <context>, and pushes the accumulator as the 597 // Saves the current context in <context>, and pushes the accumulator as the
744 // new current context. 598 // new current context.
745 void Interpreter::DoPushContext(InterpreterAssembler* assembler) { 599 void Interpreter::DoPushContext(InterpreterAssembler* assembler) {
746 Node* reg_index = __ BytecodeOperandReg(0); 600 Node* reg_index = __ BytecodeOperandReg(0);
747 Node* new_context = __ GetAccumulator(); 601 Node* new_context = __ GetAccumulator();
748 Node* old_context = __ GetContext(); 602 Node* old_context = __ GetContext();
749 __ StoreRegister(old_context, reg_index); 603 __ StoreRegister(old_context, reg_index);
750 __ SetContext(new_context); 604 __ SetContext(new_context);
(...skipping 253 matching lines...) Expand 10 before | Expand all | Expand 10 after
1004 858
1005 859
1006 // Call <callable> <receiver> <arg_count> 860 // Call <callable> <receiver> <arg_count>
1007 // 861 //
1008 // Call a JSfunction or Callable in |callable| with the |receiver| and 862 // Call a JSfunction or Callable in |callable| with the |receiver| and
1009 // |arg_count| arguments in subsequent registers. 863 // |arg_count| arguments in subsequent registers.
1010 void Interpreter::DoCall(InterpreterAssembler* assembler) { 864 void Interpreter::DoCall(InterpreterAssembler* assembler) {
1011 DoJSCall(assembler, TailCallMode::kDisallow); 865 DoJSCall(assembler, TailCallMode::kDisallow);
1012 } 866 }
1013 867
1014
1015 // CallWide <callable> <receiver> <arg_count>
1016 //
1017 // Call a JSfunction or Callable in |callable| with the |receiver| and
1018 // |arg_count| arguments in subsequent registers.
1019 void Interpreter::DoCallWide(InterpreterAssembler* assembler) {
1020 DoJSCall(assembler, TailCallMode::kDisallow);
1021 }
1022
1023 // TailCall <callable> <receiver> <arg_count> 868 // TailCall <callable> <receiver> <arg_count>
1024 // 869 //
1025 // Tail call a JSfunction or Callable in |callable| with the |receiver| and 870 // Tail call a JSfunction or Callable in |callable| with the |receiver| and
1026 // |arg_count| arguments in subsequent registers. 871 // |arg_count| arguments in subsequent registers.
1027 void Interpreter::DoTailCall(InterpreterAssembler* assembler) { 872 void Interpreter::DoTailCall(InterpreterAssembler* assembler) {
1028 DoJSCall(assembler, TailCallMode::kAllow); 873 DoJSCall(assembler, TailCallMode::kAllow);
1029 } 874 }
1030 875
1031 // TailCallWide <callable> <receiver> <arg_count>
1032 //
1033 // Tail call a JSfunction or Callable in |callable| with the |receiver| and
1034 // |arg_count| arguments in subsequent registers.
1035 void Interpreter::DoTailCallWide(InterpreterAssembler* assembler) {
1036 DoJSCall(assembler, TailCallMode::kAllow);
1037 }
1038
1039 void Interpreter::DoCallRuntimeCommon(InterpreterAssembler* assembler) { 876 void Interpreter::DoCallRuntimeCommon(InterpreterAssembler* assembler) {
1040 Node* function_id = __ BytecodeOperandIdx(0); 877 Node* function_id = __ BytecodeOperandRuntimeId(0);
1041 Node* first_arg_reg = __ BytecodeOperandReg(1); 878 Node* first_arg_reg = __ BytecodeOperandReg(1);
1042 Node* first_arg = __ RegisterLocation(first_arg_reg); 879 Node* first_arg = __ RegisterLocation(first_arg_reg);
1043 Node* args_count = __ BytecodeOperandCount(2); 880 Node* args_count = __ BytecodeOperandCount(2);
1044 Node* context = __ GetContext(); 881 Node* context = __ GetContext();
1045 Node* result = __ CallRuntimeN(function_id, context, first_arg, args_count); 882 Node* result = __ CallRuntimeN(function_id, context, first_arg, args_count);
1046 __ SetAccumulator(result); 883 __ SetAccumulator(result);
1047 __ Dispatch(); 884 __ Dispatch();
1048 } 885 }
1049 886
1050 887
1051 // CallRuntime <function_id> <first_arg> <arg_count> 888 // CallRuntime <function_id> <first_arg> <arg_count>
1052 // 889 //
1053 // Call the runtime function |function_id| with the first argument in 890 // Call the runtime function |function_id| with the first argument in
1054 // register |first_arg| and |arg_count| arguments in subsequent 891 // register |first_arg| and |arg_count| arguments in subsequent
1055 // registers. 892 // registers.
1056 void Interpreter::DoCallRuntime(InterpreterAssembler* assembler) { 893 void Interpreter::DoCallRuntime(InterpreterAssembler* assembler) {
1057 DoCallRuntimeCommon(assembler); 894 DoCallRuntimeCommon(assembler);
1058 } 895 }
1059 896
1060
1061 // CallRuntime <function_id> <first_arg> <arg_count>
1062 //
1063 // Call the runtime function |function_id| with the first argument in
1064 // register |first_arg| and |arg_count| arguments in subsequent
1065 // registers.
1066 void Interpreter::DoCallRuntimeWide(InterpreterAssembler* assembler) {
1067 DoCallRuntimeCommon(assembler);
1068 }
1069
1070 void Interpreter::DoCallRuntimeForPairCommon(InterpreterAssembler* assembler) { 897 void Interpreter::DoCallRuntimeForPairCommon(InterpreterAssembler* assembler) {
1071 // Call the runtime function. 898 // Call the runtime function.
1072 Node* function_id = __ BytecodeOperandIdx(0); 899 Node* function_id = __ BytecodeOperandRuntimeId(0);
1073 Node* first_arg_reg = __ BytecodeOperandReg(1); 900 Node* first_arg_reg = __ BytecodeOperandReg(1);
1074 Node* first_arg = __ RegisterLocation(first_arg_reg); 901 Node* first_arg = __ RegisterLocation(first_arg_reg);
1075 Node* args_count = __ BytecodeOperandCount(2); 902 Node* args_count = __ BytecodeOperandCount(2);
1076 Node* context = __ GetContext(); 903 Node* context = __ GetContext();
1077 Node* result_pair = 904 Node* result_pair =
1078 __ CallRuntimeN(function_id, context, first_arg, args_count, 2); 905 __ CallRuntimeN(function_id, context, first_arg, args_count, 2);
1079 906
1080 // Store the results in <first_return> and <first_return + 1> 907 // Store the results in <first_return> and <first_return + 1>
1081 Node* first_return_reg = __ BytecodeOperandReg(3); 908 Node* first_return_reg = __ BytecodeOperandReg(3);
1082 Node* second_return_reg = __ NextRegister(first_return_reg); 909 Node* second_return_reg = __ NextRegister(first_return_reg);
1083 Node* result0 = __ Projection(0, result_pair); 910 Node* result0 = __ Projection(0, result_pair);
1084 Node* result1 = __ Projection(1, result_pair); 911 Node* result1 = __ Projection(1, result_pair);
1085 __ StoreRegister(result0, first_return_reg); 912 __ StoreRegister(result0, first_return_reg);
1086 __ StoreRegister(result1, second_return_reg); 913 __ StoreRegister(result1, second_return_reg);
1087 __ Dispatch(); 914 __ Dispatch();
1088 } 915 }
1089 916
1090 917
1091 // CallRuntimeForPair <function_id> <first_arg> <arg_count> <first_return> 918 // CallRuntimeForPair <function_id> <first_arg> <arg_count> <first_return>
1092 // 919 //
1093 // Call the runtime function |function_id| which returns a pair, with the 920 // Call the runtime function |function_id| which returns a pair, with the
1094 // first argument in register |first_arg| and |arg_count| arguments in 921 // first argument in register |first_arg| and |arg_count| arguments in
1095 // subsequent registers. Returns the result in <first_return> and 922 // subsequent registers. Returns the result in <first_return> and
1096 // <first_return + 1> 923 // <first_return + 1>
1097 void Interpreter::DoCallRuntimeForPair(InterpreterAssembler* assembler) { 924 void Interpreter::DoCallRuntimeForPair(InterpreterAssembler* assembler) {
1098 DoCallRuntimeForPairCommon(assembler); 925 DoCallRuntimeForPairCommon(assembler);
1099 } 926 }
1100 927
1101
1102 // CallRuntimeForPairWide <function_id> <first_arg> <arg_count> <first_return>
1103 //
1104 // Call the runtime function |function_id| which returns a pair, with the
1105 // first argument in register |first_arg| and |arg_count| arguments in
1106 // subsequent registers. Returns the result in <first_return> and
1107 // <first_return + 1>
1108 void Interpreter::DoCallRuntimeForPairWide(InterpreterAssembler* assembler) {
1109 DoCallRuntimeForPairCommon(assembler);
1110 }
1111
1112 void Interpreter::DoCallJSRuntimeCommon(InterpreterAssembler* assembler) { 928 void Interpreter::DoCallJSRuntimeCommon(InterpreterAssembler* assembler) {
1113 Node* context_index = __ BytecodeOperandIdx(0); 929 Node* context_index = __ BytecodeOperandIdx(0);
1114 Node* receiver_reg = __ BytecodeOperandReg(1); 930 Node* receiver_reg = __ BytecodeOperandReg(1);
1115 Node* first_arg = __ RegisterLocation(receiver_reg); 931 Node* first_arg = __ RegisterLocation(receiver_reg);
1116 Node* receiver_args_count = __ BytecodeOperandCount(2); 932 Node* receiver_args_count = __ BytecodeOperandCount(2);
1117 Node* receiver_count = __ Int32Constant(1); 933 Node* receiver_count = __ Int32Constant(1);
1118 Node* args_count = __ Int32Sub(receiver_args_count, receiver_count); 934 Node* args_count = __ Int32Sub(receiver_args_count, receiver_count);
1119 935
1120 // Get the function to call from the native context. 936 // Get the function to call from the native context.
1121 Node* context = __ GetContext(); 937 Node* context = __ GetContext();
(...skipping 10 matching lines...) Expand all
1132 948
1133 949
1134 // CallJSRuntime <context_index> <receiver> <arg_count> 950 // CallJSRuntime <context_index> <receiver> <arg_count>
1135 // 951 //
1136 // Call the JS runtime function that has the |context_index| with the receiver 952 // Call the JS runtime function that has the |context_index| with the receiver
1137 // in register |receiver| and |arg_count| arguments in subsequent registers. 953 // in register |receiver| and |arg_count| arguments in subsequent registers.
1138 void Interpreter::DoCallJSRuntime(InterpreterAssembler* assembler) { 954 void Interpreter::DoCallJSRuntime(InterpreterAssembler* assembler) {
1139 DoCallJSRuntimeCommon(assembler); 955 DoCallJSRuntimeCommon(assembler);
1140 } 956 }
1141 957
1142
1143 // CallJSRuntimeWide <context_index> <receiver> <arg_count>
1144 //
1145 // Call the JS runtime function that has the |context_index| with the receiver
1146 // in register |receiver| and |arg_count| arguments in subsequent registers.
1147 void Interpreter::DoCallJSRuntimeWide(InterpreterAssembler* assembler) {
1148 DoCallJSRuntimeCommon(assembler);
1149 }
1150
1151 void Interpreter::DoCallConstruct(InterpreterAssembler* assembler) { 958 void Interpreter::DoCallConstruct(InterpreterAssembler* assembler) {
1152 Callable ic = CodeFactory::InterpreterPushArgsAndConstruct(isolate_); 959 Callable ic = CodeFactory::InterpreterPushArgsAndConstruct(isolate_);
1153 Node* new_target = __ GetAccumulator(); 960 Node* new_target = __ GetAccumulator();
1154 Node* constructor_reg = __ BytecodeOperandReg(0); 961 Node* constructor_reg = __ BytecodeOperandReg(0);
1155 Node* constructor = __ LoadRegister(constructor_reg); 962 Node* constructor = __ LoadRegister(constructor_reg);
1156 Node* first_arg_reg = __ BytecodeOperandReg(1); 963 Node* first_arg_reg = __ BytecodeOperandReg(1);
1157 Node* first_arg = __ RegisterLocation(first_arg_reg); 964 Node* first_arg = __ RegisterLocation(first_arg_reg);
1158 Node* args_count = __ BytecodeOperandCount(2); 965 Node* args_count = __ BytecodeOperandCount(2);
1159 Node* context = __ GetContext(); 966 Node* context = __ GetContext();
1160 Node* result = 967 Node* result =
1161 __ CallConstruct(constructor, context, new_target, first_arg, args_count); 968 __ CallConstruct(constructor, context, new_target, first_arg, args_count);
1162 __ SetAccumulator(result); 969 __ SetAccumulator(result);
1163 __ Dispatch(); 970 __ Dispatch();
1164 } 971 }
1165 972
1166 973
1167 // New <constructor> <first_arg> <arg_count> 974 // New <constructor> <first_arg> <arg_count>
1168 // 975 //
1169 // Call operator new with |constructor| and the first argument in 976 // Call operator new with |constructor| and the first argument in
1170 // register |first_arg| and |arg_count| arguments in subsequent 977 // register |first_arg| and |arg_count| arguments in subsequent
1171 // registers. The new.target is in the accumulator. 978 // registers. The new.target is in the accumulator.
1172 // 979 //
1173 void Interpreter::DoNew(InterpreterAssembler* assembler) { 980 void Interpreter::DoNew(InterpreterAssembler* assembler) {
1174 DoCallConstruct(assembler); 981 DoCallConstruct(assembler);
1175 } 982 }
1176 983
1177
1178 // NewWide <constructor> <first_arg> <arg_count>
1179 //
1180 // Call operator new with |constructor| and the first argument in
1181 // register |first_arg| and |arg_count| arguments in subsequent
1182 // registers. The new.target is in the accumulator.
1183 //
1184 void Interpreter::DoNewWide(InterpreterAssembler* assembler) {
1185 DoCallConstruct(assembler);
1186 }
1187
1188
1189 // TestEqual <src> 984 // TestEqual <src>
1190 // 985 //
1191 // Test if the value in the <src> register equals the accumulator. 986 // Test if the value in the <src> register equals the accumulator.
1192 void Interpreter::DoTestEqual(InterpreterAssembler* assembler) { 987 void Interpreter::DoTestEqual(InterpreterAssembler* assembler) {
1193 DoBinaryOp(CodeFactory::Equal(isolate_), assembler); 988 DoBinaryOp(CodeFactory::Equal(isolate_), assembler);
1194 } 989 }
1195 990
1196 991
1197 // TestNotEqual <src> 992 // TestNotEqual <src>
1198 // 993 //
(...skipping 89 matching lines...) Expand 10 before | Expand all | Expand 10 after
1288 } 1083 }
1289 1084
1290 1085
1291 // ToObject 1086 // ToObject
1292 // 1087 //
1293 // Cast the object referenced by the accumulator to a JSObject. 1088 // Cast the object referenced by the accumulator to a JSObject.
1294 void Interpreter::DoToObject(InterpreterAssembler* assembler) { 1089 void Interpreter::DoToObject(InterpreterAssembler* assembler) {
1295 DoTypeConversionOp(CodeFactory::ToObject(isolate_), assembler); 1090 DoTypeConversionOp(CodeFactory::ToObject(isolate_), assembler);
1296 } 1091 }
1297 1092
1298 1093 // Jump <imm>
1299 // Jump <imm8>
1300 // 1094 //
1301 // Jump by number of bytes represented by the immediate operand |imm8|. 1095 // Jump by number of bytes represented by the immediate operand |imm|.
1302 void Interpreter::DoJump(InterpreterAssembler* assembler) { 1096 void Interpreter::DoJump(InterpreterAssembler* assembler) {
1303 Node* relative_jump = __ BytecodeOperandImm(0); 1097 Node* relative_jump = __ BytecodeOperandImm(0);
1304 __ Jump(relative_jump); 1098 __ Jump(relative_jump);
1305 } 1099 }
1306 1100
1307 1101 // JumpConstant <idx>
1308 // JumpConstant <idx8>
1309 // 1102 //
1310 // Jump by number of bytes in the Smi in the |idx8| entry in the constant pool. 1103 // Jump by number of bytes in the Smi in the |idx| entry in the constant pool.
1311 void Interpreter::DoJumpConstant(InterpreterAssembler* assembler) { 1104 void Interpreter::DoJumpConstant(InterpreterAssembler* assembler) {
1312 Node* index = __ BytecodeOperandIdx(0); 1105 Node* index = __ BytecodeOperandIdx(0);
1313 Node* constant = __ LoadConstantPoolEntry(index); 1106 Node* constant = __ LoadConstantPoolEntry(index);
1314 Node* relative_jump = __ SmiUntag(constant); 1107 Node* relative_jump = __ SmiUntag(constant);
1315 __ Jump(relative_jump); 1108 __ Jump(relative_jump);
1316 } 1109 }
1317 1110
1318 1111 // JumpIfTrue <imm>
1319 // JumpConstantWide <idx16>
1320 //
1321 // Jump by number of bytes in the Smi in the |idx16| entry in the
1322 // constant pool.
1323 void Interpreter::DoJumpConstantWide(InterpreterAssembler* assembler) {
1324 DoJumpConstant(assembler);
1325 }
1326
1327
1328 // JumpIfTrue <imm8>
1329 // 1112 //
1330 // Jump by number of bytes represented by an immediate operand if the 1113 // Jump by number of bytes represented by an immediate operand if the
1331 // accumulator contains true. 1114 // accumulator contains true.
1332 void Interpreter::DoJumpIfTrue(InterpreterAssembler* assembler) { 1115 void Interpreter::DoJumpIfTrue(InterpreterAssembler* assembler) {
1333 Node* accumulator = __ GetAccumulator(); 1116 Node* accumulator = __ GetAccumulator();
1334 Node* relative_jump = __ BytecodeOperandImm(0); 1117 Node* relative_jump = __ BytecodeOperandImm(0);
1335 Node* true_value = __ BooleanConstant(true); 1118 Node* true_value = __ BooleanConstant(true);
1336 __ JumpIfWordEqual(accumulator, true_value, relative_jump); 1119 __ JumpIfWordEqual(accumulator, true_value, relative_jump);
1337 } 1120 }
1338 1121
1339 1122 // JumpIfTrueConstant <idx>
1340 // JumpIfTrueConstant <idx8>
1341 // 1123 //
1342 // Jump by number of bytes in the Smi in the |idx8| entry in the constant pool 1124 // Jump by number of bytes in the Smi in the |idx| entry in the constant pool
1343 // if the accumulator contains true. 1125 // if the accumulator contains true.
1344 void Interpreter::DoJumpIfTrueConstant(InterpreterAssembler* assembler) { 1126 void Interpreter::DoJumpIfTrueConstant(InterpreterAssembler* assembler) {
1345 Node* accumulator = __ GetAccumulator(); 1127 Node* accumulator = __ GetAccumulator();
1346 Node* index = __ BytecodeOperandIdx(0); 1128 Node* index = __ BytecodeOperandIdx(0);
1347 Node* constant = __ LoadConstantPoolEntry(index); 1129 Node* constant = __ LoadConstantPoolEntry(index);
1348 Node* relative_jump = __ SmiUntag(constant); 1130 Node* relative_jump = __ SmiUntag(constant);
1349 Node* true_value = __ BooleanConstant(true); 1131 Node* true_value = __ BooleanConstant(true);
1350 __ JumpIfWordEqual(accumulator, true_value, relative_jump); 1132 __ JumpIfWordEqual(accumulator, true_value, relative_jump);
1351 } 1133 }
1352 1134
1353 1135 // JumpIfFalse <imm>
1354 // JumpIfTrueConstantWide <idx16>
1355 //
1356 // Jump by number of bytes in the Smi in the |idx16| entry in the constant pool
1357 // if the accumulator contains true.
1358 void Interpreter::DoJumpIfTrueConstantWide(InterpreterAssembler* assembler) {
1359 DoJumpIfTrueConstant(assembler);
1360 }
1361
1362
1363 // JumpIfFalse <imm8>
1364 // 1136 //
1365 // Jump by number of bytes represented by an immediate operand if the 1137 // Jump by number of bytes represented by an immediate operand if the
1366 // accumulator contains false. 1138 // accumulator contains false.
1367 void Interpreter::DoJumpIfFalse(InterpreterAssembler* assembler) { 1139 void Interpreter::DoJumpIfFalse(InterpreterAssembler* assembler) {
1368 Node* accumulator = __ GetAccumulator(); 1140 Node* accumulator = __ GetAccumulator();
1369 Node* relative_jump = __ BytecodeOperandImm(0); 1141 Node* relative_jump = __ BytecodeOperandImm(0);
1370 Node* false_value = __ BooleanConstant(false); 1142 Node* false_value = __ BooleanConstant(false);
1371 __ JumpIfWordEqual(accumulator, false_value, relative_jump); 1143 __ JumpIfWordEqual(accumulator, false_value, relative_jump);
1372 } 1144 }
1373 1145
1374 1146 // JumpIfFalseConstant <idx>
1375 // JumpIfFalseConstant <idx8>
1376 // 1147 //
1377 // Jump by number of bytes in the Smi in the |idx8| entry in the constant pool 1148 // Jump by number of bytes in the Smi in the |idx| entry in the constant pool
1378 // if the accumulator contains false. 1149 // if the accumulator contains false.
1379 void Interpreter::DoJumpIfFalseConstant(InterpreterAssembler* assembler) { 1150 void Interpreter::DoJumpIfFalseConstant(InterpreterAssembler* assembler) {
1380 Node* accumulator = __ GetAccumulator(); 1151 Node* accumulator = __ GetAccumulator();
1381 Node* index = __ BytecodeOperandIdx(0); 1152 Node* index = __ BytecodeOperandIdx(0);
1382 Node* constant = __ LoadConstantPoolEntry(index); 1153 Node* constant = __ LoadConstantPoolEntry(index);
1383 Node* relative_jump = __ SmiUntag(constant); 1154 Node* relative_jump = __ SmiUntag(constant);
1384 Node* false_value = __ BooleanConstant(false); 1155 Node* false_value = __ BooleanConstant(false);
1385 __ JumpIfWordEqual(accumulator, false_value, relative_jump); 1156 __ JumpIfWordEqual(accumulator, false_value, relative_jump);
1386 } 1157 }
1387 1158
1388 1159 // JumpIfToBooleanTrue <imm>
1389 // JumpIfFalseConstant <idx16>
1390 //
1391 // Jump by number of bytes in the Smi in the |idx16| entry in the constant pool
1392 // if the accumulator contains false.
1393 void Interpreter::DoJumpIfFalseConstantWide(InterpreterAssembler* assembler) {
1394 DoJumpIfFalseConstant(assembler);
1395 }
1396
1397
1398 // JumpIfToBooleanTrue <imm8>
1399 // 1160 //
1400 // Jump by number of bytes represented by an immediate operand if the object 1161 // Jump by number of bytes represented by an immediate operand if the object
1401 // referenced by the accumulator is true when the object is cast to boolean. 1162 // referenced by the accumulator is true when the object is cast to boolean.
1402 void Interpreter::DoJumpIfToBooleanTrue(InterpreterAssembler* assembler) { 1163 void Interpreter::DoJumpIfToBooleanTrue(InterpreterAssembler* assembler) {
1403 Callable callable = CodeFactory::ToBoolean(isolate_); 1164 Callable callable = CodeFactory::ToBoolean(isolate_);
1404 Node* target = __ HeapConstant(callable.code()); 1165 Node* target = __ HeapConstant(callable.code());
1405 Node* accumulator = __ GetAccumulator(); 1166 Node* accumulator = __ GetAccumulator();
1406 Node* context = __ GetContext(); 1167 Node* context = __ GetContext();
1407 Node* to_boolean_value = 1168 Node* to_boolean_value =
1408 __ CallStub(callable.descriptor(), target, context, accumulator); 1169 __ CallStub(callable.descriptor(), target, context, accumulator);
1409 Node* relative_jump = __ BytecodeOperandImm(0); 1170 Node* relative_jump = __ BytecodeOperandImm(0);
1410 Node* true_value = __ BooleanConstant(true); 1171 Node* true_value = __ BooleanConstant(true);
1411 __ JumpIfWordEqual(to_boolean_value, true_value, relative_jump); 1172 __ JumpIfWordEqual(to_boolean_value, true_value, relative_jump);
1412 } 1173 }
1413 1174
1414 1175 // JumpIfToBooleanTrueConstant <idx>
1415 // JumpIfToBooleanTrueConstant <idx8>
1416 // 1176 //
1417 // Jump by number of bytes in the Smi in the |idx8| entry in the constant pool 1177 // Jump by number of bytes in the Smi in the |idx| entry in the constant pool
1418 // if the object referenced by the accumulator is true when the object is cast 1178 // if the object referenced by the accumulator is true when the object is cast
1419 // to boolean. 1179 // to boolean.
1420 void Interpreter::DoJumpIfToBooleanTrueConstant( 1180 void Interpreter::DoJumpIfToBooleanTrueConstant(
1421 InterpreterAssembler* assembler) { 1181 InterpreterAssembler* assembler) {
1422 Callable callable = CodeFactory::ToBoolean(isolate_); 1182 Callable callable = CodeFactory::ToBoolean(isolate_);
1423 Node* target = __ HeapConstant(callable.code()); 1183 Node* target = __ HeapConstant(callable.code());
1424 Node* accumulator = __ GetAccumulator(); 1184 Node* accumulator = __ GetAccumulator();
1425 Node* context = __ GetContext(); 1185 Node* context = __ GetContext();
1426 Node* to_boolean_value = 1186 Node* to_boolean_value =
1427 __ CallStub(callable.descriptor(), target, context, accumulator); 1187 __ CallStub(callable.descriptor(), target, context, accumulator);
1428 Node* index = __ BytecodeOperandIdx(0); 1188 Node* index = __ BytecodeOperandIdx(0);
1429 Node* constant = __ LoadConstantPoolEntry(index); 1189 Node* constant = __ LoadConstantPoolEntry(index);
1430 Node* relative_jump = __ SmiUntag(constant); 1190 Node* relative_jump = __ SmiUntag(constant);
1431 Node* true_value = __ BooleanConstant(true); 1191 Node* true_value = __ BooleanConstant(true);
1432 __ JumpIfWordEqual(to_boolean_value, true_value, relative_jump); 1192 __ JumpIfWordEqual(to_boolean_value, true_value, relative_jump);
1433 } 1193 }
1434 1194
1435 1195 // JumpIfToBooleanFalse <imm>
1436 // JumpIfToBooleanTrueConstantWide <idx16>
1437 //
1438 // Jump by number of bytes in the Smi in the |idx16| entry in the constant pool
1439 // if the object referenced by the accumulator is true when the object is cast
1440 // to boolean.
1441 void Interpreter::DoJumpIfToBooleanTrueConstantWide(
1442 InterpreterAssembler* assembler) {
1443 DoJumpIfToBooleanTrueConstant(assembler);
1444 }
1445
1446
1447 // JumpIfToBooleanFalse <imm8>
1448 // 1196 //
1449 // Jump by number of bytes represented by an immediate operand if the object 1197 // Jump by number of bytes represented by an immediate operand if the object
1450 // referenced by the accumulator is false when the object is cast to boolean. 1198 // referenced by the accumulator is false when the object is cast to boolean.
1451 void Interpreter::DoJumpIfToBooleanFalse(InterpreterAssembler* assembler) { 1199 void Interpreter::DoJumpIfToBooleanFalse(InterpreterAssembler* assembler) {
1452 Callable callable = CodeFactory::ToBoolean(isolate_); 1200 Callable callable = CodeFactory::ToBoolean(isolate_);
1453 Node* target = __ HeapConstant(callable.code()); 1201 Node* target = __ HeapConstant(callable.code());
1454 Node* accumulator = __ GetAccumulator(); 1202 Node* accumulator = __ GetAccumulator();
1455 Node* context = __ GetContext(); 1203 Node* context = __ GetContext();
1456 Node* to_boolean_value = 1204 Node* to_boolean_value =
1457 __ CallStub(callable.descriptor(), target, context, accumulator); 1205 __ CallStub(callable.descriptor(), target, context, accumulator);
1458 Node* relative_jump = __ BytecodeOperandImm(0); 1206 Node* relative_jump = __ BytecodeOperandImm(0);
1459 Node* false_value = __ BooleanConstant(false); 1207 Node* false_value = __ BooleanConstant(false);
1460 __ JumpIfWordEqual(to_boolean_value, false_value, relative_jump); 1208 __ JumpIfWordEqual(to_boolean_value, false_value, relative_jump);
1461 } 1209 }
1462 1210
1463 1211 // JumpIfToBooleanFalseConstant <idx>
1464 // JumpIfToBooleanFalseConstant <idx8>
1465 // 1212 //
1466 // Jump by number of bytes in the Smi in the |idx8| entry in the constant pool 1213 // Jump by number of bytes in the Smi in the |idx| entry in the constant pool
1467 // if the object referenced by the accumulator is false when the object is cast 1214 // if the object referenced by the accumulator is false when the object is cast
1468 // to boolean. 1215 // to boolean.
1469 void Interpreter::DoJumpIfToBooleanFalseConstant( 1216 void Interpreter::DoJumpIfToBooleanFalseConstant(
1470 InterpreterAssembler* assembler) { 1217 InterpreterAssembler* assembler) {
1471 Callable callable = CodeFactory::ToBoolean(isolate_); 1218 Callable callable = CodeFactory::ToBoolean(isolate_);
1472 Node* target = __ HeapConstant(callable.code()); 1219 Node* target = __ HeapConstant(callable.code());
1473 Node* accumulator = __ GetAccumulator(); 1220 Node* accumulator = __ GetAccumulator();
1474 Node* context = __ GetContext(); 1221 Node* context = __ GetContext();
1475 Node* to_boolean_value = 1222 Node* to_boolean_value =
1476 __ CallStub(callable.descriptor(), target, context, accumulator); 1223 __ CallStub(callable.descriptor(), target, context, accumulator);
1477 Node* index = __ BytecodeOperandIdx(0); 1224 Node* index = __ BytecodeOperandIdx(0);
1478 Node* constant = __ LoadConstantPoolEntry(index); 1225 Node* constant = __ LoadConstantPoolEntry(index);
1479 Node* relative_jump = __ SmiUntag(constant); 1226 Node* relative_jump = __ SmiUntag(constant);
1480 Node* false_value = __ BooleanConstant(false); 1227 Node* false_value = __ BooleanConstant(false);
1481 __ JumpIfWordEqual(to_boolean_value, false_value, relative_jump); 1228 __ JumpIfWordEqual(to_boolean_value, false_value, relative_jump);
1482 } 1229 }
1483 1230
1484 1231 // JumpIfNull <imm>
1485 // JumpIfToBooleanFalseConstantWide <idx16>
1486 //
1487 // Jump by number of bytes in the Smi in the |idx16| entry in the constant pool
1488 // if the object referenced by the accumulator is false when the object is cast
1489 // to boolean.
1490 void Interpreter::DoJumpIfToBooleanFalseConstantWide(
1491 InterpreterAssembler* assembler) {
1492 DoJumpIfToBooleanFalseConstant(assembler);
1493 }
1494
1495
1496 // JumpIfNull <imm8>
1497 // 1232 //
1498 // Jump by number of bytes represented by an immediate operand if the object 1233 // Jump by number of bytes represented by an immediate operand if the object
1499 // referenced by the accumulator is the null constant. 1234 // referenced by the accumulator is the null constant.
1500 void Interpreter::DoJumpIfNull(InterpreterAssembler* assembler) { 1235 void Interpreter::DoJumpIfNull(InterpreterAssembler* assembler) {
1501 Node* accumulator = __ GetAccumulator(); 1236 Node* accumulator = __ GetAccumulator();
1502 Node* null_value = __ HeapConstant(isolate_->factory()->null_value()); 1237 Node* null_value = __ HeapConstant(isolate_->factory()->null_value());
1503 Node* relative_jump = __ BytecodeOperandImm(0); 1238 Node* relative_jump = __ BytecodeOperandImm(0);
1504 __ JumpIfWordEqual(accumulator, null_value, relative_jump); 1239 __ JumpIfWordEqual(accumulator, null_value, relative_jump);
1505 } 1240 }
1506 1241
1507 1242 // JumpIfNullConstant <idx>
1508 // JumpIfNullConstant <idx8>
1509 // 1243 //
1510 // Jump by number of bytes in the Smi in the |idx8| entry in the constant pool 1244 // Jump by number of bytes in the Smi in the |idx| entry in the constant pool
1511 // if the object referenced by the accumulator is the null constant. 1245 // if the object referenced by the accumulator is the null constant.
1512 void Interpreter::DoJumpIfNullConstant(InterpreterAssembler* assembler) { 1246 void Interpreter::DoJumpIfNullConstant(InterpreterAssembler* assembler) {
1513 Node* accumulator = __ GetAccumulator(); 1247 Node* accumulator = __ GetAccumulator();
1514 Node* null_value = __ HeapConstant(isolate_->factory()->null_value()); 1248 Node* null_value = __ HeapConstant(isolate_->factory()->null_value());
1515 Node* index = __ BytecodeOperandIdx(0); 1249 Node* index = __ BytecodeOperandIdx(0);
1516 Node* constant = __ LoadConstantPoolEntry(index); 1250 Node* constant = __ LoadConstantPoolEntry(index);
1517 Node* relative_jump = __ SmiUntag(constant); 1251 Node* relative_jump = __ SmiUntag(constant);
1518 __ JumpIfWordEqual(accumulator, null_value, relative_jump); 1252 __ JumpIfWordEqual(accumulator, null_value, relative_jump);
1519 } 1253 }
1520 1254
1521 1255 // JumpIfUndefined <imm>
1522 // JumpIfNullConstantWide <idx16>
1523 //
1524 // Jump by number of bytes in the Smi in the |idx16| entry in the constant pool
1525 // if the object referenced by the accumulator is the null constant.
1526 void Interpreter::DoJumpIfNullConstantWide(InterpreterAssembler* assembler) {
1527 DoJumpIfNullConstant(assembler);
1528 }
1529
1530 // JumpIfUndefined <imm8>
1531 // 1256 //
1532 // Jump by number of bytes represented by an immediate operand if the object 1257 // Jump by number of bytes represented by an immediate operand if the object
1533 // referenced by the accumulator is the undefined constant. 1258 // referenced by the accumulator is the undefined constant.
1534 void Interpreter::DoJumpIfUndefined(InterpreterAssembler* assembler) { 1259 void Interpreter::DoJumpIfUndefined(InterpreterAssembler* assembler) {
1535 Node* accumulator = __ GetAccumulator(); 1260 Node* accumulator = __ GetAccumulator();
1536 Node* undefined_value = 1261 Node* undefined_value =
1537 __ HeapConstant(isolate_->factory()->undefined_value()); 1262 __ HeapConstant(isolate_->factory()->undefined_value());
1538 Node* relative_jump = __ BytecodeOperandImm(0); 1263 Node* relative_jump = __ BytecodeOperandImm(0);
1539 __ JumpIfWordEqual(accumulator, undefined_value, relative_jump); 1264 __ JumpIfWordEqual(accumulator, undefined_value, relative_jump);
1540 } 1265 }
1541 1266
1542 1267 // JumpIfUndefinedConstant <idx>
1543 // JumpIfUndefinedConstant <idx8>
1544 // 1268 //
1545 // Jump by number of bytes in the Smi in the |idx8| entry in the constant pool 1269 // Jump by number of bytes in the Smi in the |idx| entry in the constant pool
1546 // if the object referenced by the accumulator is the undefined constant. 1270 // if the object referenced by the accumulator is the undefined constant.
1547 void Interpreter::DoJumpIfUndefinedConstant(InterpreterAssembler* assembler) { 1271 void Interpreter::DoJumpIfUndefinedConstant(InterpreterAssembler* assembler) {
1548 Node* accumulator = __ GetAccumulator(); 1272 Node* accumulator = __ GetAccumulator();
1549 Node* undefined_value = 1273 Node* undefined_value =
1550 __ HeapConstant(isolate_->factory()->undefined_value()); 1274 __ HeapConstant(isolate_->factory()->undefined_value());
1551 Node* index = __ BytecodeOperandIdx(0); 1275 Node* index = __ BytecodeOperandIdx(0);
1552 Node* constant = __ LoadConstantPoolEntry(index); 1276 Node* constant = __ LoadConstantPoolEntry(index);
1553 Node* relative_jump = __ SmiUntag(constant); 1277 Node* relative_jump = __ SmiUntag(constant);
1554 __ JumpIfWordEqual(accumulator, undefined_value, relative_jump); 1278 __ JumpIfWordEqual(accumulator, undefined_value, relative_jump);
1555 } 1279 }
1556 1280
1557 1281 // JumpIfNotHole <imm>
1558 // JumpIfUndefinedConstantWide <idx16>
1559 //
1560 // Jump by number of bytes in the Smi in the |idx16| entry in the constant pool
1561 // if the object referenced by the accumulator is the undefined constant.
1562 void Interpreter::DoJumpIfUndefinedConstantWide(
1563 InterpreterAssembler* assembler) {
1564 DoJumpIfUndefinedConstant(assembler);
1565 }
1566
1567 // JumpIfNotHole <imm8>
1568 // 1282 //
1569 // Jump by number of bytes represented by an immediate operand if the object 1283 // Jump by number of bytes represented by an immediate operand if the object
1570 // referenced by the accumulator is the hole. 1284 // referenced by the accumulator is the hole.
1571 void Interpreter::DoJumpIfNotHole(InterpreterAssembler* assembler) { 1285 void Interpreter::DoJumpIfNotHole(InterpreterAssembler* assembler) {
1572 Node* accumulator = __ GetAccumulator(); 1286 Node* accumulator = __ GetAccumulator();
1573 Node* the_hole_value = __ HeapConstant(isolate_->factory()->the_hole_value()); 1287 Node* the_hole_value = __ HeapConstant(isolate_->factory()->the_hole_value());
1574 Node* relative_jump = __ BytecodeOperandImm(0); 1288 Node* relative_jump = __ BytecodeOperandImm(0);
1575 __ JumpIfWordNotEqual(accumulator, the_hole_value, relative_jump); 1289 __ JumpIfWordNotEqual(accumulator, the_hole_value, relative_jump);
1576 } 1290 }
1577 1291
1578 // JumpIfNotHoleConstant <idx8> 1292 // JumpIfNotHoleConstant <idx>
1579 // 1293 //
1580 // Jump by number of bytes in the Smi in the |idx8| entry in the constant pool 1294 // Jump by number of bytes in the Smi in the |idx| entry in the constant pool
1581 // if the object referenced by the accumulator is the hole constant. 1295 // if the object referenced by the accumulator is the hole constant.
1582 void Interpreter::DoJumpIfNotHoleConstant(InterpreterAssembler* assembler) { 1296 void Interpreter::DoJumpIfNotHoleConstant(InterpreterAssembler* assembler) {
1583 Node* accumulator = __ GetAccumulator(); 1297 Node* accumulator = __ GetAccumulator();
1584 Node* the_hole_value = __ HeapConstant(isolate_->factory()->the_hole_value()); 1298 Node* the_hole_value = __ HeapConstant(isolate_->factory()->the_hole_value());
1585 Node* index = __ BytecodeOperandIdx(0); 1299 Node* index = __ BytecodeOperandIdx(0);
1586 Node* constant = __ LoadConstantPoolEntry(index); 1300 Node* constant = __ LoadConstantPoolEntry(index);
1587 Node* relative_jump = __ SmiUntag(constant); 1301 Node* relative_jump = __ SmiUntag(constant);
1588 __ JumpIfWordNotEqual(accumulator, the_hole_value, relative_jump); 1302 __ JumpIfWordNotEqual(accumulator, the_hole_value, relative_jump);
1589 } 1303 }
1590 1304
1591 // JumpIfNotHoleConstantWide <idx16>
1592 //
1593 // Jump by number of bytes in the Smi in the |idx16| entry in the constant pool
1594 // if the object referenced by the accumulator is the hole constant.
1595 void Interpreter::DoJumpIfNotHoleConstantWide(InterpreterAssembler* assembler) {
1596 DoJumpIfNotHoleConstant(assembler);
1597 }
1598
1599 void Interpreter::DoCreateLiteral(Runtime::FunctionId function_id, 1305 void Interpreter::DoCreateLiteral(Runtime::FunctionId function_id,
1600 InterpreterAssembler* assembler) { 1306 InterpreterAssembler* assembler) {
1601 Node* index = __ BytecodeOperandIdx(0); 1307 Node* index = __ BytecodeOperandIdx(0);
1602 Node* constant_elements = __ LoadConstantPoolEntry(index); 1308 Node* constant_elements = __ LoadConstantPoolEntry(index);
1603 Node* literal_index_raw = __ BytecodeOperandIdx(1); 1309 Node* literal_index_raw = __ BytecodeOperandIdx(1);
1604 Node* literal_index = __ SmiTag(literal_index_raw); 1310 Node* literal_index = __ SmiTag(literal_index_raw);
1605 Node* flags_raw = __ BytecodeOperandImm(2); 1311 Node* flags_raw = __ BytecodeOperandFlag(2);
1606 Node* flags = __ SmiTag(flags_raw); 1312 Node* flags = __ SmiTag(flags_raw);
1607 Node* closure = __ LoadRegister(Register::function_closure()); 1313 Node* closure = __ LoadRegister(Register::function_closure());
1608 Node* context = __ GetContext(); 1314 Node* context = __ GetContext();
1609 Node* result = __ CallRuntime(function_id, context, closure, literal_index, 1315 Node* result = __ CallRuntime(function_id, context, closure, literal_index,
1610 constant_elements, flags); 1316 constant_elements, flags);
1611 __ SetAccumulator(result); 1317 __ SetAccumulator(result);
1612 __ Dispatch(); 1318 __ Dispatch();
1613 } 1319 }
1614 1320
1615 1321
1616 // CreateRegExpLiteral <pattern_idx> <literal_idx> <flags> 1322 // CreateRegExpLiteral <pattern_idx> <literal_idx> <flags>
1617 // 1323 //
1618 // Creates a regular expression literal for literal index <literal_idx> with 1324 // Creates a regular expression literal for literal index <literal_idx> with
1619 // <flags> and the pattern in <pattern_idx>. 1325 // <flags> and the pattern in <pattern_idx>.
1620 void Interpreter::DoCreateRegExpLiteral(InterpreterAssembler* assembler) { 1326 void Interpreter::DoCreateRegExpLiteral(InterpreterAssembler* assembler) {
1621 Callable callable = CodeFactory::FastCloneRegExp(isolate_); 1327 Callable callable = CodeFactory::FastCloneRegExp(isolate_);
1622 Node* target = __ HeapConstant(callable.code()); 1328 Node* target = __ HeapConstant(callable.code());
1623 Node* index = __ BytecodeOperandIdx(0); 1329 Node* index = __ BytecodeOperandIdx(0);
1624 Node* pattern = __ LoadConstantPoolEntry(index); 1330 Node* pattern = __ LoadConstantPoolEntry(index);
1625 Node* literal_index_raw = __ BytecodeOperandIdx(1); 1331 Node* literal_index_raw = __ BytecodeOperandIdx(1);
1626 Node* literal_index = __ SmiTag(literal_index_raw); 1332 Node* literal_index = __ SmiTag(literal_index_raw);
1627 Node* flags_raw = __ BytecodeOperandImm(2); 1333 Node* flags_raw = __ BytecodeOperandFlag(2);
1628 Node* flags = __ SmiTag(flags_raw); 1334 Node* flags = __ SmiTag(flags_raw);
1629 Node* closure = __ LoadRegister(Register::function_closure()); 1335 Node* closure = __ LoadRegister(Register::function_closure());
1630 Node* context = __ GetContext(); 1336 Node* context = __ GetContext();
1631 Node* result = __ CallStub(callable.descriptor(), target, context, closure, 1337 Node* result = __ CallStub(callable.descriptor(), target, context, closure,
1632 literal_index, pattern, flags); 1338 literal_index, pattern, flags);
1633 __ SetAccumulator(result); 1339 __ SetAccumulator(result);
1634 __ Dispatch(); 1340 __ Dispatch();
1635 } 1341 }
1636 1342
1637
1638 // CreateRegExpLiteralWide <pattern_idx> <literal_idx> <flags>
1639 //
1640 // Creates a regular expression literal for literal index <literal_idx> with
1641 // <flags> and the pattern in <pattern_idx>.
1642 void Interpreter::DoCreateRegExpLiteralWide(InterpreterAssembler* assembler) {
1643 DoCreateRegExpLiteral(assembler);
1644 }
1645
1646
1647 // CreateArrayLiteral <element_idx> <literal_idx> <flags> 1343 // CreateArrayLiteral <element_idx> <literal_idx> <flags>
1648 // 1344 //
1649 // Creates an array literal for literal index <literal_idx> with flags <flags> 1345 // Creates an array literal for literal index <literal_idx> with flags <flags>
1650 // and constant elements in <element_idx>. 1346 // and constant elements in <element_idx>.
1651 void Interpreter::DoCreateArrayLiteral(InterpreterAssembler* assembler) { 1347 void Interpreter::DoCreateArrayLiteral(InterpreterAssembler* assembler) {
1652 DoCreateLiteral(Runtime::kCreateArrayLiteral, assembler); 1348 DoCreateLiteral(Runtime::kCreateArrayLiteral, assembler);
1653 } 1349 }
1654 1350
1655
1656 // CreateArrayLiteralWide <element_idx> <literal_idx> <flags>
1657 //
1658 // Creates an array literal for literal index <literal_idx> with flags <flags>
1659 // and constant elements in <element_idx>.
1660 void Interpreter::DoCreateArrayLiteralWide(InterpreterAssembler* assembler) {
1661 DoCreateLiteral(Runtime::kCreateArrayLiteral, assembler);
1662 }
1663
1664
1665 // CreateObjectLiteral <element_idx> <literal_idx> <flags> 1351 // CreateObjectLiteral <element_idx> <literal_idx> <flags>
1666 // 1352 //
1667 // Creates an object literal for literal index <literal_idx> with flags <flags> 1353 // Creates an object literal for literal index <literal_idx> with flags <flags>
1668 // and constant elements in <element_idx>. 1354 // and constant elements in <element_idx>.
1669 void Interpreter::DoCreateObjectLiteral(InterpreterAssembler* assembler) { 1355 void Interpreter::DoCreateObjectLiteral(InterpreterAssembler* assembler) {
1670 DoCreateLiteral(Runtime::kCreateObjectLiteral, assembler); 1356 DoCreateLiteral(Runtime::kCreateObjectLiteral, assembler);
1671 } 1357 }
1672 1358
1673
1674 // CreateObjectLiteralWide <element_idx> <literal_idx> <flags>
1675 //
1676 // Creates an object literal for literal index <literal_idx> with flags <flags>
1677 // and constant elements in <element_idx>.
1678 void Interpreter::DoCreateObjectLiteralWide(InterpreterAssembler* assembler) {
1679 DoCreateLiteral(Runtime::kCreateObjectLiteral, assembler);
1680 }
1681
1682
1683 // CreateClosure <index> <tenured> 1359 // CreateClosure <index> <tenured>
1684 // 1360 //
1685 // Creates a new closure for SharedFunctionInfo at position |index| in the 1361 // Creates a new closure for SharedFunctionInfo at position |index| in the
1686 // constant pool and with the PretenureFlag <tenured>. 1362 // constant pool and with the PretenureFlag <tenured>.
1687 void Interpreter::DoCreateClosure(InterpreterAssembler* assembler) { 1363 void Interpreter::DoCreateClosure(InterpreterAssembler* assembler) {
1688 // TODO(rmcilroy): Possibly call FastNewClosureStub when possible instead of 1364 // TODO(rmcilroy): Possibly call FastNewClosureStub when possible instead of
1689 // calling into the runtime. 1365 // calling into the runtime.
1690 Node* index = __ BytecodeOperandIdx(0); 1366 Node* index = __ BytecodeOperandIdx(0);
1691 Node* shared = __ LoadConstantPoolEntry(index); 1367 Node* shared = __ LoadConstantPoolEntry(index);
1692 Node* tenured_raw = __ BytecodeOperandImm(1); 1368 Node* tenured_raw = __ BytecodeOperandFlag(1);
1693 Node* tenured = __ SmiTag(tenured_raw); 1369 Node* tenured = __ SmiTag(tenured_raw);
1694 Node* context = __ GetContext(); 1370 Node* context = __ GetContext();
1695 Node* result = 1371 Node* result =
1696 __ CallRuntime(Runtime::kInterpreterNewClosure, context, shared, tenured); 1372 __ CallRuntime(Runtime::kInterpreterNewClosure, context, shared, tenured);
1697 __ SetAccumulator(result); 1373 __ SetAccumulator(result);
1698 __ Dispatch(); 1374 __ Dispatch();
1699 } 1375 }
1700 1376
1701
1702 // CreateClosureWide <index> <tenured>
1703 //
1704 // Creates a new closure for SharedFunctionInfo at position |index| in the
1705 // constant pool and with the PretenureFlag <tenured>.
1706 void Interpreter::DoCreateClosureWide(InterpreterAssembler* assembler) {
1707 return DoCreateClosure(assembler);
1708 }
1709
1710
1711 // CreateMappedArguments 1377 // CreateMappedArguments
1712 // 1378 //
1713 // Creates a new mapped arguments object. 1379 // Creates a new mapped arguments object.
1714 void Interpreter::DoCreateMappedArguments(InterpreterAssembler* assembler) { 1380 void Interpreter::DoCreateMappedArguments(InterpreterAssembler* assembler) {
1715 Node* closure = __ LoadRegister(Register::function_closure()); 1381 Node* closure = __ LoadRegister(Register::function_closure());
1716 Node* context = __ GetContext(); 1382 Node* context = __ GetContext();
1717 Node* result = 1383 Node* result =
1718 __ CallRuntime(Runtime::kNewSloppyArguments_Generic, context, closure); 1384 __ CallRuntime(Runtime::kNewSloppyArguments_Generic, context, closure);
1719 __ SetAccumulator(result); 1385 __ SetAccumulator(result);
1720 __ Dispatch(); 1386 __ Dispatch();
(...skipping 101 matching lines...) Expand 10 before | Expand all | Expand 10 after
1822 // 0 == cache_type, 1 == cache_array, 2 == cache_length 1488 // 0 == cache_type, 1 == cache_array, 2 == cache_length
1823 Node* output_register = __ BytecodeOperandReg(0); 1489 Node* output_register = __ BytecodeOperandReg(0);
1824 for (int i = 0; i < 3; i++) { 1490 for (int i = 0; i < 3; i++) {
1825 Node* cache_info = __ Projection(i, result_triple); 1491 Node* cache_info = __ Projection(i, result_triple);
1826 __ StoreRegister(cache_info, output_register); 1492 __ StoreRegister(cache_info, output_register);
1827 output_register = __ NextRegister(output_register); 1493 output_register = __ NextRegister(output_register);
1828 } 1494 }
1829 __ Dispatch(); 1495 __ Dispatch();
1830 } 1496 }
1831 1497
1832
1833 // ForInPrepareWide <cache_info_triple>
1834 //
1835 // Returns state for for..in loop execution based on the object in the
1836 // accumulator. The result is output in registers |cache_info_triple| to
1837 // |cache_info_triple + 2|, with the registers holding cache_type, cache_array,
1838 // and cache_length respectively.
1839 void Interpreter::DoForInPrepareWide(InterpreterAssembler* assembler) {
1840 DoForInPrepare(assembler);
1841 }
1842
1843
1844 // ForInNext <receiver> <index> <cache_info_pair> 1498 // ForInNext <receiver> <index> <cache_info_pair>
1845 // 1499 //
1846 // Returns the next enumerable property in the the accumulator. 1500 // Returns the next enumerable property in the the accumulator.
1847 void Interpreter::DoForInNext(InterpreterAssembler* assembler) { 1501 void Interpreter::DoForInNext(InterpreterAssembler* assembler) {
1848 Node* receiver_reg = __ BytecodeOperandReg(0); 1502 Node* receiver_reg = __ BytecodeOperandReg(0);
1849 Node* receiver = __ LoadRegister(receiver_reg); 1503 Node* receiver = __ LoadRegister(receiver_reg);
1850 Node* index_reg = __ BytecodeOperandReg(1); 1504 Node* index_reg = __ BytecodeOperandReg(1);
1851 Node* index = __ LoadRegister(index_reg); 1505 Node* index = __ LoadRegister(index_reg);
1852 Node* cache_type_reg = __ BytecodeOperandReg(2); 1506 Node* cache_type_reg = __ BytecodeOperandReg(2);
1853 Node* cache_type = __ LoadRegister(cache_type_reg); 1507 Node* cache_type = __ LoadRegister(cache_type_reg);
(...skipping 26 matching lines...) Expand all
1880 1534
1881 // Need to filter the {key} for the {receiver}. 1535 // Need to filter the {key} for the {receiver}.
1882 Node* context = __ GetContext(); 1536 Node* context = __ GetContext();
1883 Node* result = 1537 Node* result =
1884 __ CallRuntime(Runtime::kForInFilter, context, receiver, key); 1538 __ CallRuntime(Runtime::kForInFilter, context, receiver, key);
1885 __ SetAccumulator(result); 1539 __ SetAccumulator(result);
1886 __ Dispatch(); 1540 __ Dispatch();
1887 } 1541 }
1888 } 1542 }
1889 1543
1890
1891 // ForInNextWide <receiver> <index> <cache_info_pair>
1892 //
1893 // Returns the next enumerable property in the the accumulator.
1894 void Interpreter::DoForInNextWide(InterpreterAssembler* assembler) {
1895 return DoForInNext(assembler);
1896 }
1897
1898
1899 // ForInDone <index> <cache_length> 1544 // ForInDone <index> <cache_length>
1900 // 1545 //
1901 // Returns true if the end of the enumerable properties has been reached. 1546 // Returns true if the end of the enumerable properties has been reached.
1902 void Interpreter::DoForInDone(InterpreterAssembler* assembler) { 1547 void Interpreter::DoForInDone(InterpreterAssembler* assembler) {
1903 // TODO(oth): Implement directly rather than making a runtime call. 1548 // TODO(oth): Implement directly rather than making a runtime call.
1904 Node* index_reg = __ BytecodeOperandReg(0); 1549 Node* index_reg = __ BytecodeOperandReg(0);
1905 Node* index = __ LoadRegister(index_reg); 1550 Node* index = __ LoadRegister(index_reg);
1906 Node* cache_length_reg = __ BytecodeOperandReg(1); 1551 Node* cache_length_reg = __ BytecodeOperandReg(1);
1907 Node* cache_length = __ LoadRegister(cache_length_reg); 1552 Node* cache_length = __ LoadRegister(cache_length_reg);
1908 Node* context = __ GetContext(); 1553 Node* context = __ GetContext();
1909 Node* result = 1554 Node* result =
1910 __ CallRuntime(Runtime::kForInDone, context, index, cache_length); 1555 __ CallRuntime(Runtime::kForInDone, context, index, cache_length);
1911 __ SetAccumulator(result); 1556 __ SetAccumulator(result);
1912 __ Dispatch(); 1557 __ Dispatch();
1913 } 1558 }
1914 1559
1915
1916 // ForInStep <index> 1560 // ForInStep <index>
1917 // 1561 //
1918 // Increments the loop counter in register |index| and stores the result 1562 // Increments the loop counter in register |index| and stores the result
1919 // in the accumulator. 1563 // in the accumulator.
1920 void Interpreter::DoForInStep(InterpreterAssembler* assembler) { 1564 void Interpreter::DoForInStep(InterpreterAssembler* assembler) {
1921 Node* index_reg = __ BytecodeOperandReg(0); 1565 Node* index_reg = __ BytecodeOperandReg(0);
1922 Node* index = __ LoadRegister(index_reg); 1566 Node* index = __ LoadRegister(index_reg);
1923 Node* one = __ SmiConstant(Smi::FromInt(1)); 1567 Node* one = __ SmiConstant(Smi::FromInt(1));
1924 Node* result = __ SmiAdd(index, one); 1568 Node* result = __ SmiAdd(index, one);
1925 __ SetAccumulator(result); 1569 __ SetAccumulator(result);
1926 __ Dispatch(); 1570 __ Dispatch();
1927 } 1571 }
1928 1572
1573 // Wide
1574 //
1575 // Prefix bytecode indicating next bytecode has wide (16-bit) operands.
1576 void Interpreter::DoWide(InterpreterAssembler* assembler) {
1577 __ DispatchWide(OperandScale::kDouble);
1578 }
1579
1580 // ExtraWide
1581 //
1582 // Prefix bytecode indicating next bytecode has extra-wide (32-bit) operands.
1583 void Interpreter::DoExtraWide(InterpreterAssembler* assembler) {
1584 __ DispatchWide(OperandScale::kQuadruple);
1585 }
1586
1587 // Illegal
1588 //
1589 // An invalid bytecode aborting execution if dispatched.
1590 void Interpreter::DoIllegal(InterpreterAssembler* assembler) {
1591 __ Abort(kInvalidBytecode);
1592 }
1593
1929 } // namespace interpreter 1594 } // namespace interpreter
1930 } // namespace internal 1595 } // namespace internal
1931 } // namespace v8 1596 } // namespace v8
OLDNEW
« no previous file with comments | « src/interpreter/interpreter.h ('k') | src/interpreter/interpreter-assembler.h » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698