Chromium Code Reviews

Side by Side Diff: src/interpreter/interpreter.cc

Issue 1783483002: [interpreter] Add support for scalable operands. (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Additional test for debugger stepping and wider constant array builder test. Created 4 years, 9 months ago
Use n/p to move between diff chunks; N/P to move between comments.
Jump to:
View unified diff |
OLDNEW
1 // Copyright 2015 the V8 project authors. All rights reserved. 1 // Copyright 2015 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "src/interpreter/interpreter.h" 5 #include "src/interpreter/interpreter.h"
6 6
7 #include "src/ast/prettyprinter.h" 7 #include "src/ast/prettyprinter.h"
8 #include "src/code-factory.h" 8 #include "src/code-factory.h"
9 #include "src/compiler.h" 9 #include "src/compiler.h"
10 #include "src/factory.h" 10 #include "src/factory.h"
11 #include "src/interpreter/bytecode-generator.h" 11 #include "src/interpreter/bytecode-generator.h"
12 #include "src/interpreter/bytecodes.h" 12 #include "src/interpreter/bytecodes.h"
13 #include "src/interpreter/interpreter-assembler.h" 13 #include "src/interpreter/interpreter-assembler.h"
14 #include "src/log.h" 14 #include "src/log.h"
15 #include "src/zone.h" 15 #include "src/zone.h"
16 16
17 namespace v8 { 17 namespace v8 {
18 namespace internal { 18 namespace internal {
19 namespace interpreter { 19 namespace interpreter {
20 20
21 using compiler::Node; 21 using compiler::Node;
22 22
23 #define __ assembler-> 23 #define __ assembler->
24 24
25 Interpreter::Interpreter(Isolate* isolate) : isolate_(isolate) { 25 Interpreter::Interpreter(Isolate* isolate) : isolate_(isolate) {
26 memset(&dispatch_table_, 0, sizeof(dispatch_table_)); 26 memset(dispatch_table_, 0, sizeof(dispatch_table_));
27 } 27 }
28 28
29 void Interpreter::Initialize() { 29 void Interpreter::Initialize() {
30 DCHECK(FLAG_ignition); 30 DCHECK(FLAG_ignition);
31 if (IsDispatchTableInitialized()) return; 31 if (IsDispatchTableInitialized()) return;
32 Zone zone; 32 Zone zone;
33 HandleScope scope(isolate_); 33 HandleScope scope(isolate_);
34 34
35 #define GENERATE_CODE(Name, ...) \ 35 OperandScale operand_scales[] = {OperandScale::k1X, OperandScale::k2X,
rmcilroy 2016/03/17 17:30:50 const kOperandScales
oth 2016/03/21 09:16:54 Meant to remove this and replace with NextOperandScale.
36 { \ 36 OperandScale::k4X};
37 InterpreterAssembler assembler(isolate_, &zone, Bytecode::k##Name); \ 37 for (size_t i = 0; i < arraysize(operand_scales); ++i) {
rmcilroy 2016/03/17 17:30:50 nit - pull out: OperandScale operand_scale = operand_scales[i];
oth 2016/03/21 09:16:54 operand_scale is now the loop variable.
38 Do##Name(&assembler); \ 38 #define GENERATE_CODE(Name, ...) \
39 Handle<Code> code = assembler.GenerateCode(); \ 39 { \
40 dispatch_table_[Bytecodes::ToByte(Bytecode::k##Name)] = *code; \ 40 if (BytecodeHasHandler(Bytecode::k##Name, operand_scales[i])) { \
41 TraceCodegen(code); \ 41 InterpreterAssembler assembler(isolate_, &zone, Bytecode::k##Name, \
42 LOG_CODE_EVENT(isolate_, \ 42 operand_scales[i]); \
43 CodeCreateEvent(Logger::BYTECODE_HANDLER_TAG, \ 43 Do##Name(&assembler); \
44 AbstractCode::cast(*code), #Name)); \ 44 Handle<Code> code = assembler.GenerateCode(); \
45 size_t index = \
46 GetDispatchTableIndex(Bytecode::k##Name, operand_scales[i]); \
47 dispatch_table_[index] = *code; \
48 TraceCodegen(code); \
49 LOG_CODE_EVENT(isolate_, \
50 CodeCreateEvent(Logger::BYTECODE_HANDLER_TAG, \
51 AbstractCode::cast(*code), #Name)); \
52 } \
45 } 53 }
46 BYTECODE_LIST(GENERATE_CODE) 54 BYTECODE_LIST(GENERATE_CODE)
47 #undef GENERATE_CODE 55 #undef GENERATE_CODE
56 }
57
58 size_t illegal_index =
59 GetDispatchTableIndex(Bytecode::kIllegal, OperandScale::k1X);
60 for (size_t index = 0; index < arraysize(dispatch_table_); ++index) {
rmcilroy 2016/03/17 17:30:50 nit - comment that this is filling in empty dispatch table entries.
oth 2016/03/21 09:16:54 Done.
61 if (dispatch_table_[index] == nullptr) {
62 dispatch_table_[index] = dispatch_table_[illegal_index];
63 }
64 }
48 } 65 }
49 66
50 Code* Interpreter::GetBytecodeHandler(Bytecode bytecode) { 67 Code* Interpreter::GetBytecodeHandler(Bytecode bytecode,
68 OperandScale operand_scale) {
51 DCHECK(IsDispatchTableInitialized()); 69 DCHECK(IsDispatchTableInitialized());
52 return dispatch_table_[Bytecodes::ToByte(bytecode)]; 70 DCHECK(BytecodeHasHandler(bytecode, operand_scale));
71 size_t index = GetDispatchTableIndex(bytecode, operand_scale);
72 return dispatch_table_[index];
73 }
74
75 // static
76 size_t Interpreter::GetDispatchTableIndex(Bytecode bytecode,
77 OperandScale operand_scale) {
78 static const size_t kEntriesPerOperandScale = 1u << kBitsPerByte;
79 size_t index = static_cast<size_t>(bytecode);
80 OperandScale current_scale = OperandScale::k1X;
81 while (current_scale != operand_scale) {
82 index += kEntriesPerOperandScale;
83 current_scale = Bytecodes::NextOperandScale(current_scale);
84 }
85 return index;
86 }
87
88 // static
89 bool Interpreter::BytecodeHasHandler(Bytecode bytecode,
90 OperandScale operand_scale) {
91 return operand_scale == OperandScale::k1X ||
92 (Bytecodes::IsBytecodeWithScalableOperands(bytecode) &&
93 !Bytecodes::IsPrefixScalingBytecode(bytecode) &&
94 !Bytecodes::IsDebugBreak(bytecode));
rmcilroy 2016/03/17 17:30:50 Do you need the !Bytecodes::IsPrefixScalingBytecode check here?
oth 2016/03/21 09:16:54 Not with the reduced set of debug breaks and less
53 } 95 }
54 96
55 void Interpreter::IterateDispatchTable(ObjectVisitor* v) { 97 void Interpreter::IterateDispatchTable(ObjectVisitor* v) {
56 v->VisitPointers( 98 v->VisitPointers(
57 reinterpret_cast<Object**>(&dispatch_table_[0]), 99 reinterpret_cast<Object**>(&dispatch_table_[0]),
58 reinterpret_cast<Object**>(&dispatch_table_[0] + kDispatchTableSize)); 100 reinterpret_cast<Object**>(&dispatch_table_[0] + kDispatchTableSize));
59 } 101 }
60 102
61 // static 103 // static
62 int Interpreter::InterruptBudget() { 104 int Interpreter::InterruptBudget() {
(...skipping 77 matching lines...)
140 182
141 // LdaZero 183 // LdaZero
142 // 184 //
143 // Load literal '0' into the accumulator. 185 // Load literal '0' into the accumulator.
144 void Interpreter::DoLdaZero(InterpreterAssembler* assembler) { 186 void Interpreter::DoLdaZero(InterpreterAssembler* assembler) {
145 Node* zero_value = __ NumberConstant(0.0); 187 Node* zero_value = __ NumberConstant(0.0);
146 __ SetAccumulator(zero_value); 188 __ SetAccumulator(zero_value);
147 __ Dispatch(); 189 __ Dispatch();
148 } 190 }
149 191
150 192 // LdaSmi <imm>
151 // LdaSmi8 <imm8>
152 // 193 //
153 // Load an 8-bit integer literal into the accumulator as a Smi. 194 // Load an integer literal into the accumulator as a Smi.
154 void Interpreter::DoLdaSmi8(InterpreterAssembler* assembler) { 195 void Interpreter::DoLdaSmi(InterpreterAssembler* assembler) {
155 Node* raw_int = __ BytecodeOperandImm(0); 196 Node* raw_int = __ BytecodeOperandImm(0);
156 Node* smi_int = __ SmiTag(raw_int); 197 Node* smi_int = __ SmiTag(raw_int);
157 __ SetAccumulator(smi_int); 198 __ SetAccumulator(smi_int);
158 __ Dispatch(); 199 __ Dispatch();
159 } 200 }
160 201
161 void Interpreter::DoLoadConstant(InterpreterAssembler* assembler) { 202 void Interpreter::DoLoadConstant(InterpreterAssembler* assembler) {
162 Node* index = __ BytecodeOperandIdx(0); 203 Node* index = __ BytecodeOperandIdx(0);
163 Node* constant = __ LoadConstantPoolEntry(index); 204 Node* constant = __ LoadConstantPoolEntry(index);
164 __ SetAccumulator(constant); 205 __ SetAccumulator(constant);
165 __ Dispatch(); 206 __ Dispatch();
166 } 207 }
167 208
168 209
169 // LdaConstant <idx> 210 // LdaConstant <idx>
170 // 211 //
171 // Load constant literal at |idx| in the constant pool into the accumulator. 212 // Load constant literal at |idx| in the constant pool into the accumulator.
172 void Interpreter::DoLdaConstant(InterpreterAssembler* assembler) { 213 void Interpreter::DoLdaConstant(InterpreterAssembler* assembler) {
173 DoLoadConstant(assembler); 214 DoLoadConstant(assembler);
174 } 215 }
175 216
176
177 // LdaConstantWide <idx>
178 //
179 // Load constant literal at |idx| in the constant pool into the accumulator.
180 void Interpreter::DoLdaConstantWide(InterpreterAssembler* assembler) {
181 DoLoadConstant(assembler);
182 }
183
184
185 // LdaUndefined 217 // LdaUndefined
186 // 218 //
187 // Load Undefined into the accumulator. 219 // Load Undefined into the accumulator.
188 void Interpreter::DoLdaUndefined(InterpreterAssembler* assembler) { 220 void Interpreter::DoLdaUndefined(InterpreterAssembler* assembler) {
189 Node* undefined_value = 221 Node* undefined_value =
190 __ HeapConstant(isolate_->factory()->undefined_value()); 222 __ HeapConstant(isolate_->factory()->undefined_value());
191 __ SetAccumulator(undefined_value); 223 __ SetAccumulator(undefined_value);
192 __ Dispatch(); 224 __ Dispatch();
193 } 225 }
194 226
(...skipping 65 matching lines...)
260 // Stores the value of register <src> to register <dst>. 292 // Stores the value of register <src> to register <dst>.
261 void Interpreter::DoMov(InterpreterAssembler* assembler) { 293 void Interpreter::DoMov(InterpreterAssembler* assembler) {
262 Node* src_index = __ BytecodeOperandReg(0); 294 Node* src_index = __ BytecodeOperandReg(0);
263 Node* src_value = __ LoadRegister(src_index); 295 Node* src_value = __ LoadRegister(src_index);
264 Node* dst_index = __ BytecodeOperandReg(1); 296 Node* dst_index = __ BytecodeOperandReg(1);
265 __ StoreRegister(src_value, dst_index); 297 __ StoreRegister(src_value, dst_index);
266 __ Dispatch(); 298 __ Dispatch();
267 } 299 }
268 300
269 301
270 // MovWide <src> <dst>
271 //
272 // Stores the value of register <src> to register <dst>.
273 void Interpreter::DoMovWide(InterpreterAssembler* assembler) {
274 DoMov(assembler);
275 }
276
277 void Interpreter::DoLoadGlobal(Callable ic, InterpreterAssembler* assembler) { 302 void Interpreter::DoLoadGlobal(Callable ic, InterpreterAssembler* assembler) {
278 // Get the global object. 303 // Get the global object.
279 Node* context = __ GetContext(); 304 Node* context = __ GetContext();
280 Node* native_context = 305 Node* native_context =
281 __ LoadContextSlot(context, Context::NATIVE_CONTEXT_INDEX); 306 __ LoadContextSlot(context, Context::NATIVE_CONTEXT_INDEX);
282 Node* global = __ LoadContextSlot(native_context, Context::EXTENSION_INDEX); 307 Node* global = __ LoadContextSlot(native_context, Context::EXTENSION_INDEX);
283 308
284 // Load the global via the LoadIC. 309 // Load the global via the LoadIC.
285 Node* code_target = __ HeapConstant(ic.code()); 310 Node* code_target = __ HeapConstant(ic.code());
286 Node* constant_index = __ BytecodeOperandIdx(0); 311 Node* constant_index = __ BytecodeOperandIdx(0);
(...skipping 20 matching lines...)
307 // LdaGlobalInsideTypeof <name_index> <slot> 332 // LdaGlobalInsideTypeof <name_index> <slot>
308 // 333 //
309 // Load the global with name in constant pool entry <name_index> into the 334 // Load the global with name in constant pool entry <name_index> into the
310 // accumulator using FeedBackVector slot <slot> inside of a typeof. 335 // accumulator using FeedBackVector slot <slot> inside of a typeof.
311 void Interpreter::DoLdaGlobalInsideTypeof(InterpreterAssembler* assembler) { 336 void Interpreter::DoLdaGlobalInsideTypeof(InterpreterAssembler* assembler) {
312 Callable ic = CodeFactory::LoadICInOptimizedCode(isolate_, INSIDE_TYPEOF, 337 Callable ic = CodeFactory::LoadICInOptimizedCode(isolate_, INSIDE_TYPEOF,
313 UNINITIALIZED); 338 UNINITIALIZED);
314 DoLoadGlobal(ic, assembler); 339 DoLoadGlobal(ic, assembler);
315 } 340 }
316 341
317 // LdaGlobalWide <name_index> <slot>
318 //
319 // Load the global with name in constant pool entry <name_index> into the
320 // accumulator using FeedBackVector slot <slot> outside of a typeof.
321 void Interpreter::DoLdaGlobalWide(InterpreterAssembler* assembler) {
322 Callable ic = CodeFactory::LoadICInOptimizedCode(isolate_, NOT_INSIDE_TYPEOF,
323 UNINITIALIZED);
324 DoLoadGlobal(ic, assembler);
325 }
326
327 // LdaGlobalInsideTypeofWide <name_index> <slot>
328 //
329 // Load the global with name in constant pool entry <name_index> into the
330 // accumulator using FeedBackVector slot <slot> inside of a typeof.
331 void Interpreter::DoLdaGlobalInsideTypeofWide(InterpreterAssembler* assembler) {
332 Callable ic = CodeFactory::LoadICInOptimizedCode(isolate_, INSIDE_TYPEOF,
333 UNINITIALIZED);
334 DoLoadGlobal(ic, assembler);
335 }
336
337
338 void Interpreter::DoStoreGlobal(Callable ic, InterpreterAssembler* assembler) { 342 void Interpreter::DoStoreGlobal(Callable ic, InterpreterAssembler* assembler) {
339 // Get the global object. 343 // Get the global object.
340 Node* context = __ GetContext(); 344 Node* context = __ GetContext();
341 Node* native_context = 345 Node* native_context =
342 __ LoadContextSlot(context, Context::NATIVE_CONTEXT_INDEX); 346 __ LoadContextSlot(context, Context::NATIVE_CONTEXT_INDEX);
343 Node* global = __ LoadContextSlot(native_context, Context::EXTENSION_INDEX); 347 Node* global = __ LoadContextSlot(native_context, Context::EXTENSION_INDEX);
344 348
345 // Store the global via the StoreIC. 349 // Store the global via the StoreIC.
346 Node* code_target = __ HeapConstant(ic.code()); 350 Node* code_target = __ HeapConstant(ic.code());
347 Node* constant_index = __ BytecodeOperandIdx(0); 351 Node* constant_index = __ BytecodeOperandIdx(0);
348 Node* name = __ LoadConstantPoolEntry(constant_index); 352 Node* name = __ LoadConstantPoolEntry(constant_index);
349 Node* value = __ GetAccumulator(); 353 Node* value = __ GetAccumulator();
350 Node* raw_slot = __ BytecodeOperandIdx(1); 354 Node* raw_slot = __ BytecodeOperandIdx(1);
351 Node* smi_slot = __ SmiTag(raw_slot); 355 Node* smi_slot = __ SmiTag(raw_slot);
352 Node* type_feedback_vector = __ LoadTypeFeedbackVector(); 356 Node* type_feedback_vector = __ LoadTypeFeedbackVector();
353 __ CallStub(ic.descriptor(), code_target, context, global, name, value, 357 __ CallStub(ic.descriptor(), code_target, context, global, name, value,
354 smi_slot, type_feedback_vector); 358 smi_slot, type_feedback_vector);
355
356 __ Dispatch(); 359 __ Dispatch();
357 } 360 }
358 361
359 362
360 // StaGlobalSloppy <name_index> <slot> 363 // StaGlobalSloppy <name_index> <slot>
361 // 364 //
362 // Store the value in the accumulator into the global with name in constant pool 365 // Store the value in the accumulator into the global with name in constant pool
363 // entry <name_index> using FeedBackVector slot <slot> in sloppy mode. 366 // entry <name_index> using FeedBackVector slot <slot> in sloppy mode.
364 void Interpreter::DoStaGlobalSloppy(InterpreterAssembler* assembler) { 367 void Interpreter::DoStaGlobalSloppy(InterpreterAssembler* assembler) {
365 Callable ic = 368 Callable ic =
366 CodeFactory::StoreICInOptimizedCode(isolate_, SLOPPY, UNINITIALIZED); 369 CodeFactory::StoreICInOptimizedCode(isolate_, SLOPPY, UNINITIALIZED);
367 DoStoreGlobal(ic, assembler); 370 DoStoreGlobal(ic, assembler);
368 } 371 }
369 372
370 373
371 // StaGlobalStrict <name_index> <slot> 374 // StaGlobalStrict <name_index> <slot>
372 // 375 //
373 // Store the value in the accumulator into the global with name in constant pool 376 // Store the value in the accumulator into the global with name in constant pool
374 // entry <name_index> using FeedBackVector slot <slot> in strict mode. 377 // entry <name_index> using FeedBackVector slot <slot> in strict mode.
375 void Interpreter::DoStaGlobalStrict(InterpreterAssembler* assembler) { 378 void Interpreter::DoStaGlobalStrict(InterpreterAssembler* assembler) {
376 Callable ic = 379 Callable ic =
377 CodeFactory::StoreICInOptimizedCode(isolate_, STRICT, UNINITIALIZED); 380 CodeFactory::StoreICInOptimizedCode(isolate_, STRICT, UNINITIALIZED);
378 DoStoreGlobal(ic, assembler); 381 DoStoreGlobal(ic, assembler);
379 } 382 }
380 383
381
382 // StaGlobalSloppyWide <name_index> <slot>
383 //
384 // Store the value in the accumulator into the global with name in constant pool
385 // entry <name_index> using FeedBackVector slot <slot> in sloppy mode.
386 void Interpreter::DoStaGlobalSloppyWide(InterpreterAssembler* assembler) {
387 Callable ic =
388 CodeFactory::StoreICInOptimizedCode(isolate_, SLOPPY, UNINITIALIZED);
389 DoStoreGlobal(ic, assembler);
390 }
391
392
393 // StaGlobalStrictWide <name_index> <slot>
394 //
395 // Store the value in the accumulator into the global with name in constant pool
396 // entry <name_index> using FeedBackVector slot <slot> in strict mode.
397 void Interpreter::DoStaGlobalStrictWide(InterpreterAssembler* assembler) {
398 Callable ic =
399 CodeFactory::StoreICInOptimizedCode(isolate_, STRICT, UNINITIALIZED);
400 DoStoreGlobal(ic, assembler);
401 }
402
403
404 // LdaContextSlot <context> <slot_index> 384 // LdaContextSlot <context> <slot_index>
405 // 385 //
406 // Load the object in |slot_index| of |context| into the accumulator. 386 // Load the object in |slot_index| of |context| into the accumulator.
407 void Interpreter::DoLdaContextSlot(InterpreterAssembler* assembler) { 387 void Interpreter::DoLdaContextSlot(InterpreterAssembler* assembler) {
408 Node* reg_index = __ BytecodeOperandReg(0); 388 Node* reg_index = __ BytecodeOperandReg(0);
409 Node* context = __ LoadRegister(reg_index); 389 Node* context = __ LoadRegister(reg_index);
410 Node* slot_index = __ BytecodeOperandIdx(1); 390 Node* slot_index = __ BytecodeOperandIdx(1);
411 Node* result = __ LoadContextSlot(context, slot_index); 391 Node* result = __ LoadContextSlot(context, slot_index);
412 __ SetAccumulator(result); 392 __ SetAccumulator(result);
413 __ Dispatch(); 393 __ Dispatch();
414 } 394 }
415 395
416
417 // LdaContextSlotWide <context> <slot_index>
418 //
419 // Load the object in |slot_index| of |context| into the accumulator.
420 void Interpreter::DoLdaContextSlotWide(InterpreterAssembler* assembler) {
421 DoLdaContextSlot(assembler);
422 }
423
424
425 // StaContextSlot <context> <slot_index> 396 // StaContextSlot <context> <slot_index>
426 // 397 //
427 // Stores the object in the accumulator into |slot_index| of |context|. 398 // Stores the object in the accumulator into |slot_index| of |context|.
428 void Interpreter::DoStaContextSlot(InterpreterAssembler* assembler) { 399 void Interpreter::DoStaContextSlot(InterpreterAssembler* assembler) {
429 Node* value = __ GetAccumulator(); 400 Node* value = __ GetAccumulator();
430 Node* reg_index = __ BytecodeOperandReg(0); 401 Node* reg_index = __ BytecodeOperandReg(0);
431 Node* context = __ LoadRegister(reg_index); 402 Node* context = __ LoadRegister(reg_index);
432 Node* slot_index = __ BytecodeOperandIdx(1); 403 Node* slot_index = __ BytecodeOperandIdx(1);
433 __ StoreContextSlot(context, slot_index, value); 404 __ StoreContextSlot(context, slot_index, value);
434 __ Dispatch(); 405 __ Dispatch();
435 } 406 }
436 407
437
438 // StaContextSlot <context> <slot_index>
439 //
440 // Stores the object in the accumulator into |slot_index| of |context|.
441 void Interpreter::DoStaContextSlotWide(InterpreterAssembler* assembler) {
442 DoStaContextSlot(assembler);
443 }
444
445 void Interpreter::DoLoadLookupSlot(Runtime::FunctionId function_id, 408 void Interpreter::DoLoadLookupSlot(Runtime::FunctionId function_id,
446 InterpreterAssembler* assembler) { 409 InterpreterAssembler* assembler) {
447 Node* index = __ BytecodeOperandIdx(0); 410 Node* index = __ BytecodeOperandIdx(0);
448 Node* name = __ LoadConstantPoolEntry(index); 411 Node* name = __ LoadConstantPoolEntry(index);
449 Node* context = __ GetContext(); 412 Node* context = __ GetContext();
450 Node* result = __ CallRuntime(function_id, context, name); 413 Node* result = __ CallRuntime(function_id, context, name);
451 __ SetAccumulator(result); 414 __ SetAccumulator(result);
452 __ Dispatch(); 415 __ Dispatch();
453 } 416 }
454 417
455
456 // LdaLookupSlot <name_index> 418 // LdaLookupSlot <name_index>
457 // 419 //
458 // Lookup the object with the name in constant pool entry |name_index| 420 // Lookup the object with the name in constant pool entry |name_index|
459 // dynamically. 421 // dynamically.
460 void Interpreter::DoLdaLookupSlot(InterpreterAssembler* assembler) { 422 void Interpreter::DoLdaLookupSlot(InterpreterAssembler* assembler) {
461 DoLoadLookupSlot(Runtime::kLoadLookupSlot, assembler); 423 DoLoadLookupSlot(Runtime::kLoadLookupSlot, assembler);
462 } 424 }
463 425
464
465 // LdaLookupSlotInsideTypeof <name_index> 426 // LdaLookupSlotInsideTypeof <name_index>
466 // 427 //
467 // Lookup the object with the name in constant pool entry |name_index| 428 // Lookup the object with the name in constant pool entry |name_index|
468 // dynamically without causing a NoReferenceError. 429 // dynamically without causing a NoReferenceError.
469 void Interpreter::DoLdaLookupSlotInsideTypeof(InterpreterAssembler* assembler) { 430 void Interpreter::DoLdaLookupSlotInsideTypeof(InterpreterAssembler* assembler) {
470 DoLoadLookupSlot(Runtime::kLoadLookupSlotInsideTypeof, assembler); 431 DoLoadLookupSlot(Runtime::kLoadLookupSlotInsideTypeof, assembler);
471 } 432 }
472 433
473
474 // LdaLookupSlotWide <name_index>
475 //
476 // Lookup the object with the name in constant pool entry |name_index|
477 // dynamically.
478 void Interpreter::DoLdaLookupSlotWide(InterpreterAssembler* assembler) {
479 DoLdaLookupSlot(assembler);
480 }
481
482
483 // LdaLookupSlotInsideTypeofWide <name_index>
484 //
485 // Lookup the object with the name in constant pool entry |name_index|
486 // dynamically without causing a NoReferenceError.
487 void Interpreter::DoLdaLookupSlotInsideTypeofWide(
488 InterpreterAssembler* assembler) {
489 DoLdaLookupSlotInsideTypeof(assembler);
490 }
491
492 void Interpreter::DoStoreLookupSlot(LanguageMode language_mode, 434 void Interpreter::DoStoreLookupSlot(LanguageMode language_mode,
493 InterpreterAssembler* assembler) { 435 InterpreterAssembler* assembler) {
494 Node* value = __ GetAccumulator(); 436 Node* value = __ GetAccumulator();
495 Node* index = __ BytecodeOperandIdx(0); 437 Node* index = __ BytecodeOperandIdx(0);
496 Node* name = __ LoadConstantPoolEntry(index); 438 Node* name = __ LoadConstantPoolEntry(index);
497 Node* context = __ GetContext(); 439 Node* context = __ GetContext();
498 Node* result = __ CallRuntime(is_strict(language_mode) 440 Node* result = __ CallRuntime(is_strict(language_mode)
499 ? Runtime::kStoreLookupSlot_Strict 441 ? Runtime::kStoreLookupSlot_Strict
500 : Runtime::kStoreLookupSlot_Sloppy, 442 : Runtime::kStoreLookupSlot_Sloppy,
501 context, name, value); 443 context, name, value);
502 __ SetAccumulator(result); 444 __ SetAccumulator(result);
503 __ Dispatch(); 445 __ Dispatch();
504 } 446 }
505 447
506
507 // StaLookupSlotSloppy <name_index> 448 // StaLookupSlotSloppy <name_index>
508 // 449 //
509 // Store the object in accumulator to the object with the name in constant 450 // Store the object in accumulator to the object with the name in constant
510 // pool entry |name_index| in sloppy mode. 451 // pool entry |name_index| in sloppy mode.
511 void Interpreter::DoStaLookupSlotSloppy(InterpreterAssembler* assembler) { 452 void Interpreter::DoStaLookupSlotSloppy(InterpreterAssembler* assembler) {
512 DoStoreLookupSlot(LanguageMode::SLOPPY, assembler); 453 DoStoreLookupSlot(LanguageMode::SLOPPY, assembler);
513 } 454 }
514 455
515 456
516 // StaLookupSlotStrict <name_index> 457 // StaLookupSlotStrict <name_index>
517 // 458 //
518 // Store the object in accumulator to the object with the name in constant 459 // Store the object in accumulator to the object with the name in constant
519 // pool entry |name_index| in strict mode. 460 // pool entry |name_index| in strict mode.
520 void Interpreter::DoStaLookupSlotStrict(InterpreterAssembler* assembler) { 461 void Interpreter::DoStaLookupSlotStrict(InterpreterAssembler* assembler) {
521 DoStoreLookupSlot(LanguageMode::STRICT, assembler); 462 DoStoreLookupSlot(LanguageMode::STRICT, assembler);
522 } 463 }
523 464
524
525 // StaLookupSlotSloppyWide <name_index>
526 //
527 // Store the object in accumulator to the object with the name in constant
528 // pool entry |name_index| in sloppy mode.
529 void Interpreter::DoStaLookupSlotSloppyWide(InterpreterAssembler* assembler) {
530 DoStaLookupSlotSloppy(assembler);
531 }
532
533
534 // StaLookupSlotStrictWide <name_index>
535 //
536 // Store the object in accumulator to the object with the name in constant
537 // pool entry |name_index| in strict mode.
538 void Interpreter::DoStaLookupSlotStrictWide(InterpreterAssembler* assembler) {
539 DoStaLookupSlotStrict(assembler);
540 }
541
542 void Interpreter::DoLoadIC(Callable ic, InterpreterAssembler* assembler) { 465 void Interpreter::DoLoadIC(Callable ic, InterpreterAssembler* assembler) {
543 Node* code_target = __ HeapConstant(ic.code()); 466 Node* code_target = __ HeapConstant(ic.code());
544 Node* register_index = __ BytecodeOperandReg(0); 467 Node* register_index = __ BytecodeOperandReg(0);
545 Node* object = __ LoadRegister(register_index); 468 Node* object = __ LoadRegister(register_index);
546 Node* constant_index = __ BytecodeOperandIdx(1); 469 Node* constant_index = __ BytecodeOperandIdx(1);
547 Node* name = __ LoadConstantPoolEntry(constant_index); 470 Node* name = __ LoadConstantPoolEntry(constant_index);
548 Node* raw_slot = __ BytecodeOperandIdx(2); 471 Node* raw_slot = __ BytecodeOperandIdx(2);
549 Node* smi_slot = __ SmiTag(raw_slot); 472 Node* smi_slot = __ SmiTag(raw_slot);
550 Node* type_feedback_vector = __ LoadTypeFeedbackVector(); 473 Node* type_feedback_vector = __ LoadTypeFeedbackVector();
551 Node* context = __ GetContext(); 474 Node* context = __ GetContext();
552 Node* result = __ CallStub(ic.descriptor(), code_target, context, object, 475 Node* result = __ CallStub(ic.descriptor(), code_target, context, object,
553 name, smi_slot, type_feedback_vector); 476 name, smi_slot, type_feedback_vector);
554 __ SetAccumulator(result); 477 __ SetAccumulator(result);
555 __ Dispatch(); 478 __ Dispatch();
556 } 479 }
557 480
558 // LoadIC <object> <name_index> <slot> 481 // LoadIC <object> <name_index> <slot>
559 // 482 //
560 // Calls the LoadIC at FeedBackVector slot <slot> for <object> and the name at 483 // Calls the LoadIC at FeedBackVector slot <slot> for <object> and the name at
561 // constant pool entry <name_index>. 484 // constant pool entry <name_index>.
562 void Interpreter::DoLoadIC(InterpreterAssembler* assembler) { 485 void Interpreter::DoLoadIC(InterpreterAssembler* assembler) {
563 Callable ic = CodeFactory::LoadICInOptimizedCode(isolate_, NOT_INSIDE_TYPEOF, 486 Callable ic = CodeFactory::LoadICInOptimizedCode(isolate_, NOT_INSIDE_TYPEOF,
564 UNINITIALIZED); 487 UNINITIALIZED);
565 DoLoadIC(ic, assembler); 488 DoLoadIC(ic, assembler);
566 } 489 }
567 490
568 // LoadICWide <object> <name_index> <slot>
569 //
570 // Calls the LoadIC at FeedBackVector slot <slot> for <object> and the name at
571 // constant pool entry <name_index>.
572 void Interpreter::DoLoadICWide(InterpreterAssembler* assembler) {
573 Callable ic = CodeFactory::LoadICInOptimizedCode(isolate_, NOT_INSIDE_TYPEOF,
574 UNINITIALIZED);
575 DoLoadIC(ic, assembler);
576 }
577
578
579 void Interpreter::DoKeyedLoadIC(Callable ic, InterpreterAssembler* assembler) { 491 void Interpreter::DoKeyedLoadIC(Callable ic, InterpreterAssembler* assembler) {
580 Node* code_target = __ HeapConstant(ic.code()); 492 Node* code_target = __ HeapConstant(ic.code());
581 Node* reg_index = __ BytecodeOperandReg(0); 493 Node* reg_index = __ BytecodeOperandReg(0);
582 Node* object = __ LoadRegister(reg_index); 494 Node* object = __ LoadRegister(reg_index);
583 Node* name = __ GetAccumulator(); 495 Node* name = __ GetAccumulator();
584 Node* raw_slot = __ BytecodeOperandIdx(1); 496 Node* raw_slot = __ BytecodeOperandIdx(1);
585 Node* smi_slot = __ SmiTag(raw_slot); 497 Node* smi_slot = __ SmiTag(raw_slot);
586 Node* type_feedback_vector = __ LoadTypeFeedbackVector(); 498 Node* type_feedback_vector = __ LoadTypeFeedbackVector();
587 Node* context = __ GetContext(); 499 Node* context = __ GetContext();
588 Node* result = __ CallStub(ic.descriptor(), code_target, context, object, 500 Node* result = __ CallStub(ic.descriptor(), code_target, context, object,
589 name, smi_slot, type_feedback_vector); 501 name, smi_slot, type_feedback_vector);
590 __ SetAccumulator(result); 502 __ SetAccumulator(result);
591 __ Dispatch(); 503 __ Dispatch();
592 } 504 }
593 505
594 // KeyedLoadIC <object> <slot> 506 // KeyedLoadIC <object> <slot>
595 // 507 //
596 // Calls the KeyedLoadIC at FeedBackVector slot <slot> for <object> and the key 508 // Calls the KeyedLoadIC at FeedBackVector slot <slot> for <object> and the key
597 // in the accumulator. 509 // in the accumulator.
598 void Interpreter::DoKeyedLoadIC(InterpreterAssembler* assembler) { 510 void Interpreter::DoKeyedLoadIC(InterpreterAssembler* assembler) {
599 Callable ic = 511 Callable ic =
600 CodeFactory::KeyedLoadICInOptimizedCode(isolate_, UNINITIALIZED); 512 CodeFactory::KeyedLoadICInOptimizedCode(isolate_, UNINITIALIZED);
601 DoKeyedLoadIC(ic, assembler); 513 DoKeyedLoadIC(ic, assembler);
602 } 514 }
603 515
604 // KeyedLoadICWide <object> <slot>
605 //
606 // Calls the KeyedLoadIC at FeedBackVector slot <slot> for <object> and the key
607 // in the accumulator.
608 void Interpreter::DoKeyedLoadICWide(InterpreterAssembler* assembler) {
609 Callable ic =
610 CodeFactory::KeyedLoadICInOptimizedCode(isolate_, UNINITIALIZED);
611 DoKeyedLoadIC(ic, assembler);
612 }
613
614
615 void Interpreter::DoStoreIC(Callable ic, InterpreterAssembler* assembler) { 516 void Interpreter::DoStoreIC(Callable ic, InterpreterAssembler* assembler) {
616 Node* code_target = __ HeapConstant(ic.code()); 517 Node* code_target = __ HeapConstant(ic.code());
617 Node* object_reg_index = __ BytecodeOperandReg(0); 518 Node* object_reg_index = __ BytecodeOperandReg(0);
618 Node* object = __ LoadRegister(object_reg_index); 519 Node* object = __ LoadRegister(object_reg_index);
619 Node* constant_index = __ BytecodeOperandIdx(1); 520 Node* constant_index = __ BytecodeOperandIdx(1);
620 Node* name = __ LoadConstantPoolEntry(constant_index); 521 Node* name = __ LoadConstantPoolEntry(constant_index);
621 Node* value = __ GetAccumulator(); 522 Node* value = __ GetAccumulator();
622 Node* raw_slot = __ BytecodeOperandIdx(2); 523 Node* raw_slot = __ BytecodeOperandIdx(2);
623 Node* smi_slot = __ SmiTag(raw_slot); 524 Node* smi_slot = __ SmiTag(raw_slot);
624 Node* type_feedback_vector = __ LoadTypeFeedbackVector(); 525 Node* type_feedback_vector = __ LoadTypeFeedbackVector();
(...skipping 20 matching lines...)
645 // 546 //
646 // Calls the strict mode StoreIC at FeedBackVector slot <slot> for <object> and 547 // Calls the strict mode StoreIC at FeedBackVector slot <slot> for <object> and
647 // the name in constant pool entry <name_index> with the value in the 548 // the name in constant pool entry <name_index> with the value in the
648 // accumulator. 549 // accumulator.
649 void Interpreter::DoStoreICStrict(InterpreterAssembler* assembler) { 550 void Interpreter::DoStoreICStrict(InterpreterAssembler* assembler) {
650 Callable ic = 551 Callable ic =
651 CodeFactory::StoreICInOptimizedCode(isolate_, STRICT, UNINITIALIZED); 552 CodeFactory::StoreICInOptimizedCode(isolate_, STRICT, UNINITIALIZED);
652 DoStoreIC(ic, assembler); 553 DoStoreIC(ic, assembler);
653 } 554 }
654 555
655
656 // StoreICSloppyWide <object> <name_index> <slot>
657 //
658 // Calls the sloppy mode StoreIC at FeedBackVector slot <slot> for <object> and
659 // the name in constant pool entry <name_index> with the value in the
660 // accumulator.
661 void Interpreter::DoStoreICSloppyWide(InterpreterAssembler* assembler) {
662 Callable ic =
663 CodeFactory::StoreICInOptimizedCode(isolate_, SLOPPY, UNINITIALIZED);
664 DoStoreIC(ic, assembler);
665 }
666
667
668 // StoreICStrictWide <object> <name_index> <slot>
669 //
670 // Calls the strict mode StoreIC at FeedBackVector slot <slot> for <object> and
671 // the name in constant pool entry <name_index> with the value in the
672 // accumulator.
673 void Interpreter::DoStoreICStrictWide(InterpreterAssembler* assembler) {
674 Callable ic =
675 CodeFactory::StoreICInOptimizedCode(isolate_, STRICT, UNINITIALIZED);
676 DoStoreIC(ic, assembler);
677 }
678
679 void Interpreter::DoKeyedStoreIC(Callable ic, InterpreterAssembler* assembler) { 556 void Interpreter::DoKeyedStoreIC(Callable ic, InterpreterAssembler* assembler) {
680 Node* code_target = __ HeapConstant(ic.code()); 557 Node* code_target = __ HeapConstant(ic.code());
681 Node* object_reg_index = __ BytecodeOperandReg(0); 558 Node* object_reg_index = __ BytecodeOperandReg(0);
682 Node* object = __ LoadRegister(object_reg_index); 559 Node* object = __ LoadRegister(object_reg_index);
683 Node* name_reg_index = __ BytecodeOperandReg(1); 560 Node* name_reg_index = __ BytecodeOperandReg(1);
684 Node* name = __ LoadRegister(name_reg_index); 561 Node* name = __ LoadRegister(name_reg_index);
685 Node* value = __ GetAccumulator(); 562 Node* value = __ GetAccumulator();
686 Node* raw_slot = __ BytecodeOperandIdx(2); 563 Node* raw_slot = __ BytecodeOperandIdx(2);
687 Node* smi_slot = __ SmiTag(raw_slot); 564 Node* smi_slot = __ SmiTag(raw_slot);
688 Node* type_feedback_vector = __ LoadTypeFeedbackVector(); 565 Node* type_feedback_vector = __ LoadTypeFeedbackVector();
(...skipping 18 matching lines...)
// KeyedStoreICStrict <object> <key> <slot>
//
// Calls the strict mode KeyedStoreIC at FeedBackVector slot <slot> for
// <object> and the key <key> with the value in the accumulator.
void Interpreter::DoKeyedStoreICStrict(InterpreterAssembler* assembler) {
  Callable ic =
      CodeFactory::KeyedStoreICInOptimizedCode(isolate_, STRICT, UNINITIALIZED);
  DoKeyedStoreIC(ic, assembler);
}
716 593
717
// KeyedStoreICSloppyWide <object> <key> <slot>
//
// Calls the sloppy mode KeyedStoreIC at FeedBackVector slot <slot> for
// <object> and the key <key> with the value in the accumulator.
void Interpreter::DoKeyedStoreICSloppyWide(InterpreterAssembler* assembler) {
  Callable ic =
      CodeFactory::KeyedStoreICInOptimizedCode(isolate_, SLOPPY, UNINITIALIZED);
  DoKeyedStoreIC(ic, assembler);
}
727
728
// KeyedStoreICStrictWide <object> <key> <slot>
//
// Calls the strict mode KeyedStoreIC at FeedBackVector slot <slot> for
// <object> and the key <key> with the value in the accumulator.
void Interpreter::DoKeyedStoreICStrictWide(InterpreterAssembler* assembler) {
  Callable ic =
      CodeFactory::KeyedStoreICInOptimizedCode(isolate_, STRICT, UNINITIALIZED);
  DoKeyedStoreIC(ic, assembler);
}
738
739 // PushContext <context> 594 // PushContext <context>
740 // 595 //
741 // Saves the current context in <context>, and pushes the accumulator as the 596 // Saves the current context in <context>, and pushes the accumulator as the
742 // new current context. 597 // new current context.
743 void Interpreter::DoPushContext(InterpreterAssembler* assembler) { 598 void Interpreter::DoPushContext(InterpreterAssembler* assembler) {
744 Node* reg_index = __ BytecodeOperandReg(0); 599 Node* reg_index = __ BytecodeOperandReg(0);
745 Node* new_context = __ GetAccumulator(); 600 Node* new_context = __ GetAccumulator();
746 Node* old_context = __ GetContext(); 601 Node* old_context = __ GetContext();
747 __ StoreRegister(old_context, reg_index); 602 __ StoreRegister(old_context, reg_index);
748 __ SetContext(new_context); 603 __ SetContext(new_context);
(...skipping 253 matching lines...)
1002 857
1003 858
// Call <callable> <receiver> <arg_count>
//
// Call a JSFunction or Callable in |callable| with the |receiver| and
// |arg_count| arguments in subsequent registers.
void Interpreter::DoCall(InterpreterAssembler* assembler) {
  DoJSCall(assembler, TailCallMode::kDisallow);
}
1011 866
1012
// CallWide <callable> <receiver> <arg_count>
//
// Call a JSFunction or Callable in |callable| with the |receiver| and
// |arg_count| arguments in subsequent registers.
void Interpreter::DoCallWide(InterpreterAssembler* assembler) {
  DoJSCall(assembler, TailCallMode::kDisallow);
}
1020
// TailCall <callable> <receiver> <arg_count>
//
// Tail call a JSFunction or Callable in |callable| with the |receiver| and
// |arg_count| arguments in subsequent registers.
void Interpreter::DoTailCall(InterpreterAssembler* assembler) {
  DoJSCall(assembler, TailCallMode::kAllow);
}
1028 874
// TailCallWide <callable> <receiver> <arg_count>
//
// Tail call a JSFunction or Callable in |callable| with the |receiver| and
// |arg_count| arguments in subsequent registers.
void Interpreter::DoTailCallWide(InterpreterAssembler* assembler) {
  DoJSCall(assembler, TailCallMode::kAllow);
}
1036
// Shared body of the CallRuntime bytecode handlers: invokes the runtime
// function identified by the runtime-id operand with |args_count| arguments
// starting at |first_arg|, and leaves the result in the accumulator.
void Interpreter::DoCallRuntimeCommon(InterpreterAssembler* assembler) {
  Node* function_id = __ BytecodeOperandRuntimeId(0);
  Node* first_arg_reg = __ BytecodeOperandReg(1);
  Node* first_arg = __ RegisterLocation(first_arg_reg);
  Node* args_count = __ BytecodeOperandCount(2);
  Node* context = __ GetContext();
  Node* result = __ CallRuntimeN(function_id, context, first_arg, args_count);
  __ SetAccumulator(result);
  __ Dispatch();
}
1047 885
1048 886
1049 // CallRuntime <function_id> <first_arg> <arg_count> 887 // CallRuntime <function_id> <first_arg> <arg_count>
1050 // 888 //
1051 // Call the runtime function |function_id| with the first argument in 889 // Call the runtime function |function_id| with the first argument in
1052 // register |first_arg| and |arg_count| arguments in subsequent 890 // register |first_arg| and |arg_count| arguments in subsequent
1053 // registers. 891 // registers.
1054 void Interpreter::DoCallRuntime(InterpreterAssembler* assembler) { 892 void Interpreter::DoCallRuntime(InterpreterAssembler* assembler) {
1055 DoCallRuntimeCommon(assembler); 893 DoCallRuntimeCommon(assembler);
1056 } 894 }
1057 895
1058
// CallRuntimeWide <function_id> <first_arg> <arg_count>
//
// Call the runtime function |function_id| with the first argument in
// register |first_arg| and |arg_count| arguments in subsequent
// registers.
void Interpreter::DoCallRuntimeWide(InterpreterAssembler* assembler) {
  DoCallRuntimeCommon(assembler);
}
1067
// Shared body of the CallRuntimeForPair bytecode handlers: invokes a runtime
// function that returns two values and stores them into two consecutive
// registers starting at the <first_return> operand.
void Interpreter::DoCallRuntimeForPairCommon(InterpreterAssembler* assembler) {
  // Call the runtime function.
  Node* function_id = __ BytecodeOperandRuntimeId(0);
  Node* first_arg_reg = __ BytecodeOperandReg(1);
  Node* first_arg = __ RegisterLocation(first_arg_reg);
  Node* args_count = __ BytecodeOperandCount(2);
  Node* context = __ GetContext();
  Node* result_pair =
      __ CallRuntimeN(function_id, context, first_arg, args_count, 2);

  // Store the results in <first_return> and <first_return + 1>
  Node* first_return_reg = __ BytecodeOperandReg(3);
  Node* second_return_reg = __ NextRegister(first_return_reg);
  Node* result0 = __ Projection(0, result_pair);
  Node* result1 = __ Projection(1, result_pair);
  __ StoreRegister(result0, first_return_reg);
  __ StoreRegister(result1, second_return_reg);
  __ Dispatch();
}
1087 915
1088 916
1089 // CallRuntimeForPair <function_id> <first_arg> <arg_count> <first_return> 917 // CallRuntimeForPair <function_id> <first_arg> <arg_count> <first_return>
1090 // 918 //
1091 // Call the runtime function |function_id| which returns a pair, with the 919 // Call the runtime function |function_id| which returns a pair, with the
1092 // first argument in register |first_arg| and |arg_count| arguments in 920 // first argument in register |first_arg| and |arg_count| arguments in
1093 // subsequent registers. Returns the result in <first_return> and 921 // subsequent registers. Returns the result in <first_return> and
1094 // <first_return + 1> 922 // <first_return + 1>
1095 void Interpreter::DoCallRuntimeForPair(InterpreterAssembler* assembler) { 923 void Interpreter::DoCallRuntimeForPair(InterpreterAssembler* assembler) {
1096 DoCallRuntimeForPairCommon(assembler); 924 DoCallRuntimeForPairCommon(assembler);
1097 } 925 }
1098 926
1099
1100 // CallRuntimeForPairWide <function_id> <first_arg> <arg_count> <first_return>
1101 //
1102 // Call the runtime function |function_id| which returns a pair, with the
1103 // first argument in register |first_arg| and |arg_count| arguments in
1104 // subsequent registers. Returns the result in <first_return> and
1105 // <first_return + 1>
1106 void Interpreter::DoCallRuntimeForPairWide(InterpreterAssembler* assembler) {
1107 DoCallRuntimeForPairCommon(assembler);
1108 }
1109
1110 void Interpreter::DoCallJSRuntimeCommon(InterpreterAssembler* assembler) { 927 void Interpreter::DoCallJSRuntimeCommon(InterpreterAssembler* assembler) {
1111 Node* context_index = __ BytecodeOperandIdx(0); 928 Node* context_index = __ BytecodeOperandIdx(0);
1112 Node* receiver_reg = __ BytecodeOperandReg(1); 929 Node* receiver_reg = __ BytecodeOperandReg(1);
1113 Node* first_arg = __ RegisterLocation(receiver_reg); 930 Node* first_arg = __ RegisterLocation(receiver_reg);
1114 Node* receiver_args_count = __ BytecodeOperandCount(2); 931 Node* receiver_args_count = __ BytecodeOperandCount(2);
1115 Node* receiver_count = __ Int32Constant(1); 932 Node* receiver_count = __ Int32Constant(1);
1116 Node* args_count = __ Int32Sub(receiver_args_count, receiver_count); 933 Node* args_count = __ Int32Sub(receiver_args_count, receiver_count);
1117 934
1118 // Get the function to call from the native context. 935 // Get the function to call from the native context.
1119 Node* context = __ GetContext(); 936 Node* context = __ GetContext();
(...skipping 10 matching lines...)
1130 947
1131 948
1132 // CallJSRuntime <context_index> <receiver> <arg_count> 949 // CallJSRuntime <context_index> <receiver> <arg_count>
1133 // 950 //
1134 // Call the JS runtime function that has the |context_index| with the receiver 951 // Call the JS runtime function that has the |context_index| with the receiver
1135 // in register |receiver| and |arg_count| arguments in subsequent registers. 952 // in register |receiver| and |arg_count| arguments in subsequent registers.
1136 void Interpreter::DoCallJSRuntime(InterpreterAssembler* assembler) { 953 void Interpreter::DoCallJSRuntime(InterpreterAssembler* assembler) {
1137 DoCallJSRuntimeCommon(assembler); 954 DoCallJSRuntimeCommon(assembler);
1138 } 955 }
1139 956
1140
1141 // CallJSRuntimeWide <context_index> <receiver> <arg_count>
1142 //
1143 // Call the JS runtime function that has the |context_index| with the receiver
1144 // in register |receiver| and |arg_count| arguments in subsequent registers.
1145 void Interpreter::DoCallJSRuntimeWide(InterpreterAssembler* assembler) {
1146 DoCallJSRuntimeCommon(assembler);
1147 }
1148
1149 void Interpreter::DoCallConstruct(InterpreterAssembler* assembler) { 957 void Interpreter::DoCallConstruct(InterpreterAssembler* assembler) {
1150 Callable ic = CodeFactory::InterpreterPushArgsAndConstruct(isolate_); 958 Callable ic = CodeFactory::InterpreterPushArgsAndConstruct(isolate_);
1151 Node* new_target = __ GetAccumulator(); 959 Node* new_target = __ GetAccumulator();
1152 Node* constructor_reg = __ BytecodeOperandReg(0); 960 Node* constructor_reg = __ BytecodeOperandReg(0);
1153 Node* constructor = __ LoadRegister(constructor_reg); 961 Node* constructor = __ LoadRegister(constructor_reg);
1154 Node* first_arg_reg = __ BytecodeOperandReg(1); 962 Node* first_arg_reg = __ BytecodeOperandReg(1);
1155 Node* first_arg = __ RegisterLocation(first_arg_reg); 963 Node* first_arg = __ RegisterLocation(first_arg_reg);
1156 Node* args_count = __ BytecodeOperandCount(2); 964 Node* args_count = __ BytecodeOperandCount(2);
1157 Node* context = __ GetContext(); 965 Node* context = __ GetContext();
1158 Node* result = 966 Node* result =
1159 __ CallConstruct(constructor, context, new_target, first_arg, args_count); 967 __ CallConstruct(constructor, context, new_target, first_arg, args_count);
1160 __ SetAccumulator(result); 968 __ SetAccumulator(result);
1161 __ Dispatch(); 969 __ Dispatch();
1162 } 970 }
1163 971
1164 972
1165 // New <constructor> <first_arg> <arg_count> 973 // New <constructor> <first_arg> <arg_count>
1166 // 974 //
1167 // Call operator new with |constructor| and the first argument in 975 // Call operator new with |constructor| and the first argument in
1168 // register |first_arg| and |arg_count| arguments in subsequent 976 // register |first_arg| and |arg_count| arguments in subsequent
1169 // registers. The new.target is in the accumulator. 977 // registers. The new.target is in the accumulator.
1170 // 978 //
1171 void Interpreter::DoNew(InterpreterAssembler* assembler) { 979 void Interpreter::DoNew(InterpreterAssembler* assembler) {
1172 DoCallConstruct(assembler); 980 DoCallConstruct(assembler);
1173 } 981 }
1174 982
1175
1176 // NewWide <constructor> <first_arg> <arg_count>
1177 //
1178 // Call operator new with |constructor| and the first argument in
1179 // register |first_arg| and |arg_count| arguments in subsequent
1180 // registers. The new.target is in the accumulator.
1181 //
1182 void Interpreter::DoNewWide(InterpreterAssembler* assembler) {
1183 DoCallConstruct(assembler);
1184 }
1185
1186
1187 // TestEqual <src> 983 // TestEqual <src>
1188 // 984 //
1189 // Test if the value in the <src> register equals the accumulator. 985 // Test if the value in the <src> register equals the accumulator.
1190 void Interpreter::DoTestEqual(InterpreterAssembler* assembler) { 986 void Interpreter::DoTestEqual(InterpreterAssembler* assembler) {
1191 DoBinaryOp(CodeFactory::Equal(isolate_), assembler); 987 DoBinaryOp(CodeFactory::Equal(isolate_), assembler);
1192 } 988 }
1193 989
1194 990
1195 // TestNotEqual <src> 991 // TestNotEqual <src>
1196 // 992 //
(...skipping 89 matching lines...)
1286 } 1082 }
1287 1083
1288 1084
1289 // ToObject 1085 // ToObject
1290 // 1086 //
1291 // Cast the object referenced by the accumulator to a JSObject. 1087 // Cast the object referenced by the accumulator to a JSObject.
1292 void Interpreter::DoToObject(InterpreterAssembler* assembler) { 1088 void Interpreter::DoToObject(InterpreterAssembler* assembler) {
1293 DoTypeConversionOp(CodeFactory::ToObject(isolate_), assembler); 1089 DoTypeConversionOp(CodeFactory::ToObject(isolate_), assembler);
1294 } 1090 }
1295 1091
1296 1092 // Jump <imm>
1297 // Jump <imm8>
1298 // 1093 //
1299 // Jump by number of bytes represented by the immediate operand |imm8|. 1094 // Jump by number of bytes represented by the immediate operand |imm|.
1300 void Interpreter::DoJump(InterpreterAssembler* assembler) { 1095 void Interpreter::DoJump(InterpreterAssembler* assembler) {
1301 Node* relative_jump = __ BytecodeOperandImm(0); 1096 Node* relative_jump = __ BytecodeOperandImm(0);
1302 __ Jump(relative_jump); 1097 __ Jump(relative_jump);
1303 } 1098 }
1304 1099
1305 1100 // JumpConstant <idx>
1306 // JumpConstant <idx8>
1307 // 1101 //
1308 // Jump by number of bytes in the Smi in the |idx8| entry in the constant pool. 1102 // Jump by number of bytes in the Smi in the |idx| entry in the constant pool.
1309 void Interpreter::DoJumpConstant(InterpreterAssembler* assembler) { 1103 void Interpreter::DoJumpConstant(InterpreterAssembler* assembler) {
1310 Node* index = __ BytecodeOperandIdx(0); 1104 Node* index = __ BytecodeOperandIdx(0);
1311 Node* constant = __ LoadConstantPoolEntry(index); 1105 Node* constant = __ LoadConstantPoolEntry(index);
1312 Node* relative_jump = __ SmiUntag(constant); 1106 Node* relative_jump = __ SmiUntag(constant);
1313 __ Jump(relative_jump); 1107 __ Jump(relative_jump);
1314 } 1108 }
1315 1109
1316 1110 // JumpIfTrue <imm>
1317 // JumpConstantWide <idx16>
1318 //
1319 // Jump by number of bytes in the Smi in the |idx16| entry in the
1320 // constant pool.
1321 void Interpreter::DoJumpConstantWide(InterpreterAssembler* assembler) {
1322 DoJumpConstant(assembler);
1323 }
1324
1325
1326 // JumpIfTrue <imm8>
1327 // 1111 //
1328 // Jump by number of bytes represented by an immediate operand if the 1112 // Jump by number of bytes represented by an immediate operand if the
1329 // accumulator contains true. 1113 // accumulator contains true.
1330 void Interpreter::DoJumpIfTrue(InterpreterAssembler* assembler) { 1114 void Interpreter::DoJumpIfTrue(InterpreterAssembler* assembler) {
1331 Node* accumulator = __ GetAccumulator(); 1115 Node* accumulator = __ GetAccumulator();
1332 Node* relative_jump = __ BytecodeOperandImm(0); 1116 Node* relative_jump = __ BytecodeOperandImm(0);
1333 Node* true_value = __ BooleanConstant(true); 1117 Node* true_value = __ BooleanConstant(true);
1334 __ JumpIfWordEqual(accumulator, true_value, relative_jump); 1118 __ JumpIfWordEqual(accumulator, true_value, relative_jump);
1335 } 1119 }
1336 1120
1337 1121 // JumpIfTrueConstant <idx>
1338 // JumpIfTrueConstant <idx8>
1339 // 1122 //
1340 // Jump by number of bytes in the Smi in the |idx8| entry in the constant pool 1123 // Jump by number of bytes in the Smi in the |idx| entry in the constant pool
1341 // if the accumulator contains true. 1124 // if the accumulator contains true.
1342 void Interpreter::DoJumpIfTrueConstant(InterpreterAssembler* assembler) { 1125 void Interpreter::DoJumpIfTrueConstant(InterpreterAssembler* assembler) {
1343 Node* accumulator = __ GetAccumulator(); 1126 Node* accumulator = __ GetAccumulator();
1344 Node* index = __ BytecodeOperandIdx(0); 1127 Node* index = __ BytecodeOperandIdx(0);
1345 Node* constant = __ LoadConstantPoolEntry(index); 1128 Node* constant = __ LoadConstantPoolEntry(index);
1346 Node* relative_jump = __ SmiUntag(constant); 1129 Node* relative_jump = __ SmiUntag(constant);
1347 Node* true_value = __ BooleanConstant(true); 1130 Node* true_value = __ BooleanConstant(true);
1348 __ JumpIfWordEqual(accumulator, true_value, relative_jump); 1131 __ JumpIfWordEqual(accumulator, true_value, relative_jump);
1349 } 1132 }
1350 1133
1351 1134 // JumpIfFalse <imm>
1352 // JumpIfTrueConstantWide <idx16>
1353 //
1354 // Jump by number of bytes in the Smi in the |idx16| entry in the constant pool
1355 // if the accumulator contains true.
1356 void Interpreter::DoJumpIfTrueConstantWide(InterpreterAssembler* assembler) {
1357 DoJumpIfTrueConstant(assembler);
1358 }
1359
1360
1361 // JumpIfFalse <imm8>
1362 // 1135 //
1363 // Jump by number of bytes represented by an immediate operand if the 1136 // Jump by number of bytes represented by an immediate operand if the
1364 // accumulator contains false. 1137 // accumulator contains false.
1365 void Interpreter::DoJumpIfFalse(InterpreterAssembler* assembler) { 1138 void Interpreter::DoJumpIfFalse(InterpreterAssembler* assembler) {
1366 Node* accumulator = __ GetAccumulator(); 1139 Node* accumulator = __ GetAccumulator();
1367 Node* relative_jump = __ BytecodeOperandImm(0); 1140 Node* relative_jump = __ BytecodeOperandImm(0);
1368 Node* false_value = __ BooleanConstant(false); 1141 Node* false_value = __ BooleanConstant(false);
1369 __ JumpIfWordEqual(accumulator, false_value, relative_jump); 1142 __ JumpIfWordEqual(accumulator, false_value, relative_jump);
1370 } 1143 }
1371 1144
1372 1145 // JumpIfFalseConstant <idx>
1373 // JumpIfFalseConstant <idx8>
1374 // 1146 //
1375 // Jump by number of bytes in the Smi in the |idx8| entry in the constant pool 1147 // Jump by number of bytes in the Smi in the |idx| entry in the constant pool
1376 // if the accumulator contains false. 1148 // if the accumulator contains false.
1377 void Interpreter::DoJumpIfFalseConstant(InterpreterAssembler* assembler) { 1149 void Interpreter::DoJumpIfFalseConstant(InterpreterAssembler* assembler) {
1378 Node* accumulator = __ GetAccumulator(); 1150 Node* accumulator = __ GetAccumulator();
1379 Node* index = __ BytecodeOperandIdx(0); 1151 Node* index = __ BytecodeOperandIdx(0);
1380 Node* constant = __ LoadConstantPoolEntry(index); 1152 Node* constant = __ LoadConstantPoolEntry(index);
1381 Node* relative_jump = __ SmiUntag(constant); 1153 Node* relative_jump = __ SmiUntag(constant);
1382 Node* false_value = __ BooleanConstant(false); 1154 Node* false_value = __ BooleanConstant(false);
1383 __ JumpIfWordEqual(accumulator, false_value, relative_jump); 1155 __ JumpIfWordEqual(accumulator, false_value, relative_jump);
1384 } 1156 }
1385 1157
1386 1158 // JumpIfToBooleanTrue <imm>
// JumpIfFalseConstantWide <idx16>
//
// Jump by number of bytes in the Smi in the |idx16| entry in the constant pool
// if the accumulator contains false.
void Interpreter::DoJumpIfFalseConstantWide(InterpreterAssembler* assembler) {
  DoJumpIfFalseConstant(assembler);
}
1394
1395
1396 // JumpIfToBooleanTrue <imm8>
1397 // 1159 //
1398 // Jump by number of bytes represented by an immediate operand if the object 1160 // Jump by number of bytes represented by an immediate operand if the object
1399 // referenced by the accumulator is true when the object is cast to boolean. 1161 // referenced by the accumulator is true when the object is cast to boolean.
1400 void Interpreter::DoJumpIfToBooleanTrue(InterpreterAssembler* assembler) { 1162 void Interpreter::DoJumpIfToBooleanTrue(InterpreterAssembler* assembler) {
1401 Callable callable = CodeFactory::ToBoolean(isolate_); 1163 Callable callable = CodeFactory::ToBoolean(isolate_);
1402 Node* target = __ HeapConstant(callable.code()); 1164 Node* target = __ HeapConstant(callable.code());
1403 Node* accumulator = __ GetAccumulator(); 1165 Node* accumulator = __ GetAccumulator();
1404 Node* context = __ GetContext(); 1166 Node* context = __ GetContext();
1405 Node* to_boolean_value = 1167 Node* to_boolean_value =
1406 __ CallStub(callable.descriptor(), target, context, accumulator); 1168 __ CallStub(callable.descriptor(), target, context, accumulator);
1407 Node* relative_jump = __ BytecodeOperandImm(0); 1169 Node* relative_jump = __ BytecodeOperandImm(0);
1408 Node* true_value = __ BooleanConstant(true); 1170 Node* true_value = __ BooleanConstant(true);
1409 __ JumpIfWordEqual(to_boolean_value, true_value, relative_jump); 1171 __ JumpIfWordEqual(to_boolean_value, true_value, relative_jump);
1410 } 1172 }
1411 1173
1412 1174 // JumpIfToBooleanTrueConstant <idx>
1413 // JumpIfToBooleanTrueConstant <idx8>
1414 // 1175 //
1415 // Jump by number of bytes in the Smi in the |idx8| entry in the constant pool 1176 // Jump by number of bytes in the Smi in the |idx| entry in the constant pool
1416 // if the object referenced by the accumulator is true when the object is cast 1177 // if the object referenced by the accumulator is true when the object is cast
1417 // to boolean. 1178 // to boolean.
1418 void Interpreter::DoJumpIfToBooleanTrueConstant( 1179 void Interpreter::DoJumpIfToBooleanTrueConstant(
1419 InterpreterAssembler* assembler) { 1180 InterpreterAssembler* assembler) {
1420 Callable callable = CodeFactory::ToBoolean(isolate_); 1181 Callable callable = CodeFactory::ToBoolean(isolate_);
1421 Node* target = __ HeapConstant(callable.code()); 1182 Node* target = __ HeapConstant(callable.code());
1422 Node* accumulator = __ GetAccumulator(); 1183 Node* accumulator = __ GetAccumulator();
1423 Node* context = __ GetContext(); 1184 Node* context = __ GetContext();
1424 Node* to_boolean_value = 1185 Node* to_boolean_value =
1425 __ CallStub(callable.descriptor(), target, context, accumulator); 1186 __ CallStub(callable.descriptor(), target, context, accumulator);
1426 Node* index = __ BytecodeOperandIdx(0); 1187 Node* index = __ BytecodeOperandIdx(0);
1427 Node* constant = __ LoadConstantPoolEntry(index); 1188 Node* constant = __ LoadConstantPoolEntry(index);
1428 Node* relative_jump = __ SmiUntag(constant); 1189 Node* relative_jump = __ SmiUntag(constant);
1429 Node* true_value = __ BooleanConstant(true); 1190 Node* true_value = __ BooleanConstant(true);
1430 __ JumpIfWordEqual(to_boolean_value, true_value, relative_jump); 1191 __ JumpIfWordEqual(to_boolean_value, true_value, relative_jump);
1431 } 1192 }
1432 1193
1433 1194 // JumpIfToBooleanFalse <imm>
1434 // JumpIfToBooleanTrueConstantWide <idx16>
1435 //
1436 // Jump by number of bytes in the Smi in the |idx16| entry in the constant pool
1437 // if the object referenced by the accumulator is true when the object is cast
1438 // to boolean.
1439 void Interpreter::DoJumpIfToBooleanTrueConstantWide(
1440 InterpreterAssembler* assembler) {
1441 DoJumpIfToBooleanTrueConstant(assembler);
1442 }
1443
1444
1445 // JumpIfToBooleanFalse <imm8>
1446 // 1195 //
1447 // Jump by number of bytes represented by an immediate operand if the object 1196 // Jump by number of bytes represented by an immediate operand if the object
1448 // referenced by the accumulator is false when the object is cast to boolean. 1197 // referenced by the accumulator is false when the object is cast to boolean.
1449 void Interpreter::DoJumpIfToBooleanFalse(InterpreterAssembler* assembler) { 1198 void Interpreter::DoJumpIfToBooleanFalse(InterpreterAssembler* assembler) {
1450 Callable callable = CodeFactory::ToBoolean(isolate_); 1199 Callable callable = CodeFactory::ToBoolean(isolate_);
1451 Node* target = __ HeapConstant(callable.code()); 1200 Node* target = __ HeapConstant(callable.code());
1452 Node* accumulator = __ GetAccumulator(); 1201 Node* accumulator = __ GetAccumulator();
1453 Node* context = __ GetContext(); 1202 Node* context = __ GetContext();
1454 Node* to_boolean_value = 1203 Node* to_boolean_value =
1455 __ CallStub(callable.descriptor(), target, context, accumulator); 1204 __ CallStub(callable.descriptor(), target, context, accumulator);
1456 Node* relative_jump = __ BytecodeOperandImm(0); 1205 Node* relative_jump = __ BytecodeOperandImm(0);
1457 Node* false_value = __ BooleanConstant(false); 1206 Node* false_value = __ BooleanConstant(false);
1458 __ JumpIfWordEqual(to_boolean_value, false_value, relative_jump); 1207 __ JumpIfWordEqual(to_boolean_value, false_value, relative_jump);
1459 } 1208 }
1460 1209
1461 1210 // JumpIfToBooleanFalseConstant <idx>
1462 // JumpIfToBooleanFalseConstant <idx8>
1463 // 1211 //
1464 // Jump by number of bytes in the Smi in the |idx8| entry in the constant pool 1212 // Jump by number of bytes in the Smi in the |idx| entry in the constant pool
1465 // if the object referenced by the accumulator is false when the object is cast 1213 // if the object referenced by the accumulator is false when the object is cast
1466 // to boolean. 1214 // to boolean.
1467 void Interpreter::DoJumpIfToBooleanFalseConstant( 1215 void Interpreter::DoJumpIfToBooleanFalseConstant(
1468 InterpreterAssembler* assembler) { 1216 InterpreterAssembler* assembler) {
1469 Callable callable = CodeFactory::ToBoolean(isolate_); 1217 Callable callable = CodeFactory::ToBoolean(isolate_);
1470 Node* target = __ HeapConstant(callable.code()); 1218 Node* target = __ HeapConstant(callable.code());
1471 Node* accumulator = __ GetAccumulator(); 1219 Node* accumulator = __ GetAccumulator();
1472 Node* context = __ GetContext(); 1220 Node* context = __ GetContext();
1473 Node* to_boolean_value = 1221 Node* to_boolean_value =
1474 __ CallStub(callable.descriptor(), target, context, accumulator); 1222 __ CallStub(callable.descriptor(), target, context, accumulator);
1475 Node* index = __ BytecodeOperandIdx(0); 1223 Node* index = __ BytecodeOperandIdx(0);
1476 Node* constant = __ LoadConstantPoolEntry(index); 1224 Node* constant = __ LoadConstantPoolEntry(index);
1477 Node* relative_jump = __ SmiUntag(constant); 1225 Node* relative_jump = __ SmiUntag(constant);
1478 Node* false_value = __ BooleanConstant(false); 1226 Node* false_value = __ BooleanConstant(false);
1479 __ JumpIfWordEqual(to_boolean_value, false_value, relative_jump); 1227 __ JumpIfWordEqual(to_boolean_value, false_value, relative_jump);
1480 } 1228 }
1481 1229
1482 1230 // JumpIfNull <imm>
1483 // JumpIfToBooleanFalseConstantWide <idx16>
1484 //
1485 // Jump by number of bytes in the Smi in the |idx16| entry in the constant pool
1486 // if the object referenced by the accumulator is false when the object is cast
1487 // to boolean.
1488 void Interpreter::DoJumpIfToBooleanFalseConstantWide(
1489 InterpreterAssembler* assembler) {
1490 DoJumpIfToBooleanFalseConstant(assembler);
1491 }
1492
1493
1494 // JumpIfNull <imm8>
1495 // 1231 //
1496 // Jump by number of bytes represented by an immediate operand if the object 1232 // Jump by number of bytes represented by an immediate operand if the object
1497 // referenced by the accumulator is the null constant. 1233 // referenced by the accumulator is the null constant.
1498 void Interpreter::DoJumpIfNull(InterpreterAssembler* assembler) { 1234 void Interpreter::DoJumpIfNull(InterpreterAssembler* assembler) {
1499 Node* accumulator = __ GetAccumulator(); 1235 Node* accumulator = __ GetAccumulator();
1500 Node* null_value = __ HeapConstant(isolate_->factory()->null_value()); 1236 Node* null_value = __ HeapConstant(isolate_->factory()->null_value());
1501 Node* relative_jump = __ BytecodeOperandImm(0); 1237 Node* relative_jump = __ BytecodeOperandImm(0);
1502 __ JumpIfWordEqual(accumulator, null_value, relative_jump); 1238 __ JumpIfWordEqual(accumulator, null_value, relative_jump);
1503 } 1239 }
1504 1240
1505 1241 // JumpIfNullConstant <idx>
1506 // JumpIfNullConstant <idx8>
1507 // 1242 //
1508 // Jump by number of bytes in the Smi in the |idx8| entry in the constant pool 1243 // Jump by number of bytes in the Smi in the |idx| entry in the constant pool
1509 // if the object referenced by the accumulator is the null constant. 1244 // if the object referenced by the accumulator is the null constant.
1510 void Interpreter::DoJumpIfNullConstant(InterpreterAssembler* assembler) { 1245 void Interpreter::DoJumpIfNullConstant(InterpreterAssembler* assembler) {
1511 Node* accumulator = __ GetAccumulator(); 1246 Node* accumulator = __ GetAccumulator();
1512 Node* null_value = __ HeapConstant(isolate_->factory()->null_value()); 1247 Node* null_value = __ HeapConstant(isolate_->factory()->null_value());
1513 Node* index = __ BytecodeOperandIdx(0); 1248 Node* index = __ BytecodeOperandIdx(0);
1514 Node* constant = __ LoadConstantPoolEntry(index); 1249 Node* constant = __ LoadConstantPoolEntry(index);
1515 Node* relative_jump = __ SmiUntag(constant); 1250 Node* relative_jump = __ SmiUntag(constant);
1516 __ JumpIfWordEqual(accumulator, null_value, relative_jump); 1251 __ JumpIfWordEqual(accumulator, null_value, relative_jump);
1517 } 1252 }
1518 1253
1519 1254 // JumpIfUndefined <imm>
1520 // JumpIfNullConstantWide <idx16>
1521 //
1522 // Jump by number of bytes in the Smi in the |idx16| entry in the constant pool
1523 // if the object referenced by the accumulator is the null constant.
1524 void Interpreter::DoJumpIfNullConstantWide(InterpreterAssembler* assembler) {
1525 DoJumpIfNullConstant(assembler);
1526 }
1527
1528 // JumpIfUndefined <imm8>
1529 // 1255 //
1530 // Jump by number of bytes represented by an immediate operand if the object 1256 // Jump by number of bytes represented by an immediate operand if the object
1531 // referenced by the accumulator is the undefined constant. 1257 // referenced by the accumulator is the undefined constant.
1532 void Interpreter::DoJumpIfUndefined(InterpreterAssembler* assembler) { 1258 void Interpreter::DoJumpIfUndefined(InterpreterAssembler* assembler) {
1533 Node* accumulator = __ GetAccumulator(); 1259 Node* accumulator = __ GetAccumulator();
1534 Node* undefined_value = 1260 Node* undefined_value =
1535 __ HeapConstant(isolate_->factory()->undefined_value()); 1261 __ HeapConstant(isolate_->factory()->undefined_value());
1536 Node* relative_jump = __ BytecodeOperandImm(0); 1262 Node* relative_jump = __ BytecodeOperandImm(0);
1537 __ JumpIfWordEqual(accumulator, undefined_value, relative_jump); 1263 __ JumpIfWordEqual(accumulator, undefined_value, relative_jump);
1538 } 1264 }
1539 1265
1540 1266 // JumpIfUndefinedConstant <idx>
1541 // JumpIfUndefinedConstant <idx8>
1542 // 1267 //
1543 // Jump by number of bytes in the Smi in the |idx8| entry in the constant pool 1268 // Jump by number of bytes in the Smi in the |idx| entry in the constant pool
1544 // if the object referenced by the accumulator is the undefined constant. 1269 // if the object referenced by the accumulator is the undefined constant.
1545 void Interpreter::DoJumpIfUndefinedConstant(InterpreterAssembler* assembler) { 1270 void Interpreter::DoJumpIfUndefinedConstant(InterpreterAssembler* assembler) {
1546 Node* accumulator = __ GetAccumulator(); 1271 Node* accumulator = __ GetAccumulator();
1547 Node* undefined_value = 1272 Node* undefined_value =
1548 __ HeapConstant(isolate_->factory()->undefined_value()); 1273 __ HeapConstant(isolate_->factory()->undefined_value());
1549 Node* index = __ BytecodeOperandIdx(0); 1274 Node* index = __ BytecodeOperandIdx(0);
1550 Node* constant = __ LoadConstantPoolEntry(index); 1275 Node* constant = __ LoadConstantPoolEntry(index);
1551 Node* relative_jump = __ SmiUntag(constant); 1276 Node* relative_jump = __ SmiUntag(constant);
1552 __ JumpIfWordEqual(accumulator, undefined_value, relative_jump); 1277 __ JumpIfWordEqual(accumulator, undefined_value, relative_jump);
1553 } 1278 }
1554 1279
1555 1280 // JumpIfNotHole <imm>
1556 // JumpIfUndefinedConstantWide <idx16>
1557 //
1558 // Jump by number of bytes in the Smi in the |idx16| entry in the constant pool
1559 // if the object referenced by the accumulator is the undefined constant.
1560 void Interpreter::DoJumpIfUndefinedConstantWide(
1561 InterpreterAssembler* assembler) {
1562 DoJumpIfUndefinedConstant(assembler);
1563 }
1564
1565 // JumpIfNotHole <imm8>
1566 // 1281 //
1567 // Jump by number of bytes represented by an immediate operand if the object 1282 // Jump by number of bytes represented by an immediate operand if the object
1568 // referenced by the accumulator is the hole. 1283 // referenced by the accumulator is the hole.
1569 void Interpreter::DoJumpIfNotHole(InterpreterAssembler* assembler) { 1284 void Interpreter::DoJumpIfNotHole(InterpreterAssembler* assembler) {
1570 Node* accumulator = __ GetAccumulator(); 1285 Node* accumulator = __ GetAccumulator();
1571 Node* the_hole_value = __ HeapConstant(isolate_->factory()->the_hole_value()); 1286 Node* the_hole_value = __ HeapConstant(isolate_->factory()->the_hole_value());
1572 Node* relative_jump = __ BytecodeOperandImm(0); 1287 Node* relative_jump = __ BytecodeOperandImm(0);
1573 __ JumpIfWordNotEqual(accumulator, the_hole_value, relative_jump); 1288 __ JumpIfWordNotEqual(accumulator, the_hole_value, relative_jump);
1574 } 1289 }
1575 1290
1576 // JumpIfNotHoleConstant <idx8> 1291 // JumpIfNotHoleConstant <idx>
1577 // 1292 //
1578 // Jump by number of bytes in the Smi in the |idx8| entry in the constant pool 1293 // Jump by number of bytes in the Smi in the |idx| entry in the constant pool
1579 // if the object referenced by the accumulator is the hole constant. 1294 // if the object referenced by the accumulator is the hole constant.
1580 void Interpreter::DoJumpIfNotHoleConstant(InterpreterAssembler* assembler) { 1295 void Interpreter::DoJumpIfNotHoleConstant(InterpreterAssembler* assembler) {
1581 Node* accumulator = __ GetAccumulator(); 1296 Node* accumulator = __ GetAccumulator();
1582 Node* the_hole_value = __ HeapConstant(isolate_->factory()->the_hole_value()); 1297 Node* the_hole_value = __ HeapConstant(isolate_->factory()->the_hole_value());
1583 Node* index = __ BytecodeOperandIdx(0); 1298 Node* index = __ BytecodeOperandIdx(0);
1584 Node* constant = __ LoadConstantPoolEntry(index); 1299 Node* constant = __ LoadConstantPoolEntry(index);
1585 Node* relative_jump = __ SmiUntag(constant); 1300 Node* relative_jump = __ SmiUntag(constant);
1586 __ JumpIfWordNotEqual(accumulator, the_hole_value, relative_jump); 1301 __ JumpIfWordNotEqual(accumulator, the_hole_value, relative_jump);
1587 } 1302 }
1588 1303
1589 // JumpIfNotHoleConstantWide <idx16>
1590 //
1591 // Jump by number of bytes in the Smi in the |idx16| entry in the constant pool
1592 // if the object referenced by the accumulator is the hole constant.
1593 void Interpreter::DoJumpIfNotHoleConstantWide(InterpreterAssembler* assembler) {
1594 DoJumpIfNotHoleConstant(assembler);
1595 }
1596
1597 void Interpreter::DoCreateLiteral(Runtime::FunctionId function_id, 1304 void Interpreter::DoCreateLiteral(Runtime::FunctionId function_id,
1598 InterpreterAssembler* assembler) { 1305 InterpreterAssembler* assembler) {
1599 Node* index = __ BytecodeOperandIdx(0); 1306 Node* index = __ BytecodeOperandIdx(0);
1600 Node* constant_elements = __ LoadConstantPoolEntry(index); 1307 Node* constant_elements = __ LoadConstantPoolEntry(index);
1601 Node* literal_index_raw = __ BytecodeOperandIdx(1); 1308 Node* literal_index_raw = __ BytecodeOperandIdx(1);
1602 Node* literal_index = __ SmiTag(literal_index_raw); 1309 Node* literal_index = __ SmiTag(literal_index_raw);
1603 Node* flags_raw = __ BytecodeOperandImm(2); 1310 Node* flags_raw = __ BytecodeOperandFlag(2);
1604 Node* flags = __ SmiTag(flags_raw); 1311 Node* flags = __ SmiTag(flags_raw);
1605 Node* closure = __ LoadRegister(Register::function_closure()); 1312 Node* closure = __ LoadRegister(Register::function_closure());
1606 Node* context = __ GetContext(); 1313 Node* context = __ GetContext();
1607 Node* result = __ CallRuntime(function_id, context, closure, literal_index, 1314 Node* result = __ CallRuntime(function_id, context, closure, literal_index,
1608 constant_elements, flags); 1315 constant_elements, flags);
1609 __ SetAccumulator(result); 1316 __ SetAccumulator(result);
1610 __ Dispatch(); 1317 __ Dispatch();
1611 } 1318 }
1612 1319
1613 1320
1614 // CreateRegExpLiteral <pattern_idx> <literal_idx> <flags> 1321 // CreateRegExpLiteral <pattern_idx> <literal_idx> <flags>
1615 // 1322 //
1616 // Creates a regular expression literal for literal index <literal_idx> with 1323 // Creates a regular expression literal for literal index <literal_idx> with
1617 // <flags> and the pattern in <pattern_idx>. 1324 // <flags> and the pattern in <pattern_idx>.
1618 void Interpreter::DoCreateRegExpLiteral(InterpreterAssembler* assembler) { 1325 void Interpreter::DoCreateRegExpLiteral(InterpreterAssembler* assembler) {
1619 Callable callable = CodeFactory::FastCloneRegExp(isolate_); 1326 Callable callable = CodeFactory::FastCloneRegExp(isolate_);
1620 Node* target = __ HeapConstant(callable.code()); 1327 Node* target = __ HeapConstant(callable.code());
1621 Node* index = __ BytecodeOperandIdx(0); 1328 Node* index = __ BytecodeOperandIdx(0);
1622 Node* pattern = __ LoadConstantPoolEntry(index); 1329 Node* pattern = __ LoadConstantPoolEntry(index);
1623 Node* literal_index_raw = __ BytecodeOperandIdx(1); 1330 Node* literal_index_raw = __ BytecodeOperandIdx(1);
1624 Node* literal_index = __ SmiTag(literal_index_raw); 1331 Node* literal_index = __ SmiTag(literal_index_raw);
1625 Node* flags_raw = __ BytecodeOperandImm(2); 1332 Node* flags_raw = __ BytecodeOperandFlag(2);
1626 Node* flags = __ SmiTag(flags_raw); 1333 Node* flags = __ SmiTag(flags_raw);
1627 Node* closure = __ LoadRegister(Register::function_closure()); 1334 Node* closure = __ LoadRegister(Register::function_closure());
1628 Node* context = __ GetContext(); 1335 Node* context = __ GetContext();
1629 Node* result = __ CallStub(callable.descriptor(), target, context, closure, 1336 Node* result = __ CallStub(callable.descriptor(), target, context, closure,
1630 literal_index, pattern, flags); 1337 literal_index, pattern, flags);
1631 __ SetAccumulator(result); 1338 __ SetAccumulator(result);
1632 __ Dispatch(); 1339 __ Dispatch();
1633 } 1340 }
1634 1341
1635
1636 // CreateRegExpLiteralWide <pattern_idx> <literal_idx> <flags>
1637 //
1638 // Creates a regular expression literal for literal index <literal_idx> with
1639 // <flags> and the pattern in <pattern_idx>.
1640 void Interpreter::DoCreateRegExpLiteralWide(InterpreterAssembler* assembler) {
1641 DoCreateRegExpLiteral(assembler);
1642 }
1643
1644
1645 // CreateArrayLiteral <element_idx> <literal_idx> <flags> 1342 // CreateArrayLiteral <element_idx> <literal_idx> <flags>
1646 // 1343 //
1647 // Creates an array literal for literal index <literal_idx> with flags <flags> 1344 // Creates an array literal for literal index <literal_idx> with flags <flags>
1648 // and constant elements in <element_idx>. 1345 // and constant elements in <element_idx>.
1649 void Interpreter::DoCreateArrayLiteral(InterpreterAssembler* assembler) { 1346 void Interpreter::DoCreateArrayLiteral(InterpreterAssembler* assembler) {
1650 DoCreateLiteral(Runtime::kCreateArrayLiteral, assembler); 1347 DoCreateLiteral(Runtime::kCreateArrayLiteral, assembler);
1651 } 1348 }
1652 1349
1653
1654 // CreateArrayLiteralWide <element_idx> <literal_idx> <flags>
1655 //
1656 // Creates an array literal for literal index <literal_idx> with flags <flags>
1657 // and constant elements in <element_idx>.
1658 void Interpreter::DoCreateArrayLiteralWide(InterpreterAssembler* assembler) {
1659 DoCreateLiteral(Runtime::kCreateArrayLiteral, assembler);
1660 }
1661
1662
1663 // CreateObjectLiteral <element_idx> <literal_idx> <flags> 1350 // CreateObjectLiteral <element_idx> <literal_idx> <flags>
1664 // 1351 //
1665 // Creates an object literal for literal index <literal_idx> with flags <flags> 1352 // Creates an object literal for literal index <literal_idx> with flags <flags>
1666 // and constant elements in <element_idx>. 1353 // and constant elements in <element_idx>.
1667 void Interpreter::DoCreateObjectLiteral(InterpreterAssembler* assembler) { 1354 void Interpreter::DoCreateObjectLiteral(InterpreterAssembler* assembler) {
1668 DoCreateLiteral(Runtime::kCreateObjectLiteral, assembler); 1355 DoCreateLiteral(Runtime::kCreateObjectLiteral, assembler);
1669 } 1356 }
1670 1357
1671
1672 // CreateObjectLiteralWide <element_idx> <literal_idx> <flags>
1673 //
1674 // Creates an object literal for literal index <literal_idx> with flags <flags>
1675 // and constant elements in <element_idx>.
1676 void Interpreter::DoCreateObjectLiteralWide(InterpreterAssembler* assembler) {
1677 DoCreateLiteral(Runtime::kCreateObjectLiteral, assembler);
1678 }
1679
1680
1681 // CreateClosure <index> <tenured> 1358 // CreateClosure <index> <tenured>
1682 // 1359 //
1683 // Creates a new closure for SharedFunctionInfo at position |index| in the 1360 // Creates a new closure for SharedFunctionInfo at position |index| in the
1684 // constant pool and with the PretenureFlag <tenured>. 1361 // constant pool and with the PretenureFlag <tenured>.
1685 void Interpreter::DoCreateClosure(InterpreterAssembler* assembler) { 1362 void Interpreter::DoCreateClosure(InterpreterAssembler* assembler) {
1686 // TODO(rmcilroy): Possibly call FastNewClosureStub when possible instead of 1363 // TODO(rmcilroy): Possibly call FastNewClosureStub when possible instead of
1687 // calling into the runtime. 1364 // calling into the runtime.
1688 Node* index = __ BytecodeOperandIdx(0); 1365 Node* index = __ BytecodeOperandIdx(0);
1689 Node* shared = __ LoadConstantPoolEntry(index); 1366 Node* shared = __ LoadConstantPoolEntry(index);
1690 Node* tenured_raw = __ BytecodeOperandImm(1); 1367 Node* tenured_raw = __ BytecodeOperandFlag(1);
1691 Node* tenured = __ SmiTag(tenured_raw); 1368 Node* tenured = __ SmiTag(tenured_raw);
1692 Node* context = __ GetContext(); 1369 Node* context = __ GetContext();
1693 Node* result = 1370 Node* result =
1694 __ CallRuntime(Runtime::kInterpreterNewClosure, context, shared, tenured); 1371 __ CallRuntime(Runtime::kInterpreterNewClosure, context, shared, tenured);
1695 __ SetAccumulator(result); 1372 __ SetAccumulator(result);
1696 __ Dispatch(); 1373 __ Dispatch();
1697 } 1374 }
1698 1375
1699
1700 // CreateClosureWide <index> <tenured>
1701 //
1702 // Creates a new closure for SharedFunctionInfo at position |index| in the
1703 // constant pool and with the PretenureFlag <tenured>.
1704 void Interpreter::DoCreateClosureWide(InterpreterAssembler* assembler) {
1705 return DoCreateClosure(assembler);
1706 }
1707
1708
1709 // CreateMappedArguments 1376 // CreateMappedArguments
1710 // 1377 //
1711 // Creates a new mapped arguments object. 1378 // Creates a new mapped arguments object.
1712 void Interpreter::DoCreateMappedArguments(InterpreterAssembler* assembler) { 1379 void Interpreter::DoCreateMappedArguments(InterpreterAssembler* assembler) {
1713 Node* closure = __ LoadRegister(Register::function_closure()); 1380 Node* closure = __ LoadRegister(Register::function_closure());
1714 Node* context = __ GetContext(); 1381 Node* context = __ GetContext();
1715 Node* result = 1382 Node* result =
1716 __ CallRuntime(Runtime::kNewSloppyArguments_Generic, context, closure); 1383 __ CallRuntime(Runtime::kNewSloppyArguments_Generic, context, closure);
1717 __ SetAccumulator(result); 1384 __ SetAccumulator(result);
1718 __ Dispatch(); 1385 __ Dispatch();
(...skipping 101 matching lines...)
1820 // 0 == cache_type, 1 == cache_array, 2 == cache_length 1487 // 0 == cache_type, 1 == cache_array, 2 == cache_length
1821 Node* output_register = __ BytecodeOperandReg(0); 1488 Node* output_register = __ BytecodeOperandReg(0);
1822 for (int i = 0; i < 3; i++) { 1489 for (int i = 0; i < 3; i++) {
1823 Node* cache_info = __ Projection(i, result_triple); 1490 Node* cache_info = __ Projection(i, result_triple);
1824 __ StoreRegister(cache_info, output_register); 1491 __ StoreRegister(cache_info, output_register);
1825 output_register = __ NextRegister(output_register); 1492 output_register = __ NextRegister(output_register);
1826 } 1493 }
1827 __ Dispatch(); 1494 __ Dispatch();
1828 } 1495 }
1829 1496
1830
1831 // ForInPrepareWide <cache_info_triple>
1832 //
1833 // Returns state for for..in loop execution based on the object in the
1834 // accumulator. The result is output in registers |cache_info_triple| to
1835 // |cache_info_triple + 2|, with the registers holding cache_type, cache_array,
1836 // and cache_length respectively.
1837 void Interpreter::DoForInPrepareWide(InterpreterAssembler* assembler) {
1838 DoForInPrepare(assembler);
1839 }
1840
1841
1842 // ForInNext <receiver> <index> <cache_info_pair> 1497 // ForInNext <receiver> <index> <cache_info_pair>
1843 // 1498 //
1844 // Returns the next enumerable property in the the accumulator. 1499 // Returns the next enumerable property in the the accumulator.
1845 void Interpreter::DoForInNext(InterpreterAssembler* assembler) { 1500 void Interpreter::DoForInNext(InterpreterAssembler* assembler) {
1846 Node* receiver_reg = __ BytecodeOperandReg(0); 1501 Node* receiver_reg = __ BytecodeOperandReg(0);
1847 Node* receiver = __ LoadRegister(receiver_reg); 1502 Node* receiver = __ LoadRegister(receiver_reg);
1848 Node* index_reg = __ BytecodeOperandReg(1); 1503 Node* index_reg = __ BytecodeOperandReg(1);
1849 Node* index = __ LoadRegister(index_reg); 1504 Node* index = __ LoadRegister(index_reg);
1850 Node* cache_type_reg = __ BytecodeOperandReg(2); 1505 Node* cache_type_reg = __ BytecodeOperandReg(2);
1851 Node* cache_type = __ LoadRegister(cache_type_reg); 1506 Node* cache_type = __ LoadRegister(cache_type_reg);
(...skipping 26 matching lines...)
1878 1533
1879 // Need to filter the {key} for the {receiver}. 1534 // Need to filter the {key} for the {receiver}.
1880 Node* context = __ GetContext(); 1535 Node* context = __ GetContext();
1881 Node* result = 1536 Node* result =
1882 __ CallRuntime(Runtime::kForInFilter, context, receiver, key); 1537 __ CallRuntime(Runtime::kForInFilter, context, receiver, key);
1883 __ SetAccumulator(result); 1538 __ SetAccumulator(result);
1884 __ Dispatch(); 1539 __ Dispatch();
1885 } 1540 }
1886 } 1541 }
1887 1542
1888
1889 // ForInNextWide <receiver> <index> <cache_info_pair>
1890 //
1891 // Returns the next enumerable property in the the accumulator.
1892 void Interpreter::DoForInNextWide(InterpreterAssembler* assembler) {
1893 return DoForInNext(assembler);
1894 }
1895
1896
1897 // ForInDone <index> <cache_length> 1543 // ForInDone <index> <cache_length>
1898 // 1544 //
1899 // Returns true if the end of the enumerable properties has been reached. 1545 // Returns true if the end of the enumerable properties has been reached.
1900 void Interpreter::DoForInDone(InterpreterAssembler* assembler) { 1546 void Interpreter::DoForInDone(InterpreterAssembler* assembler) {
1901 // TODO(oth): Implement directly rather than making a runtime call. 1547 // TODO(oth): Implement directly rather than making a runtime call.
1902 Node* index_reg = __ BytecodeOperandReg(0); 1548 Node* index_reg = __ BytecodeOperandReg(0);
1903 Node* index = __ LoadRegister(index_reg); 1549 Node* index = __ LoadRegister(index_reg);
1904 Node* cache_length_reg = __ BytecodeOperandReg(1); 1550 Node* cache_length_reg = __ BytecodeOperandReg(1);
1905 Node* cache_length = __ LoadRegister(cache_length_reg); 1551 Node* cache_length = __ LoadRegister(cache_length_reg);
1906 Node* context = __ GetContext(); 1552 Node* context = __ GetContext();
1907 Node* result = 1553 Node* result =
1908 __ CallRuntime(Runtime::kForInDone, context, index, cache_length); 1554 __ CallRuntime(Runtime::kForInDone, context, index, cache_length);
1909 __ SetAccumulator(result); 1555 __ SetAccumulator(result);
1910 __ Dispatch(); 1556 __ Dispatch();
1911 } 1557 }
1912 1558
1913
1914 // ForInStep <index> 1559 // ForInStep <index>
1915 // 1560 //
1916 // Increments the loop counter in register |index| and stores the result 1561 // Increments the loop counter in register |index| and stores the result
1917 // in the accumulator. 1562 // in the accumulator.
1918 void Interpreter::DoForInStep(InterpreterAssembler* assembler) { 1563 void Interpreter::DoForInStep(InterpreterAssembler* assembler) {
1919 Node* index_reg = __ BytecodeOperandReg(0); 1564 Node* index_reg = __ BytecodeOperandReg(0);
1920 Node* index = __ LoadRegister(index_reg); 1565 Node* index = __ LoadRegister(index_reg);
1921 Node* one = __ SmiConstant(Smi::FromInt(1)); 1566 Node* one = __ SmiConstant(Smi::FromInt(1));
1922 Node* result = __ SmiAdd(index, one); 1567 Node* result = __ SmiAdd(index, one);
1923 __ SetAccumulator(result); 1568 __ SetAccumulator(result);
1924 __ Dispatch(); 1569 __ Dispatch();
1925 } 1570 }
1926 1571
1572 // Wide
1573 //
1574 // Prefix bytecode indicating next bytecode has wide (16-bit) operands.
1575 void Interpreter::DoWide(InterpreterAssembler* assembler) {
1576 __ DispatchWide(OperandScale::k2X);
1577 }
1578
1579 // ExtraWide
1580 //
1581 // Prefix bytecode indicating next bytecode has extra-wide (32-bit) operands.
1582 void Interpreter::DoExtraWide(InterpreterAssembler* assembler) {
1583 __ DispatchWide(OperandScale::k4X);
1584 }
1585
1586 // Illegal
1587 //
1588 // An invalid bytecode aborting execution if dispatched.
1589 void Interpreter::DoIllegal(InterpreterAssembler* assembler) {
1590 __ Abort(kInvalidBytecode);
1591 }
1592
1927 } // namespace interpreter 1593 } // namespace interpreter
1928 } // namespace internal 1594 } // namespace internal
1929 } // namespace v8 1595 } // namespace v8
OLDNEW

Powered by Google App Engine