
Unified Diff: src/interpreter/interpreter-assembler.cc

Issue 2552883012: [interpreter][stubs] Fixing issues found by machine graph verifier. (Closed)
Patch Set: Addressing nits (created 4 years ago)
 // Copyright 2015 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #include "src/interpreter/interpreter-assembler.h"

 #include <limits>
 #include <ostream>

 #include "src/code-factory.h"
(...skipping 202 matching lines...)
   Node* operand_offset = OperandOffset(operand_index);
   return Load(MachineType::Uint8(), BytecodeArrayTaggedPointer(),
               IntPtrAdd(BytecodeOffset(), operand_offset));
 }

 Node* InterpreterAssembler::BytecodeOperandSignedByte(int operand_index) {
   DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_));
   DCHECK_EQ(OperandSize::kByte, Bytecodes::GetOperandSize(
                                     bytecode_, operand_index, operand_scale()));
   Node* operand_offset = OperandOffset(operand_index);
-  Node* load = Load(MachineType::Int8(), BytecodeArrayTaggedPointer(),
-                    IntPtrAdd(BytecodeOffset(), operand_offset));
-
-  // Ensure that we sign extend to full pointer size
-  if (kPointerSize == 8) {
-    load = ChangeInt32ToInt64(load);
-  }
-  return load;
+  return Load(MachineType::Int8(), BytecodeArrayTaggedPointer(),
+              IntPtrAdd(BytecodeOffset(), operand_offset));
 }

 compiler::Node* InterpreterAssembler::BytecodeOperandReadUnaligned(
     int relative_offset, MachineType result_type) {
   static const int kMaxCount = 4;
   DCHECK(!TargetSupportsUnalignedAccess());

   int count;
   switch (result_type.representation()) {
     case MachineRepresentation::kWord16:
(...skipping 55 matching lines...)
   }
 }

 Node* InterpreterAssembler::BytecodeOperandSignedShort(int operand_index) {
   DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_));
   DCHECK_EQ(
       OperandSize::kShort,
       Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale()));
   int operand_offset =
       Bytecodes::GetOperandOffset(bytecode_, operand_index, operand_scale());
-  Node* load;
   if (TargetSupportsUnalignedAccess()) {
-    load = Load(MachineType::Int16(), BytecodeArrayTaggedPointer(),
-                IntPtrAdd(BytecodeOffset(), IntPtrConstant(operand_offset)));
+    return Load(MachineType::Int16(), BytecodeArrayTaggedPointer(),
+                IntPtrAdd(BytecodeOffset(), IntPtrConstant(operand_offset)));
   } else {
-    load = BytecodeOperandReadUnaligned(operand_offset, MachineType::Int16());
+    return BytecodeOperandReadUnaligned(operand_offset, MachineType::Int16());
   }
-
-  // Ensure that we sign extend to full pointer size
-  if (kPointerSize == 8) {
-    load = ChangeInt32ToInt64(load);
-  }
-  return load;
 }

 Node* InterpreterAssembler::BytecodeOperandUnsignedQuad(int operand_index) {
   DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_));
   DCHECK_EQ(OperandSize::kQuad, Bytecodes::GetOperandSize(
                                     bytecode_, operand_index, operand_scale()));
   int operand_offset =
       Bytecodes::GetOperandOffset(bytecode_, operand_index, operand_scale());
   if (TargetSupportsUnalignedAccess()) {
     return Load(MachineType::Uint32(), BytecodeArrayTaggedPointer(),
                 IntPtrAdd(BytecodeOffset(), IntPtrConstant(operand_offset)));
   } else {
     return BytecodeOperandReadUnaligned(operand_offset, MachineType::Uint32());
   }
 }

 Node* InterpreterAssembler::BytecodeOperandSignedQuad(int operand_index) {
   DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_));
   DCHECK_EQ(OperandSize::kQuad, Bytecodes::GetOperandSize(
                                     bytecode_, operand_index, operand_scale()));
   int operand_offset =
       Bytecodes::GetOperandOffset(bytecode_, operand_index, operand_scale());
-  Node* load;
   if (TargetSupportsUnalignedAccess()) {
-    load = Load(MachineType::Int32(), BytecodeArrayTaggedPointer(),
-                IntPtrAdd(BytecodeOffset(), IntPtrConstant(operand_offset)));
+    return Load(MachineType::Int32(), BytecodeArrayTaggedPointer(),
+                IntPtrAdd(BytecodeOffset(), IntPtrConstant(operand_offset)));
   } else {
-    load = BytecodeOperandReadUnaligned(operand_offset, MachineType::Int32());
+    return BytecodeOperandReadUnaligned(operand_offset, MachineType::Int32());
   }
-
-  // Ensure that we sign extend to full pointer size
-  if (kPointerSize == 8) {
-    load = ChangeInt32ToInt64(load);
-  }
-  return load;
 }
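Note on the three hunks above: each signed-operand getter previously widened its Word32 load to 64 bits inline (`ChangeInt32ToInt64` under `kPointerSize == 8`). The machine graph verifier wants each node's representation to be explicit, so the getters now uniformly return Word32 values and the widening happens once, in the typed callers below (`ChangeInt32ToIntPtr` / `ChangeUint32ToWord`). A minimal sketch of the two conversions using ordinary C++ integers rather than V8 `Node*` values:

    #include <cstdint>

    // Sign-extension, as ChangeInt32ToIntPtr performs on a 64-bit target
    // (a no-op on 32-bit): the upper bits replicate the sign bit.
    intptr_t SignExtendToWord(int32_t v) { return static_cast<intptr_t>(v); }

    // Zero-extension, as ChangeUint32ToWord performs: the upper bits are
    // filled with zeros, which is what unsigned indexes need.
    uintptr_t ZeroExtendToWord(uint32_t v) { return static_cast<uintptr_t>(v); }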

 Node* InterpreterAssembler::BytecodeSignedOperand(int operand_index,
                                                   OperandSize operand_size) {
   DCHECK(!Bytecodes::IsUnsignedOperandType(
       Bytecodes::GetOperandType(bytecode_, operand_index)));
   switch (operand_size) {
     case OperandSize::kByte:
       return BytecodeOperandSignedByte(operand_index);
     case OperandSize::kShort:
(...skipping 49 matching lines...)
 }

 Node* InterpreterAssembler::BytecodeOperandImm(int operand_index) {
   DCHECK_EQ(OperandType::kImm,
             Bytecodes::GetOperandType(bytecode_, operand_index));
   OperandSize operand_size =
       Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale());
   return BytecodeSignedOperand(operand_index, operand_size);
 }

+Node* InterpreterAssembler::BytecodeOperandImmIntPtr(int operand_index) {
+  return ChangeInt32ToIntPtr(BytecodeOperandImm(operand_index));
+}
+
+Node* InterpreterAssembler::BytecodeOperandImmSmi(int operand_index) {
+  return SmiFromWord32(BytecodeOperandImm(operand_index));
+}
+
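`BytecodeOperandImmIntPtr` and `BytecodeOperandImmSmi` are new typed variants: the raw immediate stays Word32, and the caller picks the pointer-sized or tagged form. As a rough standalone illustration of the `SmiFromWord32` case, assuming the 64-bit Smi layout of this era (payload in the upper half of the word, tag bit 0 clear):

    #include <cstdint>

    // Sketch: tag a 32-bit value as a 64-bit Smi. Shifting the payload into
    // the upper half cannot overflow and leaves bit 0 (the Smi tag) zero.
    int64_t SmiFromWord32(int32_t value) {
      return static_cast<int64_t>(value) << 32;
    }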
 Node* InterpreterAssembler::BytecodeOperandIdx(int operand_index) {
   DCHECK(OperandType::kIdx ==
          Bytecodes::GetOperandType(bytecode_, operand_index));
   OperandSize operand_size =
       Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale());
-  return BytecodeUnsignedOperand(operand_index, operand_size);
+  return ChangeUint32ToWord(
+      BytecodeUnsignedOperand(operand_index, operand_size));
+}
+
+Node* InterpreterAssembler::BytecodeOperandIdxSmi(int operand_index) {
+  return SmiTag(BytecodeOperandIdx(operand_index));
 }

 Node* InterpreterAssembler::BytecodeOperandReg(int operand_index) {
   DCHECK(Bytecodes::IsRegisterOperandType(
       Bytecodes::GetOperandType(bytecode_, operand_index)));
   OperandSize operand_size =
       Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale());
-  return BytecodeSignedOperand(operand_index, operand_size);
+  return ChangeInt32ToIntPtr(
+      BytecodeSignedOperand(operand_index, operand_size));
 }

 Node* InterpreterAssembler::BytecodeOperandRuntimeId(int operand_index) {
   DCHECK(OperandType::kRuntimeId ==
          Bytecodes::GetOperandType(bytecode_, operand_index));
   OperandSize operand_size =
       Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale());
   DCHECK_EQ(operand_size, OperandSize::kShort);
   return BytecodeUnsignedOperand(operand_index, operand_size);
 }

 Node* InterpreterAssembler::BytecodeOperandIntrinsicId(int operand_index) {
   DCHECK(OperandType::kIntrinsicId ==
          Bytecodes::GetOperandType(bytecode_, operand_index));
   OperandSize operand_size =
       Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale());
   DCHECK_EQ(operand_size, OperandSize::kByte);
   return BytecodeUnsignedOperand(operand_index, operand_size);
 }

 Node* InterpreterAssembler::LoadConstantPoolEntry(Node* index) {
   Node* constant_pool = LoadObjectField(BytecodeArrayTaggedPointer(),
                                         BytecodeArray::kConstantPoolOffset);
-  Node* entry_offset =
-      IntPtrAdd(IntPtrConstant(FixedArray::kHeaderSize - kHeapObjectTag),
-                WordShl(index, kPointerSizeLog2));
-  return Load(MachineType::AnyTagged(), constant_pool, entry_offset);
+  return LoadFixedArrayElement(constant_pool, index, 0, INTPTR_PARAMETERS);
 }
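`LoadFixedArrayElement` with `INTPTR_PARAMETERS` (the `ParameterMode` telling the assembler that `index` is already an untagged, pointer-sized value) subsumes the manual address computation deleted here. The arithmetic it replaces is simple; a standalone sketch, assuming the usual 64-bit constants (`kHeapObjectTag == 1`, 8-byte slots, a two-word map + length header):

    #include <cstdint>

    // Sketch of the deleted offset computation: element offset relative to a
    // tagged FixedArray pointer. Constant values are illustrative assumptions.
    const intptr_t kHeapObjectTag = 1;          // tagged pointers are off by one
    const intptr_t kPointerSizeLog2 = 3;        // 8-byte slots on 64-bit
    const intptr_t kFixedArrayHeaderSize = 16;  // map + length, 64-bit layout

    intptr_t ElementOffset(intptr_t index) {
      return (kFixedArrayHeaderSize - kHeapObjectTag) +
             (index << kPointerSizeLog2);
    }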

 Node* InterpreterAssembler::LoadAndUntagConstantPoolEntry(Node* index) {
-  Node* constant_pool = LoadObjectField(BytecodeArrayTaggedPointer(),
-                                        BytecodeArray::kConstantPoolOffset);
-  int offset = FixedArray::kHeaderSize - kHeapObjectTag;
-#if V8_TARGET_LITTLE_ENDIAN
-  if (Is64()) {
-    offset += kPointerSize / 2;
-  }
-#endif
-  Node* entry_offset =
-      IntPtrAdd(IntPtrConstant(offset), WordShl(index, kPointerSizeLog2));
-  if (Is64()) {
-    return ChangeInt32ToInt64(
-        Load(MachineType::Int32(), constant_pool, entry_offset));
-  } else {
-    return SmiUntag(
-        Load(MachineType::AnyTagged(), constant_pool, entry_offset));
-  }
+  return SmiUntag(LoadConstantPoolEntry(index));
 }
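The deleted body shows what `SmiUntag` abstracts away: on 64-bit little-endian targets the old code loaded only the upper 32-bit half of the tagged slot (hence `offset += kPointerSize / 2`), which is exactly the Smi payload. A sketch of the equivalent arithmetic, under the same assumed 64-bit Smi layout as above:

    #include <cstdint>

    // Sketch: untag a 64-bit Smi. The arithmetic shift recovers the signed
    // 32-bit payload; loading just the upper half-word is equivalent on a
    // little-endian machine.
    int32_t SmiUntag(int64_t smi) {
      return static_cast<int32_t>(smi >> 32);
    }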

 Node* InterpreterAssembler::LoadTypeFeedbackVector() {
   Node* function = LoadRegister(Register::function_closure());
   Node* literals = LoadObjectField(function, JSFunction::kLiteralsOffset);
   Node* vector =
       LoadObjectField(literals, LiteralsArray::kFeedbackVectorOffset);
   return vector;
 }

(...skipping 14 matching lines...)
     stack_pointer_before_call_ = nullptr;
     AbortIfWordNotEqual(stack_pointer_before_call, stack_pointer_after_call,
                         kUnexpectedStackPointer);
   }
 }

 Node* InterpreterAssembler::IncrementCallCount(Node* type_feedback_vector,
                                                Node* slot_id) {
   Comment("increment call count");
   Node* call_count_slot = IntPtrAdd(slot_id, IntPtrConstant(1));
-  Node* call_count =
-      LoadFixedArrayElement(type_feedback_vector, call_count_slot);
-  Node* new_count = SmiAdd(call_count, SmiTag(Int32Constant(1)));
+  Node* call_count = LoadFixedArrayElement(
+      type_feedback_vector, call_count_slot, 0, INTPTR_PARAMETERS);
+  Node* new_count = SmiAdd(call_count, SmiConstant(1));
   // Count is Smi, so we don't need a write barrier.
-  return StoreFixedArrayElement(type_feedback_vector, call_count_slot,
-                                new_count, SKIP_WRITE_BARRIER);
+  return StoreFixedArrayElement(type_feedback_vector, call_count_slot,
+                                new_count, SKIP_WRITE_BARRIER, 0,
+                                INTPTR_PARAMETERS);
 }
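`SmiConstant(1)` replaces `SmiTag(Int32Constant(1))`, producing the tagged constant directly with a representation the verifier accepts. Note that `SmiAdd` needs no untag/retag round trip; a sketch of why, under the same assumed Smi layout:

    #include <cstdint>

    // Sketch: adding two Smis as plain words is sound because
    // (a << 32) + (b << 32) == (a + b) << 32, and the zero tag bit survives
    // as long as a + b still fits in 32 bits.
    int64_t SmiAdd(int64_t smi_a, int64_t smi_b) {
      return smi_a + smi_b;
    }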

 Node* InterpreterAssembler::CallJSWithFeedback(Node* function, Node* context,
                                                Node* first_arg, Node* arg_count,
                                                Node* slot_id,
                                                Node* type_feedback_vector,
                                                TailCallMode tail_call_mode) {
   // Static checks to assert it is safe to examine the type feedback element.
   // We don't know that we have a weak cell. We might have a private symbol
   // or an AllocationSite, but the memory is safe to examine.
   // AllocationSite::kTransitionInfoOffset - contains a Smi or pointer to
   // FixedArray.
   // WeakCell::kValueOffset - contains a JSFunction or Smi(0)
   // Symbol::kHashFieldSlot - if the low bit is 1, then the hash is not
   // computed, meaning that it can't appear to be a pointer. If the low bit is
   // 0, then hash is computed, but the 0 bit prevents the field from appearing
   // to be a pointer.
   STATIC_ASSERT(WeakCell::kSize >= kPointerSize);
   STATIC_ASSERT(AllocationSite::kTransitionInfoOffset ==
                     WeakCell::kValueOffset &&
                 WeakCell::kValueOffset == Symbol::kHashFieldSlot);

   Variable return_value(this, MachineRepresentation::kTagged);
   Label call_function(this), extra_checks(this, Label::kDeferred), call(this),
       end(this);

   // The checks. First, does function match the recorded monomorphic target?
-  Node* feedback_element = LoadFixedArrayElement(type_feedback_vector, slot_id);
+  Node* feedback_element = LoadFixedArrayElement(type_feedback_vector, slot_id,
+                                                 0, INTPTR_PARAMETERS);
   Node* feedback_value = LoadWeakCellValueUnchecked(feedback_element);
   Node* is_monomorphic = WordEqual(function, feedback_value);
   GotoUnless(is_monomorphic, &extra_checks);

   // The compare above could have been a SMI/SMI comparison. Guard against
   // this convincing us that we have a monomorphic JSFunction.
   Node* is_smi = TaggedIsSmi(function);
   Branch(is_smi, &extra_checks, &call_function);

   Bind(&call_function);
(...skipping 17 matching lines...)
       create_allocation_site(this);

   Comment("check if megamorphic");
   // Check if it is a megamorphic target.
   Node* is_megamorphic = WordEqual(
       feedback_element,
       HeapConstant(TypeFeedbackVector::MegamorphicSentinel(isolate())));
   GotoIf(is_megamorphic, &call);

   Comment("check if it is an allocation site");
-  Node* is_allocation_site = WordEqual(
-      LoadMap(feedback_element), LoadRoot(Heap::kAllocationSiteMapRootIndex));
-  GotoUnless(is_allocation_site, &check_initialized);
+  GotoUnless(IsAllocationSiteMap(LoadMap(feedback_element)),
+             &check_initialized);

   // If it is not the Array() function, mark megamorphic.
   Node* context_slot = LoadContextElement(LoadNativeContext(context),
                                           Context::ARRAY_FUNCTION_INDEX);
   Node* is_array_function = WordEqual(context_slot, function);
   GotoUnless(is_array_function, &mark_megamorphic);

   // It is a monomorphic Array function. Increment the call count.
   IncrementCallCount(type_feedback_vector, slot_id);

(...skipping 17 matching lines...)
     GotoUnless(is_uninitialized, &mark_megamorphic);

     Comment("handle_unitinitialized");
     // If it is not a JSFunction mark it as megamorphic.
     Node* is_smi = TaggedIsSmi(function);
     GotoIf(is_smi, &mark_megamorphic);

     // Check if function is an object of JSFunction type.
     Node* instance_type = LoadInstanceType(function);
     Node* is_js_function =
-        WordEqual(instance_type, Int32Constant(JS_FUNCTION_TYPE));
+        Word32Equal(instance_type, Int32Constant(JS_FUNCTION_TYPE));
     GotoUnless(is_js_function, &mark_megamorphic);

     // Check if it is the Array() function.
     Node* context_slot = LoadContextElement(LoadNativeContext(context),
                                             Context::ARRAY_FUNCTION_INDEX);
     Node* is_array_function = WordEqual(context_slot, function);
     GotoIf(is_array_function, &create_allocation_site);

     // Check if the function belongs to the same native context.
     Node* native_context = LoadNativeContext(
(...skipping 22 matching lines...)

   Bind(&mark_megamorphic);
   {
     // Mark it as a megamorphic.
     // MegamorphicSentinel is created as a part of Heap::InitialObjects
     // and will not move during a GC. So it is safe to skip write barrier.
     DCHECK(Heap::RootIsImmortalImmovable(Heap::kmegamorphic_symbolRootIndex));
     StoreFixedArrayElement(
         type_feedback_vector, slot_id,
         HeapConstant(TypeFeedbackVector::MegamorphicSentinel(isolate())),
-        SKIP_WRITE_BARRIER);
+        SKIP_WRITE_BARRIER, 0, INTPTR_PARAMETERS);
     Goto(&call);
   }
 }

 Bind(&call);
 {
   Comment("Increment call count and call using Call builtin");
   // Increment the call count.
   IncrementCallCount(type_feedback_vector, slot_id);

(...skipping 10 matching lines...)
   Bind(&end);
   return return_value.value();
 }

 Node* InterpreterAssembler::CallJS(Node* function, Node* context,
                                    Node* first_arg, Node* arg_count,
                                    TailCallMode tail_call_mode) {
   Callable callable = CodeFactory::InterpreterPushArgsAndCall(
       isolate(), tail_call_mode, CallableType::kAny);
   Node* code_target = HeapConstant(callable.code());
+
   return CallStub(callable.descriptor(), code_target, context, arg_count,
                   first_arg, function);
 }

 Node* InterpreterAssembler::CallConstruct(Node* constructor, Node* context,
                                           Node* new_target, Node* first_arg,
                                           Node* arg_count, Node* slot_id,
                                           Node* type_feedback_vector) {
   Variable return_value(this, MachineRepresentation::kTagged);
   Variable allocation_feedback(this, MachineRepresentation::kTagged);
   Label call_construct_function(this, &allocation_feedback),
       extra_checks(this, Label::kDeferred), call_construct(this), end(this);

   // Slot id of 0 is used to indicate no type feedback is available.
   STATIC_ASSERT(TypeFeedbackVector::kReservedIndexCount > 0);
-  Node* is_feedback_unavailable = Word32Equal(slot_id, Int32Constant(0));
+  Node* is_feedback_unavailable = WordEqual(slot_id, IntPtrConstant(0));
   GotoIf(is_feedback_unavailable, &call_construct);

   // Check that the constructor is not a smi.
   Node* is_smi = TaggedIsSmi(constructor);
   GotoIf(is_smi, &call_construct);

   // Check that constructor is a JSFunction.
   Node* instance_type = LoadInstanceType(constructor);
   Node* is_js_function =
-      WordEqual(instance_type, Int32Constant(JS_FUNCTION_TYPE));
+      Word32Equal(instance_type, Int32Constant(JS_FUNCTION_TYPE));
   GotoUnless(is_js_function, &call_construct);

   // Check if it is a monomorphic constructor.
-  Node* feedback_element = LoadFixedArrayElement(type_feedback_vector, slot_id);
+  Node* feedback_element = LoadFixedArrayElement(type_feedback_vector, slot_id,
+                                                 0, INTPTR_PARAMETERS);
   Node* feedback_value = LoadWeakCellValueUnchecked(feedback_element);
   Node* is_monomorphic = WordEqual(constructor, feedback_value);
   allocation_feedback.Bind(UndefinedConstant());
   Branch(is_monomorphic, &call_construct_function, &extra_checks);

   Bind(&call_construct_function);
   {
     Comment("call using callConstructFunction");
     IncrementCallCount(type_feedback_vector, slot_id);
     Callable callable_function = CodeFactory::InterpreterPushArgsAndConstruct(
(...skipping 84 matching lines...)

   Bind(&mark_megamorphic);
   {
     // MegamorphicSentinel is an immortal immovable object so
     // write-barrier is not needed.
     Comment("transition to megamorphic");
     DCHECK(Heap::RootIsImmortalImmovable(Heap::kmegamorphic_symbolRootIndex));
     StoreFixedArrayElement(
         type_feedback_vector, slot_id,
         HeapConstant(TypeFeedbackVector::MegamorphicSentinel(isolate())),
-        SKIP_WRITE_BARRIER);
+        SKIP_WRITE_BARRIER, 0, INTPTR_PARAMETERS);
     Goto(&call_construct_function);
   }
 }

 Bind(&call_construct);
 {
   Comment("call using callConstruct builtin");
   Callable callable = CodeFactory::InterpreterPushArgsAndConstruct(
       isolate(), CallableType::kAny);
   Node* code_target = HeapConstant(callable.code());
(...skipping 11 matching lines...)
                                         Node* first_arg, Node* arg_count,
                                         int result_size) {
   Callable callable = CodeFactory::InterpreterCEntry(isolate(), result_size);
   Node* code_target = HeapConstant(callable.code());

   // Get the function entry from the function id.
   Node* function_table = ExternalConstant(
       ExternalReference::runtime_function_table_address(isolate()));
   Node* function_offset =
       Int32Mul(function_id, Int32Constant(sizeof(Runtime::Function)));
-  Node* function = IntPtrAdd(function_table, function_offset);
+  Node* function =
+      IntPtrAdd(function_table, ChangeUint32ToWord(function_offset));
   Node* function_entry =
       Load(MachineType::Pointer(), function,
            IntPtrConstant(offsetof(Runtime::Function, entry)));

   return CallStub(callable.descriptor(), code_target, context, arg_count,
                   first_arg, function_entry, result_size);
 }
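`function_id` is a Word32, so the byte offset it produces must be widened with `ChangeUint32ToWord` before the pointer-sized `IntPtrAdd`. The addressing scheme itself is plain array indexing into the runtime function table; a standalone sketch with a stand-in struct (not the real `Runtime::Function` layout):

    #include <cstddef>
    #include <cstdint>

    // Stand-in for Runtime::Function: only the entry pointer matters here.
    struct RuntimeFunction {
      const char* name;
      void* entry;
    };

    // Sketch of the lookup: base + id * sizeof(struct), then read the
    // entry field at its offset within the struct.
    void* FunctionEntry(const RuntimeFunction* table, uint32_t function_id) {
      uintptr_t function =
          reinterpret_cast<uintptr_t>(table) +
          static_cast<uintptr_t>(function_id) * sizeof(RuntimeFunction);
      return *reinterpret_cast<void**>(function +
                                       offsetof(RuntimeFunction, entry));
    }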

 void InterpreterAssembler::UpdateInterruptBudget(Node* weight) {
   // TODO(rmcilroy): It might be worthwhile to only update the budget for
(...skipping 40 matching lines...)
     TraceBytecode(Runtime::kInterpreterTraceBytecodeExit);
   }
   Node* next_offset = IntPtrAdd(BytecodeOffset(), delta);
   bytecode_offset_.Bind(next_offset);
   return next_offset;
 }

 Node* InterpreterAssembler::Jump(Node* delta) {
   DCHECK(!Bytecodes::IsStarLookahead(bytecode_, operand_scale_));

-  UpdateInterruptBudget(delta);
+  UpdateInterruptBudget(TruncateWordToWord32(delta));
   Node* new_bytecode_offset = Advance(delta);
   Node* target_bytecode = LoadBytecode(new_bytecode_offset);
   return DispatchToBytecode(target_bytecode, new_bytecode_offset);
 }
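`delta` is pointer-sized because it feeds `Advance`, but the interrupt budget is a 32-bit counter, so the verifier requires the narrowing to be spelled out (the same conversion appears in `UpdateInterruptBudgetOnReturn` below). The equivalent in plain C++, as a sketch:

    #include <cstdint>

    // Sketch: explicit truncation from word width to 32 bits, as
    // TruncateWordToWord32 does (a no-op on a 32-bit target).
    int32_t TruncateWordToWord32(intptr_t v) { return static_cast<int32_t>(v); }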

 void InterpreterAssembler::JumpConditional(Node* condition, Node* delta) {
   Label match(this), no_match(this);

   Branch(condition, &match, &no_match);
   Bind(&match);
   Jump(delta);
   Bind(&no_match);
   Dispatch();
 }

 void InterpreterAssembler::JumpIfWordEqual(Node* lhs, Node* rhs, Node* delta) {
   JumpConditional(WordEqual(lhs, rhs), delta);
 }

 void InterpreterAssembler::JumpIfWordNotEqual(Node* lhs, Node* rhs,
                                               Node* delta) {
   JumpConditional(WordNotEqual(lhs, rhs), delta);
 }

 Node* InterpreterAssembler::LoadBytecode(compiler::Node* bytecode_offset) {
   Node* bytecode =
       Load(MachineType::Uint8(), BytecodeArrayTaggedPointer(), bytecode_offset);
-  if (kPointerSize == 8) {
-    bytecode = ChangeUint32ToUint64(bytecode);
-  }
-  return bytecode;
+  return ChangeUint32ToWord(bytecode);
 }

 Node* InterpreterAssembler::StarDispatchLookahead(Node* target_bytecode) {
   Label do_inline_star(this), done(this);

   Variable var_bytecode(this, MachineType::PointerRepresentation());
   var_bytecode.Bind(target_bytecode);

   Node* star_bytecode = IntPtrConstant(static_cast<int>(Bytecode::kStar));
   Node* is_star = WordEqual(target_bytecode, star_bytecode);
(...skipping 22 matching lines...)
   StoreRegister(GetAccumulator(), BytecodeOperandReg(0));

   DCHECK_EQ(accumulator_use_, Bytecodes::GetAccumulatorUse(bytecode_));

   Advance();
   bytecode_ = previous_bytecode;
   accumulator_use_ = previous_acc_use;
 }

 Node* InterpreterAssembler::Dispatch() {
+  Comment("========= Dispatch");
   Node* target_offset = Advance();
   Node* target_bytecode = LoadBytecode(target_offset);

   if (Bytecodes::IsStarLookahead(bytecode_, operand_scale_)) {
     target_bytecode = StarDispatchLookahead(target_bytecode);
   }
   return DispatchToBytecode(target_bytecode, BytecodeOffset());
 }

 Node* InterpreterAssembler::DispatchToBytecode(Node* target_bytecode,
                                                Node* new_bytecode_offset) {
   if (FLAG_trace_ignition_dispatches) {
     TraceBytecodeDispatch(target_bytecode);
   }

   Node* target_code_entry =
       Load(MachineType::Pointer(), DispatchTableRawPointer(),
            WordShl(target_bytecode, IntPtrConstant(kPointerSizeLog2)));

   return DispatchToBytecodeHandlerEntry(target_code_entry, new_bytecode_offset);
 }

 Node* InterpreterAssembler::DispatchToBytecodeHandler(Node* handler,
                                                       Node* bytecode_offset) {
+  // TODO(ishell): Add CSA::CodeEntryPoint(code).
   Node* handler_entry =
-      IntPtrAdd(handler, IntPtrConstant(Code::kHeaderSize - kHeapObjectTag));
+      IntPtrAdd(BitcastTaggedToWord(handler),
+                IntPtrConstant(Code::kHeaderSize - kHeapObjectTag));
   return DispatchToBytecodeHandlerEntry(handler_entry, bytecode_offset);
 }
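`handler` is a tagged `Code` object, and the verifier will not allow raw pointer arithmetic on a tagged representation, hence the explicit `BitcastTaggedToWord` before computing the entry address. A sketch of the computation with ordinary integers (the header size here is a made-up stand-in for `Code::kHeaderSize`):

    #include <cstdint>

    const intptr_t kHeapObjectTag = 1;
    const intptr_t kCodeHeaderSize = 96;  // hypothetical Code::kHeaderSize

    // Sketch: address of the first instruction of a Code object, given its
    // tagged pointer, once the tagged word is reinterpreted as an integer.
    intptr_t HandlerEntry(intptr_t tagged_code_pointer) {
      return tagged_code_pointer + (kCodeHeaderSize - kHeapObjectTag);
    }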

 Node* InterpreterAssembler::DispatchToBytecodeHandlerEntry(
     Node* handler_entry, Node* bytecode_offset) {
   InterpreterDispatchDescriptor descriptor(isolate());
   Node* args[] = {GetAccumulatorUnchecked(), bytecode_offset,
                   BytecodeArrayTaggedPointer(), DispatchTableRawPointer()};
   return TailCallBytecodeDispatch(descriptor, handler_entry, args);
 }
(...skipping 121 matching lines...)
 }

 void InterpreterAssembler::UpdateInterruptBudgetOnReturn() {
   // TODO(rmcilroy): Investigate whether it is worth supporting self
   // optimization of primitive functions like FullCodegen.

   // Update profiling count by -BytecodeOffset to simulate backedge to start of
   // function.
   Node* profiling_weight =
       Int32Sub(Int32Constant(kHeapObjectTag + BytecodeArray::kHeaderSize),
-               BytecodeOffset());
+               TruncateWordToWord32(BytecodeOffset()));
   UpdateInterruptBudget(profiling_weight);
 }

 Node* InterpreterAssembler::StackCheckTriggeredInterrupt() {
   Node* sp = LoadStackPointer();
   Node* stack_limit = Load(
       MachineType::Pointer(),
       ExternalConstant(ExternalReference::address_of_stack_limit(isolate())));
   return UintPtrLessThan(sp, stack_limit);
 }

 Node* InterpreterAssembler::LoadOSRNestingLevel() {
-  Node* offset =
-      IntPtrConstant(BytecodeArray::kOSRNestingLevelOffset - kHeapObjectTag);
-  return Load(MachineType::Int8(), BytecodeArrayTaggedPointer(), offset);
+  return LoadObjectField(BytecodeArrayTaggedPointer(),
+                         BytecodeArray::kOSRNestingLevelOffset,
+                         MachineType::Int8());
 }

 void InterpreterAssembler::Abort(BailoutReason bailout_reason) {
   disable_stack_check_across_call_ = true;
   Node* abort_id = SmiTag(Int32Constant(bailout_reason));
   CallRuntime(Runtime::kAbort, GetContext(), abort_id);
   disable_stack_check_across_call_ = false;
 }

 void InterpreterAssembler::AbortIfWordNotEqual(Node* lhs, Node* rhs,
(...skipping 51 matching lines...)
     V8_TARGET_ARCH_PPC
   return true;
 #else
 #error "Unknown Architecture"
 #endif
 }

 Node* InterpreterAssembler::RegisterCount() {
   Node* bytecode_array = LoadRegister(Register::bytecode_array());
   Node* frame_size = LoadObjectField(
-      bytecode_array, BytecodeArray::kFrameSizeOffset, MachineType::Int32());
-  return Word32Sar(frame_size, Int32Constant(kPointerSizeLog2));
+      bytecode_array, BytecodeArray::kFrameSizeOffset, MachineType::Uint32());
+  return WordShr(ChangeUint32ToWord(frame_size),
+                 IntPtrConstant(kPointerSizeLog2));
 }
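The frame size is a byte count read as Uint32; dividing by the pointer size yields the register count. Widening with `ChangeUint32ToWord` and shifting with `WordShr` keeps the whole computation in pointer-width unsigned arithmetic instead of the old 32-bit arithmetic shift. A sketch:

    #include <cstdint>

    const uintptr_t kPointerSizeLog2 = 3;  // 8-byte registers on 64-bit

    // Sketch: frame size in bytes -> number of interpreter registers.
    uintptr_t RegisterCount(uint32_t frame_size_bytes) {
      return static_cast<uintptr_t>(frame_size_bytes) >> kPointerSizeLog2;
    }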

 Node* InterpreterAssembler::ExportRegisterFile(Node* array) {
+  Node* register_count = RegisterCount();
   if (FLAG_debug_code) {
     Node* array_size = LoadAndUntagFixedArrayBaseLength(array);
-    AbortIfWordNotEqual(
-        array_size, RegisterCount(), kInvalidRegisterFileInGenerator);
+    AbortIfWordNotEqual(array_size, register_count,
+                        kInvalidRegisterFileInGenerator);
   }

-  Variable var_index(this, MachineRepresentation::kWord32);
-  var_index.Bind(Int32Constant(0));
+  Variable var_index(this, MachineType::PointerRepresentation());
+  var_index.Bind(IntPtrConstant(0));

   // Iterate over register file and write values into array.
   // The mapping of register to array index must match that used in
   // BytecodeGraphBuilder::VisitResumeGenerator.
   Label loop(this, &var_index), done_loop(this);
   Goto(&loop);
   Bind(&loop);
   {
     Node* index = var_index.value();
-    Node* condition = Int32LessThan(index, RegisterCount());
-    GotoUnless(condition, &done_loop);
+    GotoUnless(UintPtrLessThan(index, register_count), &done_loop);

-    Node* reg_index =
-        Int32Sub(Int32Constant(Register(0).ToOperand()), index);
-    Node* value = LoadRegister(ChangeInt32ToIntPtr(reg_index));
+    Node* reg_index = IntPtrSub(IntPtrConstant(Register(0).ToOperand()), index);
+    Node* value = LoadRegister(reg_index);

-    StoreFixedArrayElement(array, index, value);
+    StoreFixedArrayElement(array, index, value, UPDATE_WRITE_BARRIER, 0,
+                           INTPTR_PARAMETERS);

-    var_index.Bind(Int32Add(index, Int32Constant(1)));
+    var_index.Bind(IntPtrAdd(index, IntPtrConstant(1)));
     Goto(&loop);
   }
   Bind(&done_loop);

   return array;
 }
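The export loop now runs entirely on pointer-width indexes: `var_index` is an intptr, the bound check is `UintPtrLessThan`, and the register operand is derived by subtracting the array index from `Register(0).ToOperand()`, so no per-iteration `ChangeInt32ToIntPtr` is needed. A sketch of the index mapping, treating the register file and the output array as plain C++ arrays (names are illustrative stand-ins):

    #include <cstdint>

    // Sketch: copy interpreter registers into an array using the same index
    // mapping as the loop above. register_zero_operand stands in for
    // Register(0).ToOperand(); successive registers live at decreasing
    // frame-relative operands, so `frame` must point into the middle of the
    // frame for the (possibly negative) indexing to be valid.
    void ExportRegisters(const intptr_t* frame, intptr_t register_zero_operand,
                         intptr_t* array, intptr_t register_count) {
      for (intptr_t index = 0; index < register_count; ++index) {
        intptr_t reg_operand = register_zero_operand - index;
        array[index] = frame[reg_operand];  // LoadRegister + store element
      }
    }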

 Node* InterpreterAssembler::ImportRegisterFile(Node* array) {
+  Node* register_count = RegisterCount();
   if (FLAG_debug_code) {
     Node* array_size = LoadAndUntagFixedArrayBaseLength(array);
-    AbortIfWordNotEqual(
-        array_size, RegisterCount(), kInvalidRegisterFileInGenerator);
+    AbortIfWordNotEqual(array_size, register_count,
+                        kInvalidRegisterFileInGenerator);
   }

-  Variable var_index(this, MachineRepresentation::kWord32);
-  var_index.Bind(Int32Constant(0));
+  Variable var_index(this, MachineType::PointerRepresentation());
+  var_index.Bind(IntPtrConstant(0));

   // Iterate over array and write values into register file. Also erase the
   // array contents to not keep them alive artificially.
   Label loop(this, &var_index), done_loop(this);
   Goto(&loop);
   Bind(&loop);
   {
     Node* index = var_index.value();
-    Node* condition = Int32LessThan(index, RegisterCount());
-    GotoUnless(condition, &done_loop);
+    GotoUnless(UintPtrLessThan(index, register_count), &done_loop);

-    Node* value = LoadFixedArrayElement(array, index);
+    Node* value = LoadFixedArrayElement(array, index, 0, INTPTR_PARAMETERS);

-    Node* reg_index =
-        Int32Sub(Int32Constant(Register(0).ToOperand()), index);
-    StoreRegister(value, ChangeInt32ToIntPtr(reg_index));
+    Node* reg_index = IntPtrSub(IntPtrConstant(Register(0).ToOperand()), index);
+    StoreRegister(value, reg_index);

-    StoreFixedArrayElement(array, index, StaleRegisterConstant());
+    StoreFixedArrayElement(array, index, StaleRegisterConstant(),
+                           UPDATE_WRITE_BARRIER, 0, INTPTR_PARAMETERS);

-    var_index.Bind(Int32Add(index, Int32Constant(1)));
+    var_index.Bind(IntPtrAdd(index, IntPtrConstant(1)));
     Goto(&loop);
   }
   Bind(&done_loop);

   return array;
 }

 }  // namespace interpreter
 }  // namespace internal
 }  // namespace v8