Chromium Code Reviews

Side by Side Diff: test/unittests/interpreter/interpreter-assembler-unittest.cc

Issue 2498073002: [refactoring] Split CodeAssemblerState out of CodeAssembler (Closed)
Patch Set: one more attempt (created 4 years, 1 month ago)
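This patch set changes every test to first build an InterpreterAssemblerTestState (a thin wrapper around compiler::CodeAssemblerState) and hand it to InterpreterAssemblerForTest, instead of constructing the assembler directly on the test fixture. Below is a minimal sketch of the new pattern, reconstructed from the right-hand side of the diff; the test name is hypothetical and the real constructor declarations live in interpreter-assembler-unittest.h.

TARGET_TEST_F(InterpreterAssemblerTest, UsageSketch) {
  TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
    // New two-step setup: the state object owns the CodeAssemblerState plumbing...
    InterpreterAssemblerTestState state(this, bytecode);
    // ...and the assembler under test borrows it instead of creating its own.
    InterpreterAssemblerForTest m(&state, bytecode);
    // Existing assertions work unchanged once m exists.
    Node* a = m.Int32Constant(0);
    Node* b = m.Int32Constant(1);
    EXPECT_THAT(m.IntPtrAdd(a, b), IsIntPtrAdd(a, b));
  }
}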
 // Copyright 2015 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 
 #include "test/unittests/interpreter/interpreter-assembler-unittest.h"
 
 #include "src/code-factory.h"
-#include "src/compiler/graph.h"
 #include "src/compiler/node.h"
 #include "src/interface-descriptors.h"
 #include "src/isolate.h"
 #include "test/unittests/compiler/compiler-test-utils.h"
 #include "test/unittests/compiler/node-test-utils.h"
 
 using ::testing::_;
 
 namespace v8 {
 namespace internal {
 
 using namespace compiler;
 
 namespace interpreter {
 
+InterpreterAssemblerTestState::InterpreterAssemblerTestState(
+    InterpreterAssemblerTest* test, Bytecode bytecode)
+    : compiler::CodeAssemblerState(
+          test->isolate(), test->zone(),
+          InterpreterDispatchDescriptor(test->isolate()),
+          Code::ComputeFlags(Code::BYTECODE_HANDLER),
+          Bytecodes::ToString(bytecode), Bytecodes::ReturnCount(bytecode)) {}
+
 const interpreter::Bytecode kBytecodes[] = {
 #define DEFINE_BYTECODE(Name, ...) interpreter::Bytecode::k##Name,
     BYTECODE_LIST(DEFINE_BYTECODE)
 #undef DEFINE_BYTECODE
 };
 
 Matcher<Node*> IsIntPtrConstant(const intptr_t value) {
   return kPointerSize == 8 ? IsInt64Constant(static_cast<int64_t>(value))
                            : IsInt32Constant(static_cast<int32_t>(value));
 }
(...skipping 257 matching lines...)
     case OperandSize::kQuad:
       return IsUnsignedQuadOperand(offset);
     case OperandSize::kNone:
       UNREACHABLE();
   }
   return nullptr;
 }
 
 TARGET_TEST_F(InterpreterAssemblerTest, Dispatch) {
   TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
-    InterpreterAssemblerForTest m(this, bytecode);
+    InterpreterAssemblerTestState state(this, bytecode);
+    InterpreterAssemblerForTest m(&state, bytecode);
     Node* tail_call_node = m.Dispatch();
 
     OperandScale operand_scale = OperandScale::kSingle;
     Matcher<Node*> next_bytecode_offset_matcher =
         IsIntPtrAdd(IsParameter(InterpreterDispatchDescriptor::kBytecodeOffset),
                     IsIntPtrConstant(
                         interpreter::Bytecodes::Size(bytecode, operand_scale)));
     Matcher<Node*> target_bytecode_matcher =
         m.IsLoad(MachineType::Uint8(),
                  IsParameter(InterpreterDispatchDescriptor::kBytecodeArray),
(...skipping 45 matching lines...)
 
 TARGET_TEST_F(InterpreterAssemblerTest, Jump) {
   // If debug code is enabled we emit extra code in Jump.
   if (FLAG_debug_code) return;
 
   int jump_offsets[] = {-9710, -77, 0, +3, +97109};
   TRACED_FOREACH(int, jump_offset, jump_offsets) {
     TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
       if (!interpreter::Bytecodes::IsJump(bytecode)) return;
 
-      InterpreterAssemblerForTest m(this, bytecode);
+      InterpreterAssemblerTestState state(this, bytecode);
+      InterpreterAssemblerForTest m(&state, bytecode);
       Node* tail_call_node = m.Jump(m.IntPtrConstant(jump_offset));
 
       Matcher<Node*> next_bytecode_offset_matcher = IsIntPtrAdd(
           IsParameter(InterpreterDispatchDescriptor::kBytecodeOffset),
           IsIntPtrConstant(jump_offset));
       Matcher<Node*> target_bytecode_matcher =
           m.IsLoad(MachineType::Uint8(), _, next_bytecode_offset_matcher);
       if (kPointerSize == 8) {
         target_bytecode_matcher =
             IsChangeUint32ToUint64(target_bytecode_matcher);
(...skipping 13 matching lines...)
                      _, _));
     }
   }
 }
 
 TARGET_TEST_F(InterpreterAssemblerTest, BytecodeOperand) {
   static const OperandScale kOperandScales[] = {
       OperandScale::kSingle, OperandScale::kDouble, OperandScale::kQuadruple};
   TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
     TRACED_FOREACH(interpreter::OperandScale, operand_scale, kOperandScales) {
-      InterpreterAssemblerForTest m(this, bytecode, operand_scale);
+      InterpreterAssemblerTestState state(this, bytecode);
+      InterpreterAssemblerForTest m(&state, bytecode, operand_scale);
       int number_of_operands =
           interpreter::Bytecodes::NumberOfOperands(bytecode);
       for (int i = 0; i < number_of_operands; i++) {
         int offset = interpreter::Bytecodes::GetOperandOffset(bytecode, i,
                                                               operand_scale);
         OperandType operand_type =
             interpreter::Bytecodes::GetOperandType(bytecode, i);
         OperandSize operand_size =
             Bytecodes::SizeOfOperand(operand_type, operand_scale);
         switch (interpreter::Bytecodes::GetOperandType(bytecode, i)) {
(...skipping 44 matching lines...)
   }
 }
 
 TARGET_TEST_F(InterpreterAssemblerTest, GetSetAccumulator) {
   TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
     if (!interpreter::Bytecodes::ReadsAccumulator(bytecode) ||
         !interpreter::Bytecodes::WritesAccumulator(bytecode)) {
       continue;
     }
 
-    InterpreterAssemblerForTest m(this, bytecode);
+    InterpreterAssemblerTestState state(this, bytecode);
+    InterpreterAssemblerForTest m(&state, bytecode);
     // Should be incoming accumulator if not set.
     EXPECT_THAT(m.GetAccumulator(),
                 IsParameter(InterpreterDispatchDescriptor::kAccumulator));
     // Should be set by SetAccumulator.
     Node* accumulator_value_1 = m.Int32Constant(0xdeadbeef);
     m.SetAccumulator(accumulator_value_1);
     EXPECT_THAT(m.GetAccumulator(), accumulator_value_1);
     Node* accumulator_value_2 = m.Int32Constant(42);
     m.SetAccumulator(accumulator_value_2);
     EXPECT_THAT(m.GetAccumulator(), accumulator_value_2);
 
     // Should be passed to next bytecode handler on dispatch.
     Node* tail_call_node = m.Dispatch();
 
     EXPECT_THAT(tail_call_node,
                 IsTailCall(_, _, accumulator_value_2, _, _, _, _));
   }
 }
 
 TARGET_TEST_F(InterpreterAssemblerTest, GetContext) {
   TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
-    InterpreterAssemblerForTest m(this, bytecode);
+    InterpreterAssemblerTestState state(this, bytecode);
+    InterpreterAssemblerForTest m(&state, bytecode);
     EXPECT_THAT(
         m.GetContext(),
         m.IsLoad(MachineType::AnyTagged(), IsLoadParentFramePointer(),
                  IsIntPtrConstant(Register::current_context().ToOperand()
                                   << kPointerSizeLog2)));
   }
 }
 
 TARGET_TEST_F(InterpreterAssemblerTest, RegisterLocation) {
   TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
-    InterpreterAssemblerForTest m(this, bytecode);
+    InterpreterAssemblerTestState state(this, bytecode);
+    InterpreterAssemblerForTest m(&state, bytecode);
     Node* reg_index_node = m.IntPtrConstant(44);
     Node* reg_location_node = m.RegisterLocation(reg_index_node);
     EXPECT_THAT(reg_location_node,
                 IsIntPtrAdd(IsLoadParentFramePointer(),
                             IsWordShl(reg_index_node,
                                       IsIntPtrConstant(kPointerSizeLog2))));
   }
 }
 
 TARGET_TEST_F(InterpreterAssemblerTest, LoadRegister) {
   TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
-    InterpreterAssemblerForTest m(this, bytecode);
+    InterpreterAssemblerTestState state(this, bytecode);
+    InterpreterAssemblerForTest m(&state, bytecode);
     Node* reg_index_node = m.IntPtrConstant(44);
     Node* load_reg_node = m.LoadRegister(reg_index_node);
     EXPECT_THAT(load_reg_node,
                 m.IsLoad(MachineType::AnyTagged(), IsLoadParentFramePointer(),
                          IsWordShl(reg_index_node,
                                    IsIntPtrConstant(kPointerSizeLog2))));
   }
 }
 
 TARGET_TEST_F(InterpreterAssemblerTest, StoreRegister) {
   TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
-    InterpreterAssemblerForTest m(this, bytecode);
+    InterpreterAssemblerTestState state(this, bytecode);
+    InterpreterAssemblerForTest m(&state, bytecode);
     Node* store_value = m.Int32Constant(0xdeadbeef);
     Node* reg_index_node = m.IntPtrConstant(44);
     Node* store_reg_node = m.StoreRegister(store_value, reg_index_node);
     EXPECT_THAT(
         store_reg_node,
         m.IsStore(StoreRepresentation(MachineRepresentation::kTagged,
                                       kNoWriteBarrier),
                   IsLoadParentFramePointer(),
                   IsWordShl(reg_index_node, IsIntPtrConstant(kPointerSizeLog2)),
                   store_value));
   }
 }
 
 TARGET_TEST_F(InterpreterAssemblerTest, SmiTag) {
   TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
-    InterpreterAssemblerForTest m(this, bytecode);
+    InterpreterAssemblerTestState state(this, bytecode);
+    InterpreterAssemblerForTest m(&state, bytecode);
     Node* value = m.Int32Constant(44);
     EXPECT_THAT(m.SmiTag(value), IsBitcastWordToTaggedSigned(IsIntPtrConstant(
                                      static_cast<intptr_t>(44)
                                      << (kSmiShiftSize + kSmiTagSize))));
     EXPECT_THAT(m.SmiUntag(value),
                 IsWordSar(IsBitcastTaggedToWord(value),
                           IsIntPtrConstant(kSmiShiftSize + kSmiTagSize)));
   }
 }
 
 TARGET_TEST_F(InterpreterAssemblerTest, IntPtrAdd) {
   TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
-    InterpreterAssemblerForTest m(this, bytecode);
+    InterpreterAssemblerTestState state(this, bytecode);
+    InterpreterAssemblerForTest m(&state, bytecode);
     Node* a = m.Int32Constant(0);
     Node* b = m.Int32Constant(1);
     Node* add = m.IntPtrAdd(a, b);
     EXPECT_THAT(add, IsIntPtrAdd(a, b));
   }
 }
 
 TARGET_TEST_F(InterpreterAssemblerTest, IntPtrSub) {
   TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
-    InterpreterAssemblerForTest m(this, bytecode);
+    InterpreterAssemblerTestState state(this, bytecode);
+    InterpreterAssemblerForTest m(&state, bytecode);
     Node* a = m.Int32Constant(0);
     Node* b = m.Int32Constant(1);
     Node* add = m.IntPtrSub(a, b);
     EXPECT_THAT(add, IsIntPtrSub(a, b));
   }
 }
 
 TARGET_TEST_F(InterpreterAssemblerTest, WordShl) {
   TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
-    InterpreterAssemblerForTest m(this, bytecode);
+    InterpreterAssemblerTestState state(this, bytecode);
+    InterpreterAssemblerForTest m(&state, bytecode);
     Node* a = m.IntPtrConstant(0);
     Node* add = m.WordShl(a, 10);
     EXPECT_THAT(add, IsWordShl(a, IsIntPtrConstant(10)));
   }
 }
 
 TARGET_TEST_F(InterpreterAssemblerTest, LoadConstantPoolEntry) {
   TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
-    InterpreterAssemblerForTest m(this, bytecode);
+    InterpreterAssemblerTestState state(this, bytecode);
+    InterpreterAssemblerForTest m(&state, bytecode);
     Node* index = m.IntPtrConstant(2);
     Node* load_constant = m.LoadConstantPoolEntry(index);
     Matcher<Node*> constant_pool_matcher = m.IsLoad(
         MachineType::AnyTagged(),
         IsParameter(InterpreterDispatchDescriptor::kBytecodeArray),
         IsIntPtrConstant(BytecodeArray::kConstantPoolOffset - kHeapObjectTag));
     EXPECT_THAT(
         load_constant,
         m.IsLoad(MachineType::AnyTagged(), constant_pool_matcher,
                  IsIntPtrAdd(
                      IsIntPtrConstant(FixedArray::kHeaderSize - kHeapObjectTag),
                      IsWordShl(index, IsIntPtrConstant(kPointerSizeLog2)))));
   }
 }
 
 TARGET_TEST_F(InterpreterAssemblerTest, LoadObjectField) {
   TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
-    InterpreterAssemblerForTest m(this, bytecode);
+    InterpreterAssemblerTestState state(this, bytecode);
+    InterpreterAssemblerForTest m(&state, bytecode);
     Node* object = m.IntPtrConstant(0xdeadbeef);
     int offset = 16;
     Node* load_field = m.LoadObjectField(object, offset);
     EXPECT_THAT(load_field,
                 m.IsLoad(MachineType::AnyTagged(), object,
                          IsIntPtrConstant(offset - kHeapObjectTag)));
   }
 }
 
 TARGET_TEST_F(InterpreterAssemblerTest, CallRuntime2) {
   TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
-    InterpreterAssemblerForTest m(this, bytecode);
+    InterpreterAssemblerTestState state(this, bytecode);
+    InterpreterAssemblerForTest m(&state, bytecode);
     Node* arg1 = m.Int32Constant(2);
     Node* arg2 = m.Int32Constant(3);
     Node* context = m.Int32Constant(4);
     Node* call_runtime = m.CallRuntime(Runtime::kAdd, context, arg1, arg2);
     EXPECT_THAT(call_runtime,
                 IsCall(_, _, arg1, arg2, _, IsInt32Constant(2), context, _, _));
   }
 }
 
 TARGET_TEST_F(InterpreterAssemblerTest, CallRuntime) {
   const int kResultSizes[] = {1, 2};
   TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
     TRACED_FOREACH(int, result_size, kResultSizes) {
-      InterpreterAssemblerForTest m(this, bytecode);
+      InterpreterAssemblerTestState state(this, bytecode);
+      InterpreterAssemblerForTest m(&state, bytecode);
       Callable builtin = CodeFactory::InterpreterCEntry(isolate(), result_size);
 
       Node* function_id = m.Int32Constant(0);
       Node* first_arg = m.Int32Constant(1);
       Node* arg_count = m.Int32Constant(2);
       Node* context = m.Int32Constant(4);
 
       Matcher<Node*> function_table = IsExternalConstant(
           ExternalReference::runtime_function_table_address(isolate()));
       Matcher<Node*> function = IsIntPtrAdd(
(...skipping 10 matching lines...)
                  first_arg, function_entry, context, _, _));
     }
   }
 }
 
 TARGET_TEST_F(InterpreterAssemblerTest, CallJS) {
   TailCallMode tail_call_modes[] = {TailCallMode::kDisallow,
                                     TailCallMode::kAllow};
   TRACED_FOREACH(TailCallMode, tail_call_mode, tail_call_modes) {
     TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
-      InterpreterAssemblerForTest m(this, bytecode);
+      InterpreterAssemblerTestState state(this, bytecode);
+      InterpreterAssemblerForTest m(&state, bytecode);
       Callable builtin =
           CodeFactory::InterpreterPushArgsAndCall(isolate(), tail_call_mode);
       Node* function = m.Int32Constant(0);
       Node* first_arg = m.Int32Constant(1);
       Node* arg_count = m.Int32Constant(2);
       Node* context = m.Int32Constant(3);
       Node* call_js =
           m.CallJS(function, context, first_arg, arg_count, tail_call_mode);
       EXPECT_THAT(call_js, IsCall(_, IsHeapConstant(builtin.code()), arg_count,
                                   first_arg, function, context, _, _));
     }
   }
 }
 
 TARGET_TEST_F(InterpreterAssemblerTest, LoadTypeFeedbackVector) {
   TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
-    InterpreterAssemblerForTest m(this, bytecode);
+    InterpreterAssemblerTestState state(this, bytecode);
+    InterpreterAssemblerForTest m(&state, bytecode);
     Node* feedback_vector = m.LoadTypeFeedbackVector();
 
     Matcher<Node*> load_function_matcher =
         m.IsLoad(MachineType::AnyTagged(), IsLoadParentFramePointer(),
                  IsIntPtrConstant(Register::function_closure().ToOperand()
                                   << kPointerSizeLog2));
     Matcher<Node*> load_literals_matcher = m.IsLoad(
         MachineType::AnyTagged(), load_function_matcher,
         IsIntPtrConstant(JSFunction::kLiteralsOffset - kHeapObjectTag));
 
     EXPECT_THAT(feedback_vector,
                 m.IsLoad(MachineType::AnyTagged(), load_literals_matcher,
                          IsIntPtrConstant(LiteralsArray::kFeedbackVectorOffset -
                                           kHeapObjectTag)));
   }
 }
 
 }  // namespace interpreter
 }  // namespace internal
 }  // namespace v8