Chromium Code Reviews

Unified diff: test/unittests/interpreter/interpreter-assembler-unittest.cc

Issue 2504913002: Revert of [refactoring] Split CodeAssemblerState out of CodeAssembler (Closed)
Patch Set: Created 4 years, 1 month ago
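
Summary of the change in this file: the revert removes the separate InterpreterAssemblerTestState object and restores direct construction of InterpreterAssemblerForTest from the test fixture. A simplified C++ sketch of the pattern repeated throughout the diff below (condensed from the hunks that follow, not an exact excerpt):

    // Before this revert (explicit CodeAssemblerState split out):
    InterpreterAssemblerTestState state(this, bytecode);
    InterpreterAssemblerForTest m(&state, bytecode);

    // After this revert (assembler built directly from the test fixture):
    InterpreterAssemblerForTest m(this, bytecode);

The same two-line-to-one-line substitution appears in every TARGET_TEST_F body, and the InterpreterAssemblerTestState constructor definition near the top of the file is deleted.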
 // Copyright 2015 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #include "test/unittests/interpreter/interpreter-assembler-unittest.h"

 #include "src/code-factory.h"
+#include "src/compiler/graph.h"
 #include "src/compiler/node.h"
 #include "src/interface-descriptors.h"
 #include "src/isolate.h"
 #include "test/unittests/compiler/compiler-test-utils.h"
 #include "test/unittests/compiler/node-test-utils.h"

 using ::testing::_;

 namespace v8 {
 namespace internal {

 using namespace compiler;

 namespace interpreter {

-InterpreterAssemblerTestState::InterpreterAssemblerTestState(
-    InterpreterAssemblerTest* test, Bytecode bytecode)
-    : compiler::CodeAssemblerState(
-          test->isolate(), test->zone(),
-          InterpreterDispatchDescriptor(test->isolate()),
-          Code::ComputeFlags(Code::BYTECODE_HANDLER),
-          Bytecodes::ToString(bytecode), Bytecodes::ReturnCount(bytecode)) {}
-
 const interpreter::Bytecode kBytecodes[] = {
 #define DEFINE_BYTECODE(Name, ...) interpreter::Bytecode::k##Name,
     BYTECODE_LIST(DEFINE_BYTECODE)
 #undef DEFINE_BYTECODE
 };

 Matcher<Node*> IsIntPtrConstant(const intptr_t value) {
   return kPointerSize == 8 ? IsInt64Constant(static_cast<int64_t>(value))
                            : IsInt32Constant(static_cast<int32_t>(value));
 }

(...skipping 257 matching lines...)
     case OperandSize::kQuad:
       return IsUnsignedQuadOperand(offset);
     case OperandSize::kNone:
       UNREACHABLE();
   }
   return nullptr;
 }

 TARGET_TEST_F(InterpreterAssemblerTest, Dispatch) {
   TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
-    InterpreterAssemblerTestState state(this, bytecode);
-    InterpreterAssemblerForTest m(&state, bytecode);
+    InterpreterAssemblerForTest m(this, bytecode);
     Node* tail_call_node = m.Dispatch();

     OperandScale operand_scale = OperandScale::kSingle;
     Matcher<Node*> next_bytecode_offset_matcher =
         IsIntPtrAdd(IsParameter(InterpreterDispatchDescriptor::kBytecodeOffset),
                     IsIntPtrConstant(
                         interpreter::Bytecodes::Size(bytecode, operand_scale)));
     Matcher<Node*> target_bytecode_matcher =
         m.IsLoad(MachineType::Uint8(),
                  IsParameter(InterpreterDispatchDescriptor::kBytecodeArray),

(...skipping 45 matching lines...)

 TARGET_TEST_F(InterpreterAssemblerTest, Jump) {
   // If debug code is enabled we emit extra code in Jump.
   if (FLAG_debug_code) return;

   int jump_offsets[] = {-9710, -77, 0, +3, +97109};
   TRACED_FOREACH(int, jump_offset, jump_offsets) {
     TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
       if (!interpreter::Bytecodes::IsJump(bytecode)) return;

-      InterpreterAssemblerTestState state(this, bytecode);
-      InterpreterAssemblerForTest m(&state, bytecode);
+      InterpreterAssemblerForTest m(this, bytecode);
       Node* tail_call_node = m.Jump(m.IntPtrConstant(jump_offset));

       Matcher<Node*> next_bytecode_offset_matcher = IsIntPtrAdd(
           IsParameter(InterpreterDispatchDescriptor::kBytecodeOffset),
           IsIntPtrConstant(jump_offset));
       Matcher<Node*> target_bytecode_matcher =
           m.IsLoad(MachineType::Uint8(), _, next_bytecode_offset_matcher);
       if (kPointerSize == 8) {
         target_bytecode_matcher =
             IsChangeUint32ToUint64(target_bytecode_matcher);

(...skipping 13 matching lines...)
                              _, _));
     }
   }
 }

 TARGET_TEST_F(InterpreterAssemblerTest, BytecodeOperand) {
   static const OperandScale kOperandScales[] = {
       OperandScale::kSingle, OperandScale::kDouble, OperandScale::kQuadruple};
   TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
     TRACED_FOREACH(interpreter::OperandScale, operand_scale, kOperandScales) {
-      InterpreterAssemblerTestState state(this, bytecode);
-      InterpreterAssemblerForTest m(&state, bytecode, operand_scale);
+      InterpreterAssemblerForTest m(this, bytecode, operand_scale);
       int number_of_operands =
           interpreter::Bytecodes::NumberOfOperands(bytecode);
       for (int i = 0; i < number_of_operands; i++) {
         int offset = interpreter::Bytecodes::GetOperandOffset(bytecode, i,
                                                               operand_scale);
         OperandType operand_type =
             interpreter::Bytecodes::GetOperandType(bytecode, i);
         OperandSize operand_size =
             Bytecodes::SizeOfOperand(operand_type, operand_scale);
         switch (interpreter::Bytecodes::GetOperandType(bytecode, i)) {

(...skipping 44 matching lines...)
   }
 }

 TARGET_TEST_F(InterpreterAssemblerTest, GetSetAccumulator) {
   TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
     if (!interpreter::Bytecodes::ReadsAccumulator(bytecode) ||
         !interpreter::Bytecodes::WritesAccumulator(bytecode)) {
       continue;
     }

-    InterpreterAssemblerTestState state(this, bytecode);
-    InterpreterAssemblerForTest m(&state, bytecode);
+    InterpreterAssemblerForTest m(this, bytecode);
     // Should be incoming accumulator if not set.
     EXPECT_THAT(m.GetAccumulator(),
                 IsParameter(InterpreterDispatchDescriptor::kAccumulator));
     // Should be set by SetAccumulator.
     Node* accumulator_value_1 = m.Int32Constant(0xdeadbeef);
     m.SetAccumulator(accumulator_value_1);
     EXPECT_THAT(m.GetAccumulator(), accumulator_value_1);
     Node* accumulator_value_2 = m.Int32Constant(42);
     m.SetAccumulator(accumulator_value_2);
     EXPECT_THAT(m.GetAccumulator(), accumulator_value_2);

     // Should be passed to next bytecode handler on dispatch.
     Node* tail_call_node = m.Dispatch();

     EXPECT_THAT(tail_call_node,
                 IsTailCall(_, _, accumulator_value_2, _, _, _, _));
   }
 }

 TARGET_TEST_F(InterpreterAssemblerTest, GetContext) {
   TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
-    InterpreterAssemblerTestState state(this, bytecode);
-    InterpreterAssemblerForTest m(&state, bytecode);
+    InterpreterAssemblerForTest m(this, bytecode);
     EXPECT_THAT(
         m.GetContext(),
         m.IsLoad(MachineType::AnyTagged(), IsLoadParentFramePointer(),
                  IsIntPtrConstant(Register::current_context().ToOperand()
                                   << kPointerSizeLog2)));
   }
 }

 TARGET_TEST_F(InterpreterAssemblerTest, RegisterLocation) {
   TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
-    InterpreterAssemblerTestState state(this, bytecode);
-    InterpreterAssemblerForTest m(&state, bytecode);
+    InterpreterAssemblerForTest m(this, bytecode);
     Node* reg_index_node = m.IntPtrConstant(44);
     Node* reg_location_node = m.RegisterLocation(reg_index_node);
     EXPECT_THAT(reg_location_node,
                 IsIntPtrAdd(IsLoadParentFramePointer(),
                             IsWordShl(reg_index_node,
                                       IsIntPtrConstant(kPointerSizeLog2))));
   }
 }

 TARGET_TEST_F(InterpreterAssemblerTest, LoadRegister) {
   TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
-    InterpreterAssemblerTestState state(this, bytecode);
-    InterpreterAssemblerForTest m(&state, bytecode);
+    InterpreterAssemblerForTest m(this, bytecode);
     Node* reg_index_node = m.IntPtrConstant(44);
     Node* load_reg_node = m.LoadRegister(reg_index_node);
     EXPECT_THAT(load_reg_node,
                 m.IsLoad(MachineType::AnyTagged(), IsLoadParentFramePointer(),
                          IsWordShl(reg_index_node,
                                    IsIntPtrConstant(kPointerSizeLog2))));
   }
 }

 TARGET_TEST_F(InterpreterAssemblerTest, StoreRegister) {
   TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
-    InterpreterAssemblerTestState state(this, bytecode);
-    InterpreterAssemblerForTest m(&state, bytecode);
+    InterpreterAssemblerForTest m(this, bytecode);
     Node* store_value = m.Int32Constant(0xdeadbeef);
     Node* reg_index_node = m.IntPtrConstant(44);
     Node* store_reg_node = m.StoreRegister(store_value, reg_index_node);
     EXPECT_THAT(
         store_reg_node,
         m.IsStore(StoreRepresentation(MachineRepresentation::kTagged,
                                       kNoWriteBarrier),
                   IsLoadParentFramePointer(),
                   IsWordShl(reg_index_node, IsIntPtrConstant(kPointerSizeLog2)),
                   store_value));
   }
 }

 TARGET_TEST_F(InterpreterAssemblerTest, SmiTag) {
   TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
-    InterpreterAssemblerTestState state(this, bytecode);
-    InterpreterAssemblerForTest m(&state, bytecode);
+    InterpreterAssemblerForTest m(this, bytecode);
     Node* value = m.Int32Constant(44);
     EXPECT_THAT(m.SmiTag(value), IsBitcastWordToTaggedSigned(IsIntPtrConstant(
                                      static_cast<intptr_t>(44)
                                      << (kSmiShiftSize + kSmiTagSize))));
     EXPECT_THAT(m.SmiUntag(value),
                 IsWordSar(IsBitcastTaggedToWord(value),
                           IsIntPtrConstant(kSmiShiftSize + kSmiTagSize)));
   }
 }

 TARGET_TEST_F(InterpreterAssemblerTest, IntPtrAdd) {
   TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
-    InterpreterAssemblerTestState state(this, bytecode);
-    InterpreterAssemblerForTest m(&state, bytecode);
+    InterpreterAssemblerForTest m(this, bytecode);
     Node* a = m.Int32Constant(0);
     Node* b = m.Int32Constant(1);
     Node* add = m.IntPtrAdd(a, b);
     EXPECT_THAT(add, IsIntPtrAdd(a, b));
   }
 }

 TARGET_TEST_F(InterpreterAssemblerTest, IntPtrSub) {
   TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
-    InterpreterAssemblerTestState state(this, bytecode);
-    InterpreterAssemblerForTest m(&state, bytecode);
+    InterpreterAssemblerForTest m(this, bytecode);
     Node* a = m.Int32Constant(0);
     Node* b = m.Int32Constant(1);
     Node* add = m.IntPtrSub(a, b);
     EXPECT_THAT(add, IsIntPtrSub(a, b));
   }
 }

 TARGET_TEST_F(InterpreterAssemblerTest, WordShl) {
   TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
-    InterpreterAssemblerTestState state(this, bytecode);
-    InterpreterAssemblerForTest m(&state, bytecode);
+    InterpreterAssemblerForTest m(this, bytecode);
     Node* a = m.IntPtrConstant(0);
     Node* add = m.WordShl(a, 10);
     EXPECT_THAT(add, IsWordShl(a, IsIntPtrConstant(10)));
   }
 }

 TARGET_TEST_F(InterpreterAssemblerTest, LoadConstantPoolEntry) {
   TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
-    InterpreterAssemblerTestState state(this, bytecode);
-    InterpreterAssemblerForTest m(&state, bytecode);
+    InterpreterAssemblerForTest m(this, bytecode);
     Node* index = m.IntPtrConstant(2);
     Node* load_constant = m.LoadConstantPoolEntry(index);
     Matcher<Node*> constant_pool_matcher = m.IsLoad(
         MachineType::AnyTagged(),
         IsParameter(InterpreterDispatchDescriptor::kBytecodeArray),
         IsIntPtrConstant(BytecodeArray::kConstantPoolOffset - kHeapObjectTag));
     EXPECT_THAT(
         load_constant,
         m.IsLoad(MachineType::AnyTagged(), constant_pool_matcher,
                  IsIntPtrAdd(
                      IsIntPtrConstant(FixedArray::kHeaderSize - kHeapObjectTag),
                      IsWordShl(index, IsIntPtrConstant(kPointerSizeLog2)))));
   }
 }

 TARGET_TEST_F(InterpreterAssemblerTest, LoadObjectField) {
   TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
-    InterpreterAssemblerTestState state(this, bytecode);
-    InterpreterAssemblerForTest m(&state, bytecode);
+    InterpreterAssemblerForTest m(this, bytecode);
     Node* object = m.IntPtrConstant(0xdeadbeef);
     int offset = 16;
     Node* load_field = m.LoadObjectField(object, offset);
     EXPECT_THAT(load_field,
                 m.IsLoad(MachineType::AnyTagged(), object,
                          IsIntPtrConstant(offset - kHeapObjectTag)));
   }
 }

 TARGET_TEST_F(InterpreterAssemblerTest, CallRuntime2) {
   TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
-    InterpreterAssemblerTestState state(this, bytecode);
-    InterpreterAssemblerForTest m(&state, bytecode);
+    InterpreterAssemblerForTest m(this, bytecode);
     Node* arg1 = m.Int32Constant(2);
     Node* arg2 = m.Int32Constant(3);
     Node* context = m.Int32Constant(4);
     Node* call_runtime = m.CallRuntime(Runtime::kAdd, context, arg1, arg2);
     EXPECT_THAT(call_runtime,
                 IsCall(_, _, arg1, arg2, _, IsInt32Constant(2), context, _, _));
   }
 }

 TARGET_TEST_F(InterpreterAssemblerTest, CallRuntime) {
   const int kResultSizes[] = {1, 2};
   TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
     TRACED_FOREACH(int, result_size, kResultSizes) {
-      InterpreterAssemblerTestState state(this, bytecode);
-      InterpreterAssemblerForTest m(&state, bytecode);
+      InterpreterAssemblerForTest m(this, bytecode);
       Callable builtin = CodeFactory::InterpreterCEntry(isolate(), result_size);

       Node* function_id = m.Int32Constant(0);
       Node* first_arg = m.Int32Constant(1);
       Node* arg_count = m.Int32Constant(2);
       Node* context = m.Int32Constant(4);

       Matcher<Node*> function_table = IsExternalConstant(
           ExternalReference::runtime_function_table_address(isolate()));
       Matcher<Node*> function = IsIntPtrAdd(

(...skipping 10 matching lines...)
                          first_arg, function_entry, context, _, _));
     }
   }
 }

 TARGET_TEST_F(InterpreterAssemblerTest, CallJS) {
   TailCallMode tail_call_modes[] = {TailCallMode::kDisallow,
                                     TailCallMode::kAllow};
   TRACED_FOREACH(TailCallMode, tail_call_mode, tail_call_modes) {
     TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
-      InterpreterAssemblerTestState state(this, bytecode);
-      InterpreterAssemblerForTest m(&state, bytecode);
+      InterpreterAssemblerForTest m(this, bytecode);
       Callable builtin =
           CodeFactory::InterpreterPushArgsAndCall(isolate(), tail_call_mode);
       Node* function = m.Int32Constant(0);
       Node* first_arg = m.Int32Constant(1);
       Node* arg_count = m.Int32Constant(2);
       Node* context = m.Int32Constant(3);
       Node* call_js =
           m.CallJS(function, context, first_arg, arg_count, tail_call_mode);
       EXPECT_THAT(call_js, IsCall(_, IsHeapConstant(builtin.code()), arg_count,
                                   first_arg, function, context, _, _));
     }
   }
 }

 TARGET_TEST_F(InterpreterAssemblerTest, LoadTypeFeedbackVector) {
   TRACED_FOREACH(interpreter::Bytecode, bytecode, kBytecodes) {
-    InterpreterAssemblerTestState state(this, bytecode);
-    InterpreterAssemblerForTest m(&state, bytecode);
+    InterpreterAssemblerForTest m(this, bytecode);
     Node* feedback_vector = m.LoadTypeFeedbackVector();

     Matcher<Node*> load_function_matcher =
         m.IsLoad(MachineType::AnyTagged(), IsLoadParentFramePointer(),
                  IsIntPtrConstant(Register::function_closure().ToOperand()
                                   << kPointerSizeLog2));
     Matcher<Node*> load_literals_matcher = m.IsLoad(
         MachineType::AnyTagged(), load_function_matcher,
         IsIntPtrConstant(JSFunction::kLiteralsOffset - kHeapObjectTag));

     EXPECT_THAT(feedback_vector,
                 m.IsLoad(MachineType::AnyTagged(), load_literals_matcher,
                          IsIntPtrConstant(LiteralsArray::kFeedbackVectorOffset -
                                           kHeapObjectTag)));
   }
 }

 }  // namespace interpreter
 }  // namespace internal
 }  // namespace v8