Chromium Code Reviews
Diff: src/x64/codegen-x64.cc

Issue 140060: Enable code generation for array literals... (Closed)
Base URL: http://v8.googlecode.com/svn/branches/bleeding_edge/
Patch Set: Created 11 years, 6 months ago
 // Copyright 2009 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 137 matching lines...)
148 " }" 148 " }"
149 " function test_local_variables(x, y){" 149 " function test_local_variables(x, y){"
150 " var w; y = x; x = w; w = y; y = x; return w;" 150 " var w; y = x; x = w; w = y; y = x; return w;"
151 " };" 151 " };"
152 " test_local_variables(2,3);" 152 " test_local_variables(2,3);"
153 " function test_nesting_calls(x, y, zee){return zee;};" 153 " function test_nesting_calls(x, y, zee){return zee;};"
154 " test_local_variables(" 154 " test_local_variables("
155 " test_nesting_calls(test_local_variables(1,3), 42, 47)," 155 " test_nesting_calls(test_local_variables(1,3), 42, 47),"
156 " test_local_variables(-25.3, 2));" 156 " test_local_variables(-25.3, 2));"
157 " var o = { x: 42 };" 157 " var o = { x: 42 };"
158 " var a = [ 1, 2, 3 ];"
158 " return test_if_then_else(1, 47, 39);" 159 " return test_if_then_else(1, 47, 39);"
159 "})()")), 160 "})()")),
160 Factory::NewStringFromAscii(CStrVector("CodeGeneratorTestScript")), 161 Factory::NewStringFromAscii(CStrVector("CodeGeneratorTestScript")),
161 0, 162 0,
162 0, 163 0,
163 NULL, 164 NULL,
164 NULL); 165 NULL);
165 166
166 Code* code_object = test_function->code(); // Local for debugging ease. 167 Code* code_object = test_function->code(); // Local for debugging ease.
167 USE(code_object); 168 USE(code_object);
(...skipping 135 matching lines...) Expand 10 before | Expand all | Expand 10 after
   scope_ = NULL;
 }
 
 void CodeGenerator::GenerateReturnSequence(Result* return_value) {
   // The return value is a live (but not currently reference counted)
   // reference to rax. This is safe because the current frame does not
   // contain a reference to rax (it is prepared for the return by spilling
   // all registers).
   if (FLAG_trace) {
     frame_->Push(return_value);
-    // *return_value = frame_->CallRuntime(Runtime::kTraceExit, 1);
+    *return_value = frame_->CallRuntime(Runtime::kTraceExit, 1);
   }
   return_value->ToRegister(rax);
 
   // Add a label for checking the size of the code used for returning.
   Label check_exit_codesize;
   masm_->bind(&check_exit_codesize);
 
   // Leave the frame and return popping the arguments and the
   // receiver.
   frame_->Exit();
   masm_->ret((scope_->num_parameters() + 1) * kPointerSize);
   DeleteFrame();
 
+  // TODO(x64): introduce kX64JSReturnSequenceLength and enable assert.
+
   // Check that the size of the code used for returning matches what is
   // expected by the debugger.
   // ASSERT_EQ(Debug::kIa32JSReturnSequenceLength,
   //           masm_->SizeOfCodeGeneratedSince(&check_exit_codesize));
 }
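
The TODO above tracks re-enabling the size check below it. A minimal sketch of the enabled assert, assuming a Debug::kX64JSReturnSequenceLength constant gets introduced (the name is hypothetical here, mirroring the existing ia32 constant):

    // Sketch only: the assumed constant must equal the number of bytes
    // emitted for the frame exit and ret above, so the debugger can
    // safely overwrite the return sequence in place.
    ASSERT_EQ(Debug::kX64JSReturnSequenceLength,
              masm_->SizeOfCodeGeneratedSince(&check_exit_codesize));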
 
 
 void CodeGenerator::GenerateFastCaseSwitchJumpTable(SwitchStatement* a,
                                                     int b,
                                                     int c,
(...skipping 232 matching lines...)
 }
 
 
 void CodeGenerator::VisitReturnStatement(ReturnStatement* node) {
   ASSERT(!in_spilled_code());
   Comment cmnt(masm_, "[ ReturnStatement");
 
   CodeForStatementPosition(node);
   Load(node->expression());
   Result return_value = frame_->Pop();
-  /* if (function_return_is_shadowed_) {
+  if (function_return_is_shadowed_) {
     function_return_.Jump(&return_value);
   } else {
     frame_->PrepareForReturn();
     if (function_return_.is_bound()) {
       // If the function return label is already bound we reuse the
       // code by jumping to the return site.
       function_return_.Jump(&return_value);
     } else {
       function_return_.Bind(&return_value);
       GenerateReturnSequence(&return_value);
     }
   }
-  */
-  GenerateReturnSequence(&return_value);
 }
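
The newly enabled branch implements return-site sharing: the first return binds function_return_ and emits the full return sequence, and every later return jumps to it (the shadowed case covers returns that must first unwind through an intercepting construct such as a try block). A standalone sketch of the bind-once/jump-thereafter pattern (invented types, not V8's JumpTarget):

    // The first return "binds" the site and emits the return sequence;
    // every later return just jumps to the recorded offset.
    struct ReturnSite {
      int bound_at = -1;  // instruction offset, -1 while unbound
      bool is_bound() const { return bound_at >= 0; }
    };

    // Returns the offset a return emitted at 'current_offset' targets.
    int EmitReturn(ReturnSite* site, int current_offset) {
      if (site->is_bound()) return site->bound_at;  // reuse existing code
      site->bound_at = current_offset;              // emit sequence here
      return site->bound_at;
    }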
 
 
 void CodeGenerator::VisitWithEnterStatement(WithEnterStatement* a) {
   UNIMPLEMENTED();
 }
 
 void CodeGenerator::VisitWithExitStatement(WithExitStatement* a) {
   UNIMPLEMENTED();
 }
(...skipping 72 matching lines...)
 
 void CodeGenerator::VisitVariableProxy(VariableProxy* node) {
   Comment cmnt(masm_, "[ VariableProxy");
   Variable* var = node->var();
   Expression* expr = var->rewrite();
   if (expr != NULL) {
     Visit(expr);
   } else {
     ASSERT(var->is_global());
     Reference ref(this, node);
-    // ref.GetValue(typeof_state());
+    ref.GetValue(typeof_state());
   }
 }
 
 
 void CodeGenerator::VisitLiteral(Literal* node) {
   Comment cmnt(masm_, "[ Literal");
   frame_->Push(node->handle());
 }
 
 
(...skipping 24 matching lines...)
 
 
 void DeferredObjectLiteral::Generate() {
   // Since the entry is undefined we call the runtime system to
   // compute the literal.
   // Literal array (0).
   __ push(literals_);
   // Literal index (1).
   __ push(Immediate(Smi::FromInt(node_->literal_index())));
   // Constant properties (2).
-  __ movq(kScratchRegister,
-          node_->constant_properties(),
-          RelocInfo::EMBEDDED_OBJECT);
-  __ push(kScratchRegister);
+  __ PushHeapObject(node_->constant_properties());
   __ CallRuntime(Runtime::kCreateObjectLiteralBoilerplate, 3);
   if (!boilerplate_.is(rax)) __ movq(boilerplate_, rax);
 }
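
Several chunks in this file replace a three-instruction embedded-object idiom with the new MacroAssembler helpers PushHeapObject and CmpHeapObject, declared in src/x64/macro-assembler-x64.h (the other file in this change). A plausible shape for the helpers, inferred from the removed lines (the exact signatures are an assumption; only the call sites are visible here):

    // Assumed definitions: both helpers route the handle through
    // kScratchRegister, exactly like the inlined code they replace.
    void MacroAssembler::PushHeapObject(Handle<Object> object) {
      movq(kScratchRegister, object, RelocInfo::EMBEDDED_OBJECT);
      push(kScratchRegister);
    }

    void MacroAssembler::CmpHeapObject(Register reg, Handle<Object> object) {
      movq(kScratchRegister, object, RelocInfo::EMBEDDED_OBJECT);
      cmpq(reg, kScratchRegister);
    }

Factoring the idiom out keeps each call site to one line and documents the kScratchRegister clobber in a single place.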
 
 
 void CodeGenerator::VisitObjectLiteral(ObjectLiteral* node) {
   Comment cmnt(masm_, "[ ObjectLiteral");
 
   // Retrieve the literals array and check the allocated entry. Begin
   // with a writable copy of the function of this activation in a
(...skipping 11 matching lines...)
   Result boilerplate = allocator_->Allocate();
   ASSERT(boilerplate.is_valid());
   int literal_offset =
       FixedArray::kHeaderSize + node->literal_index() * kPointerSize;
   __ movq(boilerplate.reg(), FieldOperand(literals.reg(), literal_offset));
 
   // Check whether we need to materialize the object literal boilerplate.
   // If so, jump to the deferred code passing the literals array.
   DeferredObjectLiteral* deferred =
       new DeferredObjectLiteral(boilerplate.reg(), literals.reg(), node);
-  __ movq(kScratchRegister,
-          Factory::undefined_value(),
-          RelocInfo::EMBEDDED_OBJECT);
-  __ cmpq(boilerplate.reg(), kScratchRegister);
+  __ CmpHeapObject(boilerplate.reg(), Factory::undefined_value());
   deferred->Branch(equal);
   deferred->BindExit();
   literals.Unuse();
 
   // Push the boilerplate object.
   frame_->Push(&boilerplate);
   // Clone the boilerplate object.
   Runtime::FunctionId clone_function_id = Runtime::kCloneLiteralBoilerplate;
   if (node->depth() == 1) {
     clone_function_id = Runtime::kCloneShallowLiteralBoilerplate;
   }
(...skipping 41 matching lines...)
         Load(property->value());
         Result ignored = frame_->CallRuntime(Runtime::kDefineAccessor, 4);
         // Ignore the result.
         break;
       }
       default: UNREACHABLE();
     }
   }
 }
 
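
Both literal visitors share one caching scheme: the function's literals array has a slot per literal site, initialized to undefined; deferred code calls the runtime to build a boilerplate on first use, and subsequent executions clone the cached boilerplate. A standalone analogue of that control flow (illustrative names, not V8 code):

    #include <vector>

    struct Boilerplate;  // stands in for the heap-allocated boilerplate

    // Slot 'index' caches the boilerplate for one literal site. A null
    // entry plays the role of 'undefined' and takes the slow path once,
    // which is what the deferred code above does via the runtime call.
    Boilerplate* GetOrMaterialize(std::vector<Boilerplate*>* literals,
                                  int index,
                                  Boilerplate* (*create)(int)) {
      if ((*literals)[index] == nullptr) {
        (*literals)[index] = create(index);  // executed at most once
      }
      return (*literals)[index];
    }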
-void CodeGenerator::VisitArrayLiteral(ArrayLiteral* a) {
-  UNIMPLEMENTED();
+
+// Materialize the array literal 'node' in the literals array 'literals'
+// of the function. Leave the array boilerplate in 'boilerplate'.
+class DeferredArrayLiteral: public DeferredCode {
+ public:
+  DeferredArrayLiteral(Register boilerplate,
+                       Register literals,
+                       ArrayLiteral* node)
+      : boilerplate_(boilerplate), literals_(literals), node_(node) {
+    set_comment("[ DeferredArrayLiteral");
+  }
+
+  void Generate();
+
+ private:
+  Register boilerplate_;
+  Register literals_;
+  ArrayLiteral* node_;
+};
+
+
+void DeferredArrayLiteral::Generate() {
+  // Since the entry is undefined we call the runtime system to
+  // compute the literal.
+  // Literal array (0).
+  __ push(literals_);
+  // Literal index (1).
+  __ push(Immediate(Smi::FromInt(node_->literal_index())));
+  // Constant properties (2).
+  __ PushHeapObject(node_->literals());
+  __ CallRuntime(Runtime::kCreateArrayLiteralBoilerplate, 3);
+  if (!boilerplate_.is(rax)) __ movq(boilerplate_, rax);
 }
 
+
+void CodeGenerator::VisitArrayLiteral(ArrayLiteral* node) {
+  Comment cmnt(masm_, "[ ArrayLiteral");
+
+  // Retrieve the literals array and check the allocated entry. Begin
+  // with a writable copy of the function of this activation in a
+  // register.
+  frame_->PushFunction();
+  Result literals = frame_->Pop();
+  literals.ToRegister();
+  frame_->Spill(literals.reg());
+
+  // Load the literals array of the function.
+  __ movq(literals.reg(),
+          FieldOperand(literals.reg(), JSFunction::kLiteralsOffset));
+
+  // Load the literal at the ast saved index.
+  Result boilerplate = allocator_->Allocate();
+  ASSERT(boilerplate.is_valid());
+  int literal_offset =
+      FixedArray::kHeaderSize + node->literal_index() * kPointerSize;
+  __ movq(boilerplate.reg(), FieldOperand(literals.reg(), literal_offset));
+
+  // Check whether we need to materialize the object literal boilerplate.
+  // If so, jump to the deferred code passing the literals array.
+  DeferredArrayLiteral* deferred =
+      new DeferredArrayLiteral(boilerplate.reg(), literals.reg(), node);
+  __ CmpHeapObject(boilerplate.reg(), Factory::undefined_value());
+  deferred->Branch(equal);
+  deferred->BindExit();
+  literals.Unuse();
+
+  // Push the resulting array literal boilerplate on the stack.
+  frame_->Push(&boilerplate);
+  // Clone the boilerplate object.
+  Runtime::FunctionId clone_function_id = Runtime::kCloneLiteralBoilerplate;
+  if (node->depth() == 1) {
+    clone_function_id = Runtime::kCloneShallowLiteralBoilerplate;
+  }
+  Result clone = frame_->CallRuntime(clone_function_id, 1);
+  // Push the newly cloned literal object as the result.
+  frame_->Push(&clone);
+
+  // Generate code to set the elements in the array that are not
+  // literals.
+  for (int i = 0; i < node->values()->length(); i++) {
+    Expression* value = node->values()->at(i);
+
+    // If value is a literal the property value is already set in the
+    // boilerplate object.
+    if (value->AsLiteral() != NULL) continue;
+    // If value is a materialized literal the property value is already set
+    // in the boilerplate object if it is simple.
+    if (CompileTimeValue::IsCompileTimeValue(value)) continue;
+
+    // The property must be set by generated code.
+    Load(value);
+
+    // Get the property value off the stack.
+    Result prop_value = frame_->Pop();
+    prop_value.ToRegister();
+
+    // Fetch the array literal while leaving a copy on the stack and
+    // use it to get the elements array.
+    frame_->Dup();
+    Result elements = frame_->Pop();
+    elements.ToRegister();
+    frame_->Spill(elements.reg());
+    // Get the elements array.
+    __ movq(elements.reg(),
+            FieldOperand(elements.reg(), JSObject::kElementsOffset));
+
+    // Write to the indexed properties array.
+    int offset = i * kPointerSize + Array::kHeaderSize;
+    __ movq(FieldOperand(elements.reg(), offset), prop_value.reg());
+
+    // Update the write barrier for the array address.
+    frame_->Spill(prop_value.reg());  // Overwritten by the write barrier.
+    Result scratch = allocator_->Allocate();
+    ASSERT(scratch.is_valid());
+    __ RecordWrite(elements.reg(), offset, prop_value.reg(), scratch.reg());
+  }
+}
+
+
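
The RecordWrite at the end of the element loop is the write barrier: storing a pointer into the cloned array can create a reference from an older-generation object to a new-space value, and the generational collector has to be able to find such slots without scanning the whole heap. A simplified standalone illustration of the idea (invented names, not V8's implementation):

    #include <unordered_set>

    struct Heap {
      std::unordered_set<void**> remembered_slots;

      // Store 'value' into '*slot' and remember the slot so a later
      // scavenge can treat it as a root into the young generation.
      void WriteWithBarrier(void** slot, void* value) {
        *slot = value;
        remembered_slots.insert(slot);
      }
    };

Note also the two 'continue' paths above: elements that are literals or other compile-time values are already present in the boilerplate, so only the remaining elements are stored at run time (in [1, x, 3], for example, only x needs generated code).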
 void CodeGenerator::VisitCatchExtensionObject(CatchExtensionObject* a) {
   UNIMPLEMENTED();
 }
 
 
 void CodeGenerator::VisitAssignment(Assignment* node) {
   Comment cmnt(masm_, "[ Assignment");
   CodeForStatementPosition(node);
 
   { Reference target(this, node->target());
(...skipping 438 matching lines...)
 // 'false_target'/'true_target' as appropriate.
 void CodeGenerator::ToBoolean(ControlDestination* dest) {
   Comment cmnt(masm_, "[ ToBoolean");
 
   // The value to convert should be popped from the frame.
   Result value = frame_->Pop();
   value.ToRegister();
   // Fast case checks.
 
   // 'false' => false.
-  __ movq(kScratchRegister, Factory::false_value(), RelocInfo::EMBEDDED_OBJECT);
-  __ cmpq(value.reg(), kScratchRegister);
+  __ CmpHeapObject(value.reg(), Factory::false_value());
   dest->false_target()->Branch(equal);
 
   // 'true' => true.
-  __ movq(kScratchRegister, Factory::true_value(), RelocInfo::EMBEDDED_OBJECT);
-  __ cmpq(value.reg(), kScratchRegister);
+  __ CmpHeapObject(value.reg(), Factory::true_value());
   dest->true_target()->Branch(equal);
 
   // 'undefined' => false.
-  __ movq(kScratchRegister,
-          Factory::undefined_value(),
-          RelocInfo::EMBEDDED_OBJECT);
-  __ cmpq(value.reg(), kScratchRegister);
+  __ CmpHeapObject(value.reg(), Factory::undefined_value());
   dest->false_target()->Branch(equal);
 
   // Smi => false iff zero.
   ASSERT(kSmiTag == 0);
   __ testq(value.reg(), value.reg());
   dest->false_target()->Branch(zero);
   __ testq(value.reg(), Immediate(kSmiTagMask));
   dest->true_target()->Branch(zero);
 
   // Call the stub for all other cases.
(...skipping 308 matching lines...)
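
The smi fast case depends on the tagging scheme the asserts reference: with kSmiTag == 0, a small integer n is encoded by shifting it left past the tag bits, so a smi's tag bits are zero and the smi 0 is the all-zero word. A standalone sketch (assuming a one-bit tag, which is what testing a single-bit kSmiTagMask suggests; the real constants live in the V8 headers):

    #include <cassert>
    #include <cstdint>

    const int kTagBits = 1;  // assumed tag width for this sketch
    inline intptr_t EncodeSmi(intptr_t n) { return n << kTagBits; }
    inline bool IsSmi(intptr_t v) { return (v & 1) == 0; }

    int main() {
      assert(IsSmi(EncodeSmi(42)));
      // The all-zero word is the smi 0; that is why the generated
      // 'testq(value, value)' zero test above catches exactly the
      // smi-zero => false case before the tag test sends every other
      // smi to the true target.
      assert(EncodeSmi(0) == 0);
      return 0;
    }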
       // Allocate a fresh register to use as a temp in
       // ContextSlotOperandCheckExtensions and to hold the result
       // value.
       value = allocator_->Allocate();
       ASSERT(value.is_valid());
       __ movq(value.reg(),
               ContextSlotOperandCheckExtensions(potential_slot,
                                                 value,
                                                 &slow));
       if (potential_slot->var()->mode() == Variable::CONST) {
-        __ movq(kScratchRegister, Factory::the_hole_value(),
-                RelocInfo::EMBEDDED_OBJECT);
-        __ cmpq(value.reg(), kScratchRegister);
+        __ CmpHeapObject(value.reg(), Factory::the_hole_value());
         done.Branch(not_equal, &value);
         __ movq(value.reg(), Factory::undefined_value(),
                 RelocInfo::EMBEDDED_OBJECT);
       }
       // There is always control flow to slow from
       // ContextSlotOperandCheckExtensions so we have to jump around
       // it.
       done.Jump(&value);
     }
   }
(...skipping 20 matching lines...)
     // Const slots may contain 'the hole' value (the constant hasn't been
     // initialized yet) which needs to be converted into the 'undefined'
     // value.
     //
     // We currently spill the virtual frame because constants use the
     // potentially unsafe direct-frame access of SlotOperand.
     VirtualFrame::SpilledScope spilled_scope;
     Comment cmnt(masm_, "[ Load const");
     JumpTarget exit;
     __ movq(rcx, SlotOperand(slot, rcx));
-    __ movq(kScratchRegister, Factory::the_hole_value(),
-            RelocInfo::EMBEDDED_OBJECT);
-    __ cmpq(rcx, kScratchRegister);
+    __ CmpHeapObject(rcx, Factory::the_hole_value());
     exit.Branch(not_equal);
     __ movq(rcx, Factory::undefined_value(), RelocInfo::EMBEDDED_OBJECT);
     exit.Bind();
     frame_->EmitPush(rcx);
 
   } else if (slot->type() == Slot::PARAMETER) {
     frame_->PushParameterAt(slot->index());
 
   } else if (slot->type() == Slot::LOCAL) {
     frame_->PushLocalAt(slot->index());
(...skipping 63 matching lines...)
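
The "Load const" chunk above and the "Init const" chunk just below hinge on the same sentinel convention: an uninitialized const slot holds "the hole", and a read must convert that sentinel into undefined while leaving every other value alone. A standalone sketch of the read-side conversion (enum stand-ins for the real heap sentinels):

    #include <cassert>

    enum Value { kTheHole, kUndefined, kSmi42 };

    // Mirrors the the_hole/undefined dance above: reads of a const slot
    // yield undefined until the slot has been initialized.
    Value LoadConstSlot(Value slot) {
      return slot == kTheHole ? kUndefined : slot;
    }

    int main() {
      assert(LoadConstSlot(kTheHole) == kUndefined);  // declared, not yet set
      assert(LoadConstSlot(kSmi42) == kSmi42);
      return 0;
    }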
     // Only the first const initialization must be executed (the slot
     // still contains 'the hole' value). When the assignment is executed,
     // the code is identical to a normal store (see below).
     //
     // We spill the frame in the code below because the direct-frame
     // access of SlotOperand is potentially unsafe with an unspilled
     // frame.
     VirtualFrame::SpilledScope spilled_scope;
     Comment cmnt(masm_, "[ Init const");
     __ movq(rcx, SlotOperand(slot, rcx));
-    __ movq(kScratchRegister, Factory::the_hole_value(),
-            RelocInfo::EMBEDDED_OBJECT);
-    __ cmpq(rcx, kScratchRegister);
+    __ CmpHeapObject(rcx, Factory::the_hole_value());
     exit.Branch(not_equal);
   }
 
   // We must execute the store. Storing a variable must keep the (new)
   // value on the stack. This is necessary for compiling assignment
   // expressions.
   //
   // Note: We will reach here even with slot->var()->mode() ==
   // Variable::CONST because of const declarations which will initialize
   // consts to 'the hole' value and by doing so, end up calling this code.
(...skipping 66 matching lines...)
 #undef __
 
 // End of CodeGenerator implementation.
 
 // -----------------------------------------------------------------------------
 // Implementation of stubs.
 
 // Stub classes have public member named masm, not masm_.
 #define __ ACCESS_MASM(masm)
 
+
+void Reference::GetValue(TypeofState typeof_state) {
+  UNIMPLEMENTED();
+}
+
+
 void ToBooleanStub::Generate(MacroAssembler* masm) {
   Label false_result, true_result, not_string;
   __ movq(rax, Operand(rsp, 1 * kPointerSize));
 
   // 'null' => false.
-  __ movq(kScratchRegister, Factory::null_value(), RelocInfo::EMBEDDED_OBJECT);
-  __ cmpq(rax, kScratchRegister);
+  __ CmpHeapObject(rax, Factory::null_value());
   __ j(equal, &false_result);
 
   // Get the map and type of the heap object.
   __ movq(rdx, FieldOperand(rax, HeapObject::kMapOffset));
   __ movzxbq(rcx, FieldOperand(rdx, Map::kInstanceTypeOffset));
 
   // Undetectable => false.
   __ movzxbq(rbx, FieldOperand(rdx, Map::kBitFieldOffset));
   __ and_(rbx, Immediate(1 << Map::kIsUndetectable));
   __ j(not_zero, &false_result);
 
   // JavaScript object => true.
   __ cmpq(rcx, Immediate(FIRST_JS_OBJECT_TYPE));
   __ j(above_equal, &true_result);
 
   // String value => false iff empty.
   __ cmpq(rcx, Immediate(FIRST_NONSTRING_TYPE));
   __ j(above_equal, &not_string);
   __ and_(rcx, Immediate(kStringSizeMask));
   __ cmpq(rcx, Immediate(kShortStringTag));
   __ j(not_equal, &true_result);  // Empty string is always short.
   __ movq(rdx, FieldOperand(rax, String::kLengthOffset));
   __ shr(rdx, Immediate(String::kShortLengthShift));
   __ j(zero, &false_result);
   __ jmp(&true_result);
 
   __ bind(&not_string);
   // HeapNumber => false iff +0, -0, or NaN.
-  __ movq(kScratchRegister,
-          Factory::heap_number_map(),
-          RelocInfo::EMBEDDED_OBJECT);
-  __ cmpq(rdx, kScratchRegister);
+  __ CmpHeapObject(rdx, Factory::heap_number_map());
   __ j(not_equal, &true_result);
   // TODO(x64): Don't use fp stack, use MMX registers?
   __ fldz();  // Load zero onto fp stack
   // Load heap-number double value onto fp stack
   __ fld_d(FieldOperand(rax, HeapNumber::kValueOffset));
   __ fucompp();  // Compare and pop both values.
   __ movq(kScratchRegister, rax);
   __ fnstsw_ax();  // Store fp status word in ax, no checking for exceptions.
   __ testb(rax, Immediate(0x08));  // Test FP condition flag C3.
   __ movq(rax, kScratchRegister);
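
Restated in portable terms: after fucompp compares the heap number's value with zero, condition flag C3 (which the testb's comment says is being tested) is set both when the operands compare equal and when they are unordered, i.e. NaN, so one branch folds "+0, -0, or NaN" into a single test. The predicate the stub computes, as a standalone sketch:

    #include <cassert>
    #include <cmath>

    // A heap number converts to false exactly when its value is +0, -0,
    // or NaN; every other double converts to true.
    bool HeapNumberToBoolean(double v) { return v != 0.0 && !std::isnan(v); }

    int main() {
      assert(!HeapNumberToBoolean(0.0));
      assert(!HeapNumberToBoolean(-0.0));  // -0.0 == 0.0 compares equal
      assert(!HeapNumberToBoolean(std::nan("")));
      assert(HeapNumberToBoolean(42.0));
      return 0;
    }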
(...skipping 801 matching lines...)
   __ addq(rsp, Immediate(2 * kPointerSize));  // remove markers
 
   // Restore frame pointer and return.
   __ pop(rbp);
   __ ret(0);
 }
 
 #undef __
 
 } }  // namespace v8::internal