OLD | NEW |
1 // Copyright 2010 the V8 project authors. All rights reserved. | 1 // Copyright 2010 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 4699 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
4710 // assign the exception value to the catch variable. | 4710 // assign the exception value to the catch variable. |
4711 Comment cmnt(masm_, "[ CatchExtensionObject"); | 4711 Comment cmnt(masm_, "[ CatchExtensionObject"); |
4712 Load(node->key()); | 4712 Load(node->key()); |
4713 Load(node->value()); | 4713 Load(node->value()); |
4714 Result result = | 4714 Result result = |
4715 frame_->CallRuntime(Runtime::kCreateCatchExtensionObject, 2); | 4715 frame_->CallRuntime(Runtime::kCreateCatchExtensionObject, 2); |
4716 frame_->Push(&result); | 4716 frame_->Push(&result); |
4717 } | 4717 } |
4718 | 4718 |
4719 | 4719 |
// Emit code for an assignment whose target is a stack/context slot (a
// non-global variable).  Handles both simple (a = v) and compound
// (a op= v) assignments.  Net frame effect: pushes exactly one value,
// the value of the assignment expression (checked by the ASSERT below).
void CodeGenerator::EmitSlotAssignment(Assignment* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm_, "[ Variable Assignment");
  Variable* var = node->target()->AsVariableProxy()->AsVariable();
  ASSERT(var != NULL);
  Slot* slot = var->slot();
  ASSERT(slot != NULL);

  // Evaluate the right-hand side.  For a compound assignment the current
  // slot value is loaded first and combined with the RHS via the binary op.
  if (node->is_compound()) {
    Result result = LoadFromSlotCheckForArguments(slot, NOT_INSIDE_TYPEOF);
    frame()->Push(&result);
    Load(node->value());

    // Allow the binary op stub to overwrite the RHS operand only when the
    // RHS is a binary operation whose result may be clobbered.
    bool overwrite_value =
        (node->value()->AsBinaryOperation() != NULL &&
         node->value()->AsBinaryOperation()->ResultOverwriteAllowed());
    GenericBinaryOperation(node->binary_op(),
                           node->type(),
                           overwrite_value ? OVERWRITE_RIGHT : NO_OVERWRITE);
  } else {
    Load(node->value());
  }

  // Perform the assignment.  Stores to an already-initialized const are
  // ignored (the value is left on the frame); only the initializing store
  // (Token::INIT_CONST) may write a const slot.
  if (var->mode() != Variable::CONST || node->op() == Token::INIT_CONST) {
    CodeForSourcePosition(node->position());
    StoreToSlot(slot,
                node->op() == Token::INIT_CONST ? CONST_INIT : NOT_CONST_INIT);
  }
  ASSERT(frame_->height() == original_height + 1);
}
| 4754 |
| 4755 |
// Emit code for an assignment whose target is a named property, i.e.
// either obj.name or a global variable (globals are treated as named
// properties of the global object).  Net frame effect: pushes exactly one
// value, the value of the assignment expression.
void CodeGenerator::EmitNamedPropertyAssignment(Assignment* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Comment cmnt(masm_, "[ Named Property Assignment");
  Variable* var = node->target()->AsVariableProxy()->AsVariable();
  Property* prop = node->target()->AsProperty();
  // The target is either a variable (the global case) or a property,
  // never both.
  ASSERT(var == NULL || prop == NULL);

  // Initialize name and evaluate the receiver subexpression.  For the
  // global-variable case the receiver is the global object.
  Handle<String> name;
  if (var != NULL) {
    name = var->name();
    LoadGlobal();
  } else {
    Literal* lit = prop->key()->AsLiteral();
    ASSERT(lit != NULL);
    name = Handle<String>::cast(lit->handle());
    Load(prop->obj());
  }

  if (node->starts_initialization_block()) {
    // Change to slow case in the beginning of an initialization block to
    // avoid the quadratic behavior of repeatedly adding fast properties.
    frame()->Dup();
    Result ignored = frame()->CallRuntime(Runtime::kToSlowProperties, 1);
  }

  if (node->ends_initialization_block()) {
    // Add an extra copy of the receiver to the frame, so that it can be
    // converted back to fast case after the assignment.
    frame()->Dup();
  }

  // Evaluate the right-hand side.  For a compound assignment the current
  // property value is loaded (receiver duplicated for the load) and
  // combined with the RHS via the binary op.
  if (node->is_compound()) {
    frame()->Dup();
    Result value = EmitNamedLoad(name, var != NULL);
    frame()->Push(&value);
    Load(node->value());

    // Allow the binary op stub to overwrite the RHS operand only when the
    // RHS is a binary operation whose result may be clobbered.
    bool overwrite_value =
        (node->value()->AsBinaryOperation() != NULL &&
         node->value()->AsBinaryOperation()->ResultOverwriteAllowed());
    GenericBinaryOperation(node->binary_op(),
                           node->type(),
                           overwrite_value ? OVERWRITE_RIGHT : NO_OVERWRITE);
  } else {
    Load(node->value());
  }

  // Perform the assignment.  It is safe to ignore constants here: a named
  // property target is never a const variable (checked below).
  ASSERT(var == NULL || var->mode() != Variable::CONST);
  ASSERT(node->op() != Token::INIT_CONST);
  CodeForSourcePosition(node->position());
  Result answer = EmitNamedStore(name);
  frame()->Push(&answer);

  if (node->ends_initialization_block()) {
    // The argument to the runtime call is the extra copy of the receiver,
    // which is below the value of the assignment.  Swap the receiver and
    // the value of the assignment expression.
    Result result = frame()->Pop();
    Result receiver = frame()->Pop();
    frame()->Push(&result);
    frame()->Push(&receiver);
    Result ignored = frame_->CallRuntime(Runtime::kToFastProperties, 1);
  }

  ASSERT(frame()->height() == original_height + 1);
}
| 4827 |
| 4828 |
| 4829 void CodeGenerator::EmitKeyedPropertyAssignment(Assignment* node) { |
| 4830 #ifdef DEBUG |
| 4831 int original_height = frame_->height(); |
| 4832 #endif |
| 4833 Comment cmnt(masm_, "[ Named Property Assignment"); |
| 4834 Property* prop = node->target()->AsProperty(); |
| 4835 ASSERT(prop != NULL); |
| 4836 |
| 4837 // Evaluate the receiver subexpression. |
| 4838 Load(prop->obj()); |
| 4839 |
| 4840 if (node->starts_initialization_block()) { |
| 4841 // Change to slow case in the beginning of an initialization block to |
| 4842 // avoid the quadratic behavior of repeatedly adding fast properties. |
| 4843 frame_->Dup(); |
| 4844 Result ignored = frame_->CallRuntime(Runtime::kToSlowProperties, 1); |
| 4845 } |
| 4846 |
| 4847 if (node->ends_initialization_block()) { |
| 4848 // Add an extra copy of the receiver to the frame, so that it can be |
| 4849 // converted back to fast case after the assignment. |
| 4850 frame_->Dup(); |
| 4851 } |
| 4852 |
| 4853 // Evaluate the key subexpression. |
| 4854 Load(prop->key()); |
| 4855 |
| 4856 // Evaluate the right-hand side. |
| 4857 if (node->is_compound()) { |
| 4858 // Duplicate receiver and key. |
| 4859 frame()->PushElementAt(1); |
| 4860 frame()->PushElementAt(1); |
| 4861 Result value = EmitKeyedLoad(); |
| 4862 frame()->Push(&value); |
| 4863 Load(node->value()); |
| 4864 |
| 4865 bool overwrite_value = |
| 4866 (node->value()->AsBinaryOperation() != NULL && |
| 4867 node->value()->AsBinaryOperation()->ResultOverwriteAllowed()); |
| 4868 GenericBinaryOperation(node->binary_op(), |
| 4869 node->type(), |
| 4870 overwrite_value ? OVERWRITE_RIGHT : NO_OVERWRITE); |
| 4871 } else { |
| 4872 Load(node->value()); |
| 4873 } |
| 4874 |
| 4875 // Perform the assignment. It is safe to ignore constants here. |
| 4876 ASSERT(node->op() != Token::INIT_CONST); |
| 4877 CodeForSourcePosition(node->position()); |
| 4878 Result answer = EmitKeyedStore(prop->key()->type()); |
| 4879 frame()->Push(&answer); |
| 4880 |
| 4881 if (node->ends_initialization_block()) { |
| 4882 // The argument to the runtime call is the extra copy of the receiver, |
| 4883 // which is below the value of the assignment. Swap the receiver and |
| 4884 // the value of the assignment expression. |
| 4885 Result result = frame()->Pop(); |
| 4886 Result receiver = frame()->Pop(); |
| 4887 frame()->Push(&result); |
| 4888 frame()->Push(&receiver); |
| 4889 Result ignored = frame_->CallRuntime(Runtime::kToFastProperties, 1); |
| 4890 } |
| 4891 |
| 4892 ASSERT(frame()->height() == original_height + 1); |
| 4893 } |
| 4894 |
| 4895 |
// Dispatch an assignment to the appropriate specialized emitter based on
// the shape of the left-hand side: slot (local/context variable), named
// property (including global variables), or keyed property.  Each emitter
// leaves exactly one value on the frame (checked by the ASSERT below).
void CodeGenerator::VisitAssignment(Assignment* node) {
#ifdef DEBUG
  int original_height = frame_->height();
#endif
  Variable* var = node->target()->AsVariableProxy()->AsVariable();
  Property* prop = node->target()->AsProperty();

  if (var != NULL && !var->is_global()) {
    // Non-global variables are stack or context slots.
    EmitSlotAssignment(node);

  } else if ((prop != NULL && prop->key()->IsPropertyName()) ||
             (var != NULL && var->is_global())) {
    // Properties whose keys are property names and global variables are
    // treated as named property references.  We do not need to consider
    // global 'this' because it is not a valid left-hand side.
    EmitNamedPropertyAssignment(node);

  } else if (prop != NULL) {
    // Other properties (including rewritten parameters for a function that
    // uses arguments) are keyed property assignments.
    EmitKeyedPropertyAssignment(node);

  } else {
    // Invalid left-hand side.
    Load(node->target());
    Result result = frame()->CallRuntime(Runtime::kThrowReferenceError, 1);
    // The runtime call doesn't actually return but the code generator will
    // still generate code and expects a certain frame height.
    frame()->Push(&result);
  }

  ASSERT(frame_->height() == original_height + 1);
}
4821 | 4929 |
4822 | 4930 |
4823 void CodeGenerator::VisitThrow(Throw* node) { | 4931 void CodeGenerator::VisitThrow(Throw* node) { |
4824 Comment cmnt(masm_, "[ Throw"); | 4932 Comment cmnt(masm_, "[ Throw"); |
4825 Load(node->exception()); | 4933 Load(node->exception()); |
4826 Result result = frame_->CallRuntime(Runtime::kThrow, 1); | 4934 Result result = frame_->CallRuntime(Runtime::kThrow, 1); |
4827 frame_->Push(&result); | 4935 frame_->Push(&result); |
4828 } | 4936 } |
(...skipping 185 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
5014 // Load the function to call from the property through a reference. | 5122 // Load the function to call from the property through a reference. |
5015 | 5123 |
5016 // Pass receiver to called function. | 5124 // Pass receiver to called function. |
5017 if (property->is_synthetic()) { | 5125 if (property->is_synthetic()) { |
5018 Reference ref(this, property); | 5126 Reference ref(this, property); |
5019 ref.GetValue(); | 5127 ref.GetValue(); |
5020 // Use global object as receiver. | 5128 // Use global object as receiver. |
5021 LoadGlobalReceiver(); | 5129 LoadGlobalReceiver(); |
5022 } else { | 5130 } else { |
5023 Load(property->obj()); | 5131 Load(property->obj()); |
5024 frame_->Dup(); | 5132 frame()->Dup(); |
5025 Load(property->key()); | 5133 Load(property->key()); |
5026 Result function = EmitKeyedLoad(false); | 5134 Result function = EmitKeyedLoad(); |
5027 Result receiver = frame_->Pop(); | 5135 Result receiver = frame_->Pop(); |
5028 frame_->Push(&function); | 5136 frame_->Push(&function); |
5029 frame_->Push(&receiver); | 5137 frame_->Push(&receiver); |
5030 } | 5138 } |
5031 | 5139 |
5032 // Call the function. | 5140 // Call the function. |
5033 CallWithArguments(args, RECEIVER_MIGHT_BE_VALUE, node->position()); | 5141 CallWithArguments(args, RECEIVER_MIGHT_BE_VALUE, node->position()); |
5034 } | 5142 } |
5035 | 5143 |
5036 } else { | 5144 } else { |
(...skipping 1410 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
6447 __ IncrementCounter(&Counters::named_load_inline_miss, 1); | 6555 __ IncrementCounter(&Counters::named_load_inline_miss, 1); |
6448 | 6556 |
6449 if (!dst_.is(eax)) __ mov(dst_, eax); | 6557 if (!dst_.is(eax)) __ mov(dst_, eax); |
6450 } | 6558 } |
6451 | 6559 |
6452 | 6560 |
// Deferred (slow-case) code for an inlined keyed property load: when the
// inline fast path fails its checks, this calls the keyed-load IC.  The
// loaded value ends up in dst_.  patch_site() marks the inlined map-check
// instruction that the IC initialization code patches.
class DeferredReferenceGetKeyedValue: public DeferredCode {
 public:
  explicit DeferredReferenceGetKeyedValue(Register dst,
                                          Register receiver,
                                          Register key)
      : dst_(dst), receiver_(receiver), key_(key) {
    set_comment("[ DeferredReferenceGetKeyedValue");
  }

  virtual void Generate();

  // Location of the inlined map-check cmp instruction to be patched.
  Label* patch_site() { return &patch_site_; }

 private:
  Label patch_site_;
  Register dst_;       // Destination register for the loaded value.
  Register receiver_;  // Register holding the receiver object.
  Register key_;       // Register holding the key.
};
6474 | 6580 |
6475 | 6581 |
6476 void DeferredReferenceGetKeyedValue::Generate() { | 6582 void DeferredReferenceGetKeyedValue::Generate() { |
6477 if (!receiver_.is(eax)) { | 6583 if (!receiver_.is(eax)) { |
6478 // Register eax is available for key. | 6584 // Register eax is available for key. |
6479 if (!key_.is(eax)) { | 6585 if (!key_.is(eax)) { |
6480 __ mov(eax, key_); | 6586 __ mov(eax, key_); |
6481 } | 6587 } |
6482 if (!receiver_.is(edx)) { | 6588 if (!receiver_.is(edx)) { |
(...skipping 10 matching lines...) Expand all Loading... |
6493 } else { | 6599 } else { |
6494 __ xchg(edx, eax); | 6600 __ xchg(edx, eax); |
6495 } | 6601 } |
6496 // Calculate the delta from the IC call instruction to the map check | 6602 // Calculate the delta from the IC call instruction to the map check |
6497 // cmp instruction in the inlined version. This delta is stored in | 6603 // cmp instruction in the inlined version. This delta is stored in |
6498 // a test(eax, delta) instruction after the call so that we can find | 6604 // a test(eax, delta) instruction after the call so that we can find |
6499 // it in the IC initialization code and patch the cmp instruction. | 6605 // it in the IC initialization code and patch the cmp instruction. |
6500 // This means that we cannot allow test instructions after calls to | 6606 // This means that we cannot allow test instructions after calls to |
6501 // KeyedLoadIC stubs in other places. | 6607 // KeyedLoadIC stubs in other places. |
6502 Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize)); | 6608 Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize)); |
6503 RelocInfo::Mode mode = is_global_ | 6609 __ call(ic, RelocInfo::CODE_TARGET); |
6504 ? RelocInfo::CODE_TARGET_CONTEXT | |
6505 : RelocInfo::CODE_TARGET; | |
6506 __ call(ic, mode); | |
6507 // The delta from the start of the map-compare instruction to the | 6610 // The delta from the start of the map-compare instruction to the |
6508 // test instruction. We use masm_-> directly here instead of the __ | 6611 // test instruction. We use masm_-> directly here instead of the __ |
6509 // macro because the macro sometimes uses macro expansion to turn | 6612 // macro because the macro sometimes uses macro expansion to turn |
6510 // into something that can't return a value. This is encountered | 6613 // into something that can't return a value. This is encountered |
6511 // when doing generated code coverage tests. | 6614 // when doing generated code coverage tests. |
6512 int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(patch_site()); | 6615 int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(patch_site()); |
6513 // Here we use masm_-> instead of the __ macro because this is the | 6616 // Here we use masm_-> instead of the __ macro because this is the |
6514 // instruction that gets patched and coverage code gets in the way. | 6617 // instruction that gets patched and coverage code gets in the way. |
6515 masm_->test(eax, Immediate(-delta_to_patch_site)); | 6618 masm_->test(eax, Immediate(-delta_to_patch_site)); |
6516 __ IncrementCounter(&Counters::keyed_load_inline_miss, 1); | 6619 __ IncrementCounter(&Counters::keyed_load_inline_miss, 1); |
(...skipping 43 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
6560 // instruction that gets patched and coverage code gets in the way. | 6663 // instruction that gets patched and coverage code gets in the way. |
6561 masm_->test(eax, Immediate(-delta_to_patch_site)); | 6664 masm_->test(eax, Immediate(-delta_to_patch_site)); |
6562 // Restore value (returned from store IC), key and receiver | 6665 // Restore value (returned from store IC), key and receiver |
6563 // registers. | 6666 // registers. |
6564 if (!value_.is(eax)) __ mov(value_, eax); | 6667 if (!value_.is(eax)) __ mov(value_, eax); |
6565 __ pop(key_); | 6668 __ pop(key_); |
6566 __ pop(receiver_); | 6669 __ pop(receiver_); |
6567 } | 6670 } |
6568 | 6671 |
6569 | 6672 |
// Load a named property.  Expects the receiver on top of the frame and
// consumes it (frame height drops by one; the result is returned, not
// pushed).  In optimized (in-a-loop, non-global) code the inobject
// property case is inlined with a patchable map check; otherwise the
// load IC is called directly.
Result CodeGenerator::EmitNamedLoad(Handle<String> name, bool is_contextual) {
#ifdef DEBUG
  int original_height = frame()->height();
#endif
  Result result;
  // Do not inline the inobject property case for loads from the global
  // object.  Also do not inline for unoptimized code.  This saves time in
  // the code generator.  Unoptimized code is toplevel code or code that is
  // not in a loop.
  if (is_contextual || scope()->is_global_scope() || loop_nesting() == 0) {
    Comment cmnt(masm(), "[ Load from named Property");
    frame()->Push(name);

    // Contextual (global) loads use a reloc mode that lets the IC find
    // the global context.
    RelocInfo::Mode mode = is_contextual
        ? RelocInfo::CODE_TARGET_CONTEXT
        : RelocInfo::CODE_TARGET;
    result = frame()->CallLoadIC(mode);
    // A test eax instruction following the call signals that the inobject
    // property case was inlined.  Ensure that there is not a test eax
    // instruction here.
    __ nop();
  } else {
    // Inline the inobject property case.
    Comment cmnt(masm(), "[ Inlined named property load");
    Result receiver = frame()->Pop();
    receiver.ToRegister();

    result = allocator()->Allocate();
    ASSERT(result.is_valid());
    DeferredReferenceGetNamedValue* deferred =
        new DeferredReferenceGetNamedValue(result.reg(), receiver.reg(), name);

    // Check that the receiver is a heap object.
    __ test(receiver.reg(), Immediate(kSmiTagMask));
    deferred->Branch(zero);

    __ bind(deferred->patch_site());
    // This is the map check instruction that will be patched (so we can't
    // use the double underscore macro that may insert instructions).
    // Initially use an invalid map to force a failure.
    masm()->cmp(FieldOperand(receiver.reg(), HeapObject::kMapOffset),
                Immediate(Factory::null_value()));
    // This branch is always a forwards branch so it's always a fixed size
    // which allows the assert below to succeed and patching to work.
    deferred->Branch(not_equal);

    // The delta from the patch label to the load offset must be statically
    // known.
    ASSERT(masm()->SizeOfCodeGeneratedSince(deferred->patch_site()) ==
           LoadIC::kOffsetToLoadInstruction);
    // The initial (invalid) offset has to be large enough to force a 32-bit
    // instruction encoding to allow patching with an arbitrary offset.  Use
    // kMaxInt (minus kHeapObjectTag).
    int offset = kMaxInt;
    masm()->mov(result.reg(), FieldOperand(receiver.reg(), offset));

    __ IncrementCounter(&Counters::named_load_inline, 1);
    deferred->BindExit();
  }
  ASSERT(frame()->height() == original_height - 1);
  return result;
}
| 6735 |
| 6736 |
// Store to a named property via the store IC.  Expects the receiver and
// the value on the frame and consumes both (frame height drops by two);
// the IC's result is returned, not pushed.
Result CodeGenerator::EmitNamedStore(Handle<String> name) {
#ifdef DEBUG
  int original_height = frame()->height();
#endif
  frame()->Push(name);
  Result result = frame()->CallStoreIC();

  ASSERT(frame()->height() == original_height - 2);
  return result;
}
| 6747 |
| 6748 |
// Load a keyed property.  Expects the receiver and the key on the frame
// and consumes both (frame height drops by two); the result is returned,
// not pushed.  Inside a loop, a fast-path array load is inlined with a
// patchable map check and a deferred IC fallback; otherwise the keyed
// load IC is called directly.
Result CodeGenerator::EmitKeyedLoad() {
#ifdef DEBUG
  int original_height = frame()->height();
#endif
  Result result;
  // Inline array load code if inside of a loop.  We do not know the
  // receiver map yet, so we initially generate the code with a check
  // against an invalid map.  In the inline cache code, we patch the map
  // check if appropriate.
  if (loop_nesting() > 0) {
    Comment cmnt(masm_, "[ Inlined load from keyed Property");

    Result key = frame_->Pop();
    Result receiver = frame_->Pop();
    key.ToRegister();
    receiver.ToRegister();

    // Use a fresh temporary to load the elements without destroying
    // the receiver which is needed for the deferred slow case.
    Result elements = allocator()->Allocate();
    ASSERT(elements.is_valid());

    // Use a fresh temporary for the index and later the loaded
    // value.
    result = allocator()->Allocate();
    ASSERT(result.is_valid());

    DeferredReferenceGetKeyedValue* deferred =
        new DeferredReferenceGetKeyedValue(result.reg(),
                                           receiver.reg(),
                                           key.reg());

    // Check that the receiver is not a smi.
    __ test(receiver.reg(), Immediate(kSmiTagMask));
    deferred->Branch(zero);

    // Initially, use an invalid map. The map is patched in the IC
    // initialization code.
    __ bind(deferred->patch_site());
    // Use masm-> here instead of the double underscore macro since extra
    // coverage code can interfere with the patching.
    masm_->cmp(FieldOperand(receiver.reg(), HeapObject::kMapOffset),
               Immediate(Factory::null_value()));
    deferred->Branch(not_equal);

    // Check that the key is a smi.
    __ test(key.reg(), Immediate(kSmiTagMask));
    deferred->Branch(not_zero);

    // Get the elements array from the receiver and check that it
    // is not a dictionary.
    __ mov(elements.reg(),
           FieldOperand(receiver.reg(), JSObject::kElementsOffset));
    __ cmp(FieldOperand(elements.reg(), HeapObject::kMapOffset),
           Immediate(Factory::fixed_array_map()));
    deferred->Branch(not_equal);

    // Shift the key to get the actual index value and check that
    // it is within bounds.
    __ mov(result.reg(), key.reg());
    __ SmiUntag(result.reg());
    __ cmp(result.reg(),
           FieldOperand(elements.reg(), FixedArray::kLengthOffset));
    deferred->Branch(above_equal);

    // Load and check that the result is not the hole.  The untagged index
    // in result.reg() is overwritten with the loaded value.
    __ mov(result.reg(), Operand(elements.reg(),
                                 result.reg(),
                                 times_4,
                                 FixedArray::kHeaderSize - kHeapObjectTag));
    elements.Unuse();
    __ cmp(Operand(result.reg()), Immediate(Factory::the_hole_value()));
    deferred->Branch(equal);
    __ IncrementCounter(&Counters::keyed_load_inline, 1);

    deferred->BindExit();
  } else {
    Comment cmnt(masm_, "[ Load from keyed Property");
    result = frame_->CallKeyedLoadIC(RelocInfo::CODE_TARGET);
    // Make sure that we do not have a test instruction after the
    // call.  A test instruction after the call is used to
    // indicate that we have generated an inline version of the
    // keyed load.  The explicit nop instruction is here because
    // the push that follows might be peep-hole optimized away.
    __ nop();
  }
  ASSERT(frame()->height() == original_height - 2);
  return result;
}
6672 | 6837 |
6673 | 6838 |
| 6839 Result CodeGenerator::EmitKeyedStore(StaticType* key_type) { |
| 6840 #ifdef DEBUG |
| 6841 int original_height = frame()->height(); |
| 6842 #endif |
| 6843 Result result; |
| 6844 // Generate inlined version of the keyed store if the code is in a loop |
| 6845 // and the key is likely to be a smi. |
| 6846 if (loop_nesting() > 0 && key_type->IsLikelySmi()) { |
| 6847 Comment cmnt(masm(), "[ Inlined store to keyed Property"); |
| 6848 |
| 6849 // Get the receiver, key and value into registers. |
| 6850 result = frame()->Pop(); |
| 6851 Result key = frame()->Pop(); |
| 6852 Result receiver = frame()->Pop(); |
| 6853 |
| 6854 Result tmp = allocator_->Allocate(); |
| 6855 ASSERT(tmp.is_valid()); |
| 6856 |
| 6857 // Determine whether the value is a constant before putting it in a |
| 6858 // register. |
| 6859 bool value_is_constant = result.is_constant(); |
| 6860 |
| 6861 // Make sure that value, key and receiver are in registers. |
| 6862 result.ToRegister(); |
| 6863 key.ToRegister(); |
| 6864 receiver.ToRegister(); |
| 6865 |
| 6866 DeferredReferenceSetKeyedValue* deferred = |
| 6867 new DeferredReferenceSetKeyedValue(result.reg(), |
| 6868 key.reg(), |
| 6869 receiver.reg()); |
| 6870 |
| 6871 // Check that the value is a smi if it is not a constant. We can skip |
| 6872 // the write barrier for smis and constants. |
| 6873 if (!value_is_constant) { |
| 6874 __ test(result.reg(), Immediate(kSmiTagMask)); |
| 6875 deferred->Branch(not_zero); |
| 6876 } |
| 6877 |
| 6878 // Check that the key is a non-negative smi. |
| 6879 __ test(key.reg(), Immediate(kSmiTagMask | 0x80000000)); |
| 6880 deferred->Branch(not_zero); |
| 6881 |
| 6882 // Check that the receiver is not a smi. |
| 6883 __ test(receiver.reg(), Immediate(kSmiTagMask)); |
| 6884 deferred->Branch(zero); |
| 6885 |
| 6886 // Check that the receiver is a JSArray. |
| 6887 __ mov(tmp.reg(), |
| 6888 FieldOperand(receiver.reg(), HeapObject::kMapOffset)); |
| 6889 __ movzx_b(tmp.reg(), |
| 6890 FieldOperand(tmp.reg(), Map::kInstanceTypeOffset)); |
| 6891 __ cmp(tmp.reg(), JS_ARRAY_TYPE); |
| 6892 deferred->Branch(not_equal); |
| 6893 |
| 6894 // Check that the key is within bounds. Both the key and the length of |
| 6895 // the JSArray are smis. |
| 6896 __ cmp(key.reg(), |
| 6897 FieldOperand(receiver.reg(), JSArray::kLengthOffset)); |
| 6898 deferred->Branch(greater_equal); |
| 6899 |
| 6900 // Get the elements array from the receiver and check that it is not a |
| 6901 // dictionary. |
| 6902 __ mov(tmp.reg(), |
| 6903 FieldOperand(receiver.reg(), JSObject::kElementsOffset)); |
| 6904 // Bind the deferred code patch site to be able to locate the fixed |
| 6905 // array map comparison. When debugging, we patch this comparison to |
| 6906 // always fail so that we will hit the IC call in the deferred code |
| 6907 // which will allow the debugger to break for fast case stores. |
| 6908 __ bind(deferred->patch_site()); |
| 6909 __ cmp(FieldOperand(tmp.reg(), HeapObject::kMapOffset), |
| 6910 Immediate(Factory::fixed_array_map())); |
| 6911 deferred->Branch(not_equal); |
| 6912 |
| 6913 // Store the value. |
| 6914 __ mov(Operand(tmp.reg(), |
| 6915 key.reg(), |
| 6916 times_2, |
| 6917 FixedArray::kHeaderSize - kHeapObjectTag), |
| 6918 result.reg()); |
| 6919 __ IncrementCounter(&Counters::keyed_store_inline, 1); |
| 6920 |
| 6921 deferred->BindExit(); |
| 6922 } else { |
| 6923 result = frame()->CallKeyedStoreIC(); |
| 6924 // Make sure that we do not have a test instruction after the |
| 6925 // call. A test instruction after the call is used to |
| 6926 // indicate that we have generated an inline version of the |
| 6927 // keyed store. |
| 6928 __ nop(); |
| 6929 frame()->Drop(2); |
| 6930 } |
| 6931 ASSERT(frame()->height() == original_height - 3); |
| 6932 return result; |
| 6933 } |
| 6934 |
| 6935 |
6674 #undef __ | 6936 #undef __ |
6675 #define __ ACCESS_MASM(masm) | 6937 #define __ ACCESS_MASM(masm) |
6676 | 6938 |
6677 | 6939 |
6678 Handle<String> Reference::GetName() { | 6940 Handle<String> Reference::GetName() { |
6679 ASSERT(type_ == NAMED); | 6941 ASSERT(type_ == NAMED); |
6680 Property* property = expression_->AsProperty(); | 6942 Property* property = expression_->AsProperty(); |
6681 if (property == NULL) { | 6943 if (property == NULL) { |
6682 // Global variable reference treated as a named property reference. | 6944 // Global variable reference treated as a named property reference. |
6683 VariableProxy* proxy = expression_->AsVariableProxy(); | 6945 VariableProxy* proxy = expression_->AsVariableProxy(); |
6684 ASSERT(proxy->AsVariable() != NULL); | 6946 ASSERT(proxy->AsVariable() != NULL); |
6685 ASSERT(proxy->AsVariable()->is_global()); | 6947 ASSERT(proxy->AsVariable()->is_global()); |
6686 return proxy->name(); | 6948 return proxy->name(); |
6687 } else { | 6949 } else { |
6688 Literal* raw_name = property->key()->AsLiteral(); | 6950 Literal* raw_name = property->key()->AsLiteral(); |
6689 ASSERT(raw_name != NULL); | 6951 ASSERT(raw_name != NULL); |
6690 return Handle<String>(String::cast(*raw_name->handle())); | 6952 return Handle<String>::cast(raw_name->handle()); |
6691 } | 6953 } |
6692 } | 6954 } |
6693 | 6955 |
6694 | 6956 |
6695 void Reference::GetValue() { | 6957 void Reference::GetValue() { |
6696 ASSERT(!cgen_->in_spilled_code()); | 6958 ASSERT(!cgen_->in_spilled_code()); |
6697 ASSERT(cgen_->HasValidEntryRegisters()); | 6959 ASSERT(cgen_->HasValidEntryRegisters()); |
6698 ASSERT(!is_illegal()); | 6960 ASSERT(!is_illegal()); |
6699 MacroAssembler* masm = cgen_->masm(); | 6961 MacroAssembler* masm = cgen_->masm(); |
6700 | 6962 |
(...skipping 12 matching lines...) Expand all Loading... |
6713 cgen_->LoadFromSlotCheckForArguments(slot, NOT_INSIDE_TYPEOF); | 6975 cgen_->LoadFromSlotCheckForArguments(slot, NOT_INSIDE_TYPEOF); |
6714 if (!persist_after_get_) set_unloaded(); | 6976 if (!persist_after_get_) set_unloaded(); |
6715 cgen_->frame()->Push(&result); | 6977 cgen_->frame()->Push(&result); |
6716 break; | 6978 break; |
6717 } | 6979 } |
6718 | 6980 |
6719 case NAMED: { | 6981 case NAMED: { |
6720 Variable* var = expression_->AsVariableProxy()->AsVariable(); | 6982 Variable* var = expression_->AsVariableProxy()->AsVariable(); |
6721 bool is_global = var != NULL; | 6983 bool is_global = var != NULL; |
6722 ASSERT(!is_global || var->is_global()); | 6984 ASSERT(!is_global || var->is_global()); |
6723 | 6985 if (persist_after_get_) cgen_->frame()->Dup(); |
6724 if (persist_after_get_) { | 6986 Result result = cgen_->EmitNamedLoad(GetName(), is_global); |
6725 cgen_->frame()->Dup(); | 6987 if (!persist_after_get_) set_unloaded(); |
6726 } | 6988 cgen_->frame()->Push(&result); |
6727 // Do not inline the inobject property case for loads from the global | |
6728 // object. Also do not inline for unoptimized code. This saves time | |
6729 // in the code generator. Unoptimized code is toplevel code or code | |
6730 // that is not in a loop. | |
6731 if (is_global || | |
6732 cgen_->scope()->is_global_scope() || | |
6733 cgen_->loop_nesting() == 0) { | |
6734 Comment cmnt(masm, "[ Load from named Property"); | |
6735 cgen_->frame()->Push(GetName()); | |
6736 | |
6737 RelocInfo::Mode mode = is_global | |
6738 ? RelocInfo::CODE_TARGET_CONTEXT | |
6739 : RelocInfo::CODE_TARGET; | |
6740 Result answer = cgen_->frame()->CallLoadIC(mode); | |
6741 // A test eax instruction following the call signals that the | |
6742 // inobject property case was inlined. Ensure that there is not | |
6743 // a test eax instruction here. | |
6744 __ nop(); | |
6745 cgen_->frame()->Push(&answer); | |
6746 } else { | |
6747 // Inline the inobject property case. | |
6748 Comment cmnt(masm, "[ Inlined named property load"); | |
6749 Result receiver = cgen_->frame()->Pop(); | |
6750 receiver.ToRegister(); | |
6751 | |
6752 Result value = cgen_->allocator()->Allocate(); | |
6753 ASSERT(value.is_valid()); | |
6754 DeferredReferenceGetNamedValue* deferred = | |
6755 new DeferredReferenceGetNamedValue(value.reg(), | |
6756 receiver.reg(), | |
6757 GetName()); | |
6758 | |
6759 // Check that the receiver is a heap object. | |
6760 __ test(receiver.reg(), Immediate(kSmiTagMask)); | |
6761 deferred->Branch(zero); | |
6762 | |
6763 __ bind(deferred->patch_site()); | |
6764 // This is the map check instruction that will be patched (so we can't | |
6765 // use the double underscore macro that may insert instructions). | |
6766 // Initially use an invalid map to force a failure. | |
6767 masm->cmp(FieldOperand(receiver.reg(), HeapObject::kMapOffset), | |
6768 Immediate(Factory::null_value())); | |
6769 // This branch is always a forwards branch so it's always a fixed | |
6770 // size which allows the assert below to succeed and patching to work. | |
6771 deferred->Branch(not_equal); | |
6772 | |
6773 // The delta from the patch label to the load offset must be | |
6774 // statically known. | |
6775 ASSERT(masm->SizeOfCodeGeneratedSince(deferred->patch_site()) == | |
6776 LoadIC::kOffsetToLoadInstruction); | |
6777 // The initial (invalid) offset has to be large enough to force | |
6778 // a 32-bit instruction encoding to allow patching with an | |
6779 // arbitrary offset. Use kMaxInt (minus kHeapObjectTag). | |
6780 int offset = kMaxInt; | |
6781 masm->mov(value.reg(), FieldOperand(receiver.reg(), offset)); | |
6782 | |
6783 __ IncrementCounter(&Counters::named_load_inline, 1); | |
6784 deferred->BindExit(); | |
6785 cgen_->frame()->Push(&value); | |
6786 } | |
6787 if (!persist_after_get_) { | |
6788 set_unloaded(); | |
6789 } | |
6790 break; | 6989 break; |
6791 } | 6990 } |
6792 | 6991 |
6793 case KEYED: { | 6992 case KEYED: { |
6794 if (persist_after_get_) { | 6993 if (persist_after_get_) { |
6795 cgen_->frame()->PushElementAt(1); | 6994 cgen_->frame()->PushElementAt(1); |
6796 cgen_->frame()->PushElementAt(1); | 6995 cgen_->frame()->PushElementAt(1); |
6797 } | 6996 } |
6798 Variable* var = expression_->AsVariableProxy()->AsVariable(); | 6997 Result value = cgen_->EmitKeyedLoad(); |
6799 bool is_global = var != NULL; | |
6800 ASSERT(!is_global || var->is_global()); | |
6801 Result value = cgen_->EmitKeyedLoad(is_global); | |
6802 cgen_->frame()->Push(&value); | 6998 cgen_->frame()->Push(&value); |
6803 if (!persist_after_get_) { | 6999 if (!persist_after_get_) set_unloaded(); |
6804 set_unloaded(); | |
6805 } | |
6806 break; | 7000 break; |
6807 } | 7001 } |
6808 | 7002 |
6809 default: | 7003 default: |
6810 UNREACHABLE(); | 7004 UNREACHABLE(); |
6811 } | 7005 } |
6812 } | 7006 } |
6813 | 7007 |
6814 | 7008 |
6815 void Reference::TakeValue() { | 7009 void Reference::TakeValue() { |
6816 // For non-constant frame-allocated slots, we invalidate the value in the | 7010 // For non-constant frame-allocated slots, we invalidate the value in the |
6817 // slot. For all others, we fall back on GetValue. | 7011 // slot. For all others, we fall back on GetValue. |
6818 ASSERT(!cgen_->in_spilled_code()); | 7012 ASSERT(!cgen_->in_spilled_code()); |
6819 ASSERT(!is_illegal()); | 7013 ASSERT(!is_illegal()); |
(...skipping 31 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
6851 void Reference::SetValue(InitState init_state) { | 7045 void Reference::SetValue(InitState init_state) { |
6852 ASSERT(cgen_->HasValidEntryRegisters()); | 7046 ASSERT(cgen_->HasValidEntryRegisters()); |
6853 ASSERT(!is_illegal()); | 7047 ASSERT(!is_illegal()); |
6854 MacroAssembler* masm = cgen_->masm(); | 7048 MacroAssembler* masm = cgen_->masm(); |
6855 switch (type_) { | 7049 switch (type_) { |
6856 case SLOT: { | 7050 case SLOT: { |
6857 Comment cmnt(masm, "[ Store to Slot"); | 7051 Comment cmnt(masm, "[ Store to Slot"); |
6858 Slot* slot = expression_->AsVariableProxy()->AsVariable()->slot(); | 7052 Slot* slot = expression_->AsVariableProxy()->AsVariable()->slot(); |
6859 ASSERT(slot != NULL); | 7053 ASSERT(slot != NULL); |
6860 cgen_->StoreToSlot(slot, init_state); | 7054 cgen_->StoreToSlot(slot, init_state); |
6861 cgen_->UnloadReference(this); | 7055 set_unloaded(); |
6862 break; | 7056 break; |
6863 } | 7057 } |
6864 | 7058 |
6865 case NAMED: { | 7059 case NAMED: { |
6866 Comment cmnt(masm, "[ Store to named Property"); | 7060 Comment cmnt(masm, "[ Store to named Property"); |
6867 cgen_->frame()->Push(GetName()); | 7061 Result answer = cgen_->EmitNamedStore(GetName()); |
6868 Result answer = cgen_->frame()->CallStoreIC(); | |
6869 cgen_->frame()->Push(&answer); | 7062 cgen_->frame()->Push(&answer); |
6870 set_unloaded(); | 7063 set_unloaded(); |
6871 break; | 7064 break; |
6872 } | 7065 } |
6873 | 7066 |
6874 case KEYED: { | 7067 case KEYED: { |
6875 Comment cmnt(masm, "[ Store to keyed Property"); | 7068 Comment cmnt(masm, "[ Store to keyed Property"); |
6876 | |
6877 // Generate inlined version of the keyed store if the code is in | |
6878 // a loop and the key is likely to be a smi. | |
6879 Property* property = expression()->AsProperty(); | 7069 Property* property = expression()->AsProperty(); |
6880 ASSERT(property != NULL); | 7070 ASSERT(property != NULL); |
6881 StaticType* key_smi_analysis = property->key()->type(); | 7071 Result answer = cgen_->EmitKeyedStore(property->key()->type()); |
6882 | 7072 cgen_->frame()->Push(&answer); |
6883 if (cgen_->loop_nesting() > 0 && key_smi_analysis->IsLikelySmi()) { | 7073 set_unloaded(); |
6884 Comment cmnt(masm, "[ Inlined store to keyed Property"); | |
6885 | |
6886 // Get the receiver, key and value into registers. | |
6887 Result value = cgen_->frame()->Pop(); | |
6888 Result key = cgen_->frame()->Pop(); | |
6889 Result receiver = cgen_->frame()->Pop(); | |
6890 | |
6891 Result tmp = cgen_->allocator_->Allocate(); | |
6892 ASSERT(tmp.is_valid()); | |
6893 | |
6894 // Determine whether the value is a constant before putting it | |
6895 // in a register. | |
6896 bool value_is_constant = value.is_constant(); | |
6897 | |
6898 // Make sure that value, key and receiver are in registers. | |
6899 value.ToRegister(); | |
6900 key.ToRegister(); | |
6901 receiver.ToRegister(); | |
6902 | |
6903 DeferredReferenceSetKeyedValue* deferred = | |
6904 new DeferredReferenceSetKeyedValue(value.reg(), | |
6905 key.reg(), | |
6906 receiver.reg()); | |
6907 | |
6908 // Check that the value is a smi if it is not a constant. We | |
6909 // can skip the write barrier for smis and constants. | |
6910 if (!value_is_constant) { | |
6911 __ test(value.reg(), Immediate(kSmiTagMask)); | |
6912 deferred->Branch(not_zero); | |
6913 } | |
6914 | |
6915 // Check that the key is a non-negative smi. | |
6916 __ test(key.reg(), Immediate(kSmiTagMask | 0x80000000)); | |
6917 deferred->Branch(not_zero); | |
6918 | |
6919 // Check that the receiver is not a smi. | |
6920 __ test(receiver.reg(), Immediate(kSmiTagMask)); | |
6921 deferred->Branch(zero); | |
6922 | |
6923 // Check that the receiver is a JSArray. | |
6924 __ mov(tmp.reg(), | |
6925 FieldOperand(receiver.reg(), HeapObject::kMapOffset)); | |
6926 __ movzx_b(tmp.reg(), | |
6927 FieldOperand(tmp.reg(), Map::kInstanceTypeOffset)); | |
6928 __ cmp(tmp.reg(), JS_ARRAY_TYPE); | |
6929 deferred->Branch(not_equal); | |
6930 | |
6931 // Check that the key is within bounds. Both the key and the | |
6932 // length of the JSArray are smis. | |
6933 __ cmp(key.reg(), | |
6934 FieldOperand(receiver.reg(), JSArray::kLengthOffset)); | |
6935 deferred->Branch(greater_equal); | |
6936 | |
6937 // Get the elements array from the receiver and check that it | |
6938 // is not a dictionary. | |
6939 __ mov(tmp.reg(), | |
6940 FieldOperand(receiver.reg(), JSObject::kElementsOffset)); | |
6941 // Bind the deferred code patch site to be able to locate the | |
6942 // fixed array map comparison. When debugging, we patch this | |
6943 // comparison to always fail so that we will hit the IC call | |
6944 // in the deferred code which will allow the debugger to | |
6945 // break for fast case stores. | |
6946 __ bind(deferred->patch_site()); | |
6947 __ cmp(FieldOperand(tmp.reg(), HeapObject::kMapOffset), | |
6948 Immediate(Factory::fixed_array_map())); | |
6949 deferred->Branch(not_equal); | |
6950 | |
6951 // Store the value. | |
6952 __ mov(Operand(tmp.reg(), | |
6953 key.reg(), | |
6954 times_2, | |
6955 FixedArray::kHeaderSize - kHeapObjectTag), | |
6956 value.reg()); | |
6957 __ IncrementCounter(&Counters::keyed_store_inline, 1); | |
6958 | |
6959 deferred->BindExit(); | |
6960 | |
6961 cgen_->frame()->Push(&receiver); | |
6962 cgen_->frame()->Push(&key); | |
6963 cgen_->frame()->Push(&value); | |
6964 } else { | |
6965 Result answer = cgen_->frame()->CallKeyedStoreIC(); | |
6966 // Make sure that we do not have a test instruction after the | |
6967 // call. A test instruction after the call is used to | |
6968 // indicate that we have generated an inline version of the | |
6969 // keyed store. | |
6970 __ nop(); | |
6971 cgen_->frame()->Push(&answer); | |
6972 } | |
6973 cgen_->UnloadReference(this); | |
6974 break; | 7074 break; |
6975 } | 7075 } |
6976 | 7076 |
6977 default: | 7077 case UNLOADED: |
| 7078 case ILLEGAL: |
6978 UNREACHABLE(); | 7079 UNREACHABLE(); |
6979 } | 7080 } |
6980 } | 7081 } |
6981 | 7082 |
6982 | 7083 |
6983 void FastNewClosureStub::Generate(MacroAssembler* masm) { | 7084 void FastNewClosureStub::Generate(MacroAssembler* masm) { |
6984 // Clone the boilerplate in new space. Set the context to the | 7085 // Clone the boilerplate in new space. Set the context to the |
6985 // current context in esi. | 7086 // current context in esi. |
6986 Label gc; | 7087 Label gc; |
6987 __ AllocateInNewSpace(JSFunction::kSize, eax, ebx, ecx, &gc, TAG_OBJECT); | 7088 __ AllocateInNewSpace(JSFunction::kSize, eax, ebx, ecx, &gc, TAG_OBJECT); |
(...skipping 3647 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
10635 | 10736 |
10636 // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater) | 10737 // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater) |
10637 // tagged as a small integer. | 10738 // tagged as a small integer. |
10638 __ bind(&runtime); | 10739 __ bind(&runtime); |
10639 __ TailCallRuntime(ExternalReference(Runtime::kStringCompare), 2, 1); | 10740 __ TailCallRuntime(ExternalReference(Runtime::kStringCompare), 2, 1); |
10640 } | 10741 } |
10641 | 10742 |
10642 #undef __ | 10743 #undef __ |
10643 | 10744 |
10644 } } // namespace v8::internal | 10745 } } // namespace v8::internal |
OLD | NEW |