Chromium Code Reviews

Unified Diff: src/arm/codegen-arm.cc

Issue 2848023: ARM: Remove spilled scopes. (Closed) Base URL: http://v8.googlecode.com/svn/branches/bleeding_edge/
Patch Set: Created 10 years, 6 months ago
 // Copyright 2010 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 5147 matching lines...)
     frame_->EmitPush(r0);
   }
   ASSERT_EQ(original_height + 1, frame_->height());
 }


 void CodeGenerator::VisitUnaryOperation(UnaryOperation* node) {
 #ifdef DEBUG
   int original_height = frame_->height();
 #endif
-  VirtualFrame::SpilledScope spilled_scope(frame_);
   Comment cmnt(masm_, "[ UnaryOperation");

   Token::Value op = node->op();

   if (op == Token::NOT) {
     LoadCondition(node->expression(), false_target(), true_target(), true);
     // LoadCondition may (and usually does) leave a test and branch to
     // be emitted by the caller. In that case, negate the condition.
     if (has_cc()) cc_reg_ = NegateCondition(cc_reg_);

(...skipping 51 matching lines...)
          node->expression()->AsBinaryOperation()->ResultOverwriteAllowed());
     Load(node->expression());
     switch (op) {
       case Token::NOT:
       case Token::DELETE:
       case Token::TYPEOF:
         UNREACHABLE();  // handled above
         break;

       case Token::SUB: {
-        VirtualFrame::SpilledScope spilled(frame_);
-        frame_->EmitPop(r0);
+        frame_->PopToR0();
         GenericUnaryOpStub stub(Token::SUB, overwrite);
         frame_->CallStub(&stub, 0);
         frame_->EmitPush(r0);  // r0 has result
         break;
       }

       case Token::BIT_NOT: {
-        // smi check
-        VirtualFrame::SpilledScope spilled(frame_);
-        frame_->EmitPop(r0);
-        JumpTarget smi_label;
+        Register tos = frame_->PopToRegister();
+        JumpTarget not_smi_label;
         JumpTarget continue_label;
-        __ tst(r0, Operand(kSmiTagMask));
-        smi_label.Branch(eq);
+        // Smi check.
+        __ tst(tos, Operand(kSmiTagMask));
+        not_smi_label.Branch(ne);

+        __ mvn(tos, Operand(tos));
+        __ bic(tos, tos, Operand(kSmiTagMask));  // Bit-clear inverted smi-tag.
+        frame_->EmitPush(tos);
+        // The fast case is the first to jump to the continue label, so it gets
+        // to decide the virtual frame layout.
+        continue_label.Jump();
+
+        not_smi_label.Bind();
+        frame_->SpillAll();
+        __ Move(r0, tos);
         GenericUnaryOpStub stub(Token::BIT_NOT, overwrite);
         frame_->CallStub(&stub, 0);
-        continue_label.Jump();
+        frame_->EmitPush(r0);

-        smi_label.Bind();
-        __ mvn(r0, Operand(r0));
-        __ bic(r0, r0, Operand(kSmiTagMask));  // bit-clear inverted smi-tag
         continue_label.Bind();
-        frame_->EmitPush(r0);  // r0 has result
         break;
       }

       case Token::VOID:
         frame_->Drop();
         frame_->EmitPushRoot(Heap::kUndefinedValueRootIndex);
         break;

       case Token::ADD: {
-        VirtualFrame::SpilledScope spilled(frame_);
-        frame_->EmitPop(r0);
+        Register tos = frame_->Peek();
         // Smi check.
         JumpTarget continue_label;
-        __ tst(r0, Operand(kSmiTagMask));
+        __ tst(tos, Operand(kSmiTagMask));
         continue_label.Branch(eq);
+
+        frame_->InvokeBuiltin(Builtins::TO_NUMBER, CALL_JS, 1);
         frame_->EmitPush(r0);
-        frame_->InvokeBuiltin(Builtins::TO_NUMBER, CALL_JS, 1);
+
         continue_label.Bind();
-        frame_->EmitPush(r0);  // r0 has result
         break;
       }
       default:
         UNREACHABLE();
     }
   }
   ASSERT(!has_valid_frame() ||
          (has_cc() && frame_->height() == original_height) ||
          (!has_cc() && frame_->height() == original_height + 1));
 }
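A note on the new Token::BIT_NOT fast path above: for a smi input the patch inverts the tagged word with mvn and then clears the (inverted) tag bit with bic, which yields the smi encoding of the bitwise-not of the untagged value, so the GenericUnaryOpStub call is skipped entirely. The standalone C++ sketch below checks that identity; it is illustrative only (plain integers, not V8's Register/Operand types) and assumes the 32-bit smi layout this code targets: a one-bit tag of 0, so a smi x is stored as x << 1 and kSmiTagMask is 1.

    #include <cassert>
    #include <cstdint>
    #include <cstdio>

    int main() {
      const uint32_t kSmiTagMask = 1;  // assumed 32-bit smi layout: tag size 1, tag value 0
      const int32_t samples[] = {0, 1, -1, 42, -12345, (1 << 30) - 1, -(1 << 30)};
      for (int32_t x : samples) {
        uint32_t tagged = static_cast<uint32_t>(x) << 1;      // smi encoding of x
        assert((tagged & kSmiTagMask) == 0);                  // the tst / branch-on-eq smi check
        uint32_t after_mvn = ~tagged;                         // __ mvn(tos, Operand(tos))
        uint32_t after_bic = after_mvn & ~kSmiTagMask;        // __ bic(tos, tos, Operand(kSmiTagMask))
        assert(after_bic == static_cast<uint32_t>(~x) << 1);  // equals the smi encoding of ~x
      }
      std::printf("mvn + bic produces the smi encoding of ~x for all samples\n");
      return 0;
    }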


 void CodeGenerator::VisitCountOperation(CountOperation* node) {
 #ifdef DEBUG
   int original_height = frame_->height();
 #endif
   Comment cmnt(masm_, "[ CountOperation");
+  VirtualFrame::RegisterAllocationScope scope(this);

   bool is_postfix = node->is_postfix();
   bool is_increment = node->op() == Token::INC;

   Variable* var = node->expression()->AsVariableProxy()->AsVariable();
   bool is_const = (var != NULL && var->mode() == Variable::CONST);
   bool is_slot = (var != NULL && var->mode() == Variable::VAR);

   if (!is_const && is_slot && type_info(var->slot()).IsSmi()) {
     // The type info declares that this variable is always a Smi. That
(...skipping 123 matching lines...)
   // operators must yield the result of one of the two expressions
   // before any ToBoolean() conversions. This means that the value
   // produced by a && or || operator is not necessarily a boolean.

   // NOTE: If the left hand side produces a materialized value (not in
   // the CC register), we force the right hand side to do the
   // same. This is necessary because we may have to branch to the exit
   // after evaluating the left hand side (due to the shortcut
   // semantics), but the compiler must (statically) know if the result
   // of compiling the binary operation is materialized or not.
-  VirtualFrame::SpilledScope spilled_scope(frame_);
   if (node->op() == Token::AND) {
     JumpTarget is_true;
     LoadCondition(node->left(), &is_true, false_target(), false);
     if (has_valid_frame() && !has_cc()) {
       // The left-hand side result is on top of the virtual frame.
       JumpTarget pop_and_continue;
       JumpTarget exit;

       frame_->Dup();
       // Avoid popping the result if it converts to 'false' using the
(...skipping 164 matching lines...)
   // equality.
   if (op == Token::EQ || op == Token::EQ_STRICT) {
     bool left_is_null =
         left->AsLiteral() != NULL && left->AsLiteral()->IsNull();
     bool right_is_null =
         right->AsLiteral() != NULL && right->AsLiteral()->IsNull();
     // The 'null' value can only be equal to 'null' or 'undefined'.
     if (left_is_null || right_is_null) {
       Load(left_is_null ? right : left);
       Register tos = frame_->PopToRegister();
-      // JumpTargets can't cope with register allocation yet.
-      frame_->SpillAll();
       __ LoadRoot(ip, Heap::kNullValueRootIndex);
       __ cmp(tos, ip);

       // The 'null' value is only equal to 'undefined' if using non-strict
       // comparisons.
       if (op != Token::EQ_STRICT) {
         true_target()->Branch(eq);

         __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
         __ cmp(tos, Operand(ip));
(...skipping 22 matching lines...)
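The comparisons emitted in the chunk above encode the JavaScript rule that 'null' is equal only to 'null' under strict equality, and to 'null' or 'undefined' under non-strict equality. A minimal standalone C++ sketch of just that decision; the Value enum and EqualsNull helper are hypothetical stand-ins for illustration, and the additional cases handled in the skipped lines are ignored here.

    #include <cstdio>

    enum class Value { kNull, kUndefined, kOther };  // hypothetical stand-in for tagged values

    // Mirrors the branch structure above: compare against null first, and only
    // for non-strict equality (op != Token::EQ_STRICT) also accept undefined.
    static bool EqualsNull(Value v, bool strict) {
      if (v == Value::kNull) return true;
      if (!strict && v == Value::kUndefined) return true;
      return false;
    }

    int main() {
      std::printf("null == null       -> %d\n", EqualsNull(Value::kNull, false));
      std::printf("null == undefined  -> %d\n", EqualsNull(Value::kUndefined, false));
      std::printf("null === undefined -> %d\n", EqualsNull(Value::kUndefined, true));
      std::printf("null == 42         -> %d\n", EqualsNull(Value::kOther, false));
      return 0;
    }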
   if ((op == Token::EQ || op == Token::EQ_STRICT) &&
       (operation != NULL && operation->op() == Token::TYPEOF) &&
       (right->AsLiteral() != NULL &&
        right->AsLiteral()->handle()->IsString())) {
     Handle<String> check(String::cast(*right->AsLiteral()->handle()));

     // Load the operand, move it to a register.
     LoadTypeofExpression(operation->expression());
     Register tos = frame_->PopToRegister();

-    // JumpTargets can't cope with register allocation yet.
-    frame_->SpillAll();
-
     Register scratch = VirtualFrame::scratch0();

     if (check->Equals(Heap::number_symbol())) {
       __ tst(tos, Operand(kSmiTagMask));
       true_target()->Branch(eq);
       __ ldr(tos, FieldMemOperand(tos, HeapObject::kMapOffset));
       __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
       __ cmp(tos, ip);
       cc_reg_ = eq;

(...skipping 100 matching lines...)

     case Token::GTE:
       Comparison(ge, left, right);
       break;

     case Token::EQ_STRICT:
       Comparison(eq, left, right, true);
       break;

     case Token::IN: {
-      VirtualFrame::SpilledScope scope(frame_);
       Load(left);
       Load(right);
       frame_->InvokeBuiltin(Builtins::IN, CALL_JS, 2);
       frame_->EmitPush(r0);
       break;
     }

     case Token::INSTANCEOF: {
-      VirtualFrame::SpilledScope scope(frame_);
       Load(left);
       Load(right);
       InstanceofStub stub;
       frame_->CallStub(&stub, 2);
       // At this point if instanceof succeeded then r0 == 0.
       __ tst(r0, Operand(r0));
       cc_reg_ = eq;
       break;
     }

(...skipping 77 matching lines...)
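A small aside on the Token::INSTANCEOF case above: the stub leaves 0 in r0 on success (per the comment in the source), and `tst r0, r0` ANDs the register with itself purely to set flags, so the Z flag (condition eq) is set exactly when r0 is zero. A trivial standalone sketch of that flag logic, using plain integers rather than ARM flags:

    #include <cassert>
    #include <cstdint>
    #include <cstdio>

    int main() {
      const uint32_t samples[] = {0u, 1u, 0xDEADBEEFu, 0x80000000u};
      for (uint32_t r0 : samples) {
        bool z_flag = (r0 & r0) == 0;  // what `tst r0, r0` computes (result discarded, flags kept)
        bool eq = z_flag;              // cc_reg_ = eq: true branch taken when the stub returned 0
        assert(eq == (r0 == 0));
      }
      std::printf("tst r0, r0 sets eq exactly when r0 == 0\n");
      return 0;
    }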
   }

   virtual void Generate();

  private:
   Register key_;
   Register receiver_;
 };


+// Takes key and register in r0 and r1 or vice versa. Returns result
+// in r0.
 void DeferredReferenceGetKeyedValue::Generate() {
   ASSERT((key_.is(r0) && receiver_.is(r1)) ||
          (key_.is(r1) && receiver_.is(r0)));

+  VirtualFrame copied_frame(*frame_state()->frame());
+  copied_frame.SpillAll();
+
   Register scratch1 = VirtualFrame::scratch0();
   Register scratch2 = VirtualFrame::scratch1();
   __ DecrementCounter(&Counters::keyed_load_inline, 1, scratch1, scratch2);
   __ IncrementCounter(&Counters::keyed_load_inline_miss, 1, scratch1, scratch2);

   // Ensure key in r0 and receiver in r1 to match keyed load ic calling
   // convention.
   if (key_.is(r1)) {
     __ Swap(r0, r1, ip);
   }

   // The rest of the instructions in the deferred code must be together.
   { Assembler::BlockConstPoolScope block_const_pool(masm_);
     // Call keyed load IC. It has the arguments key and receiver in r0 and r1.
     Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
     __ Call(ic, RelocInfo::CODE_TARGET);
     // The call must be followed by a nop instruction to indicate that the
     // keyed load has been inlined.
     __ nop(PROPERTY_ACCESS_INLINED);

+    // Now go back to the frame that we entered with. This will not overwrite
+    // the receiver or key registers since they were not in use when we came
+    // in. The instructions emitted by this merge are skipped over by the
+    // inline load patching mechanism when looking for the branch instruction
+    // that tells it where the code to patch is.
+    copied_frame.MergeTo(frame_state()->frame());
+
     // Block the constant pool for one more instruction after leaving this
     // constant pool block scope to include the branch instruction ending the
     // deferred code.
     __ BlockConstPoolFor(1);
   }
 }
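The copied_frame additions above are the heart of removing spilled scopes from this deferred code: instead of requiring an already-spilled frame on entry, the deferred code snapshots the virtual frame it was entered with, spills only the snapshot around the IC call, and then merges back so execution rejoins the inline code with the frame layout it expects. The toy model below illustrates that idea only; it is not V8's VirtualFrame API, and the ToyFrame/Slot types are invented for the sketch (only copied_frame, SpillAll, and MergeTo correspond to names in the patch).

    #include <cstdio>
    #include <string>
    #include <vector>

    // Invented stand-in for a virtual-frame element: logically named, and either
    // cached in a register or stored on the real stack.
    struct Slot {
      std::string name;
      bool in_register;
    };

    struct ToyFrame {
      std::vector<Slot> slots;

      // Push every register-cached element to memory so a call may clobber
      // all allocatable registers (what SpillAll() achieves conceptually).
      void SpillAll() {
        for (Slot& s : slots) s.in_register = false;
      }

      // Re-establish the register/stack layout of |target|. V8 emits the actual
      // moves and reloads here; the toy version just copies the placement flags.
      void MergeTo(const ToyFrame& target) {
        for (size_t i = 0; i < slots.size(); ++i) {
          slots[i].in_register = target.slots[i].in_register;
        }
      }
    };

    int main() {
      // Frame layout as the deferred code is entered (key/receiver in registers).
      ToyFrame entry_frame;
      entry_frame.slots = {{"receiver", true}, {"key", true}, {"expression", false}};

      ToyFrame copied_frame = entry_frame;  // VirtualFrame copied_frame(*frame_state()->frame());
      copied_frame.SpillAll();              // copied_frame.SpillAll();

      // ... call that may clobber every allocatable register goes here ...

      copied_frame.MergeTo(entry_frame);    // copied_frame.MergeTo(frame_state()->frame());

      for (const Slot& s : copied_frame.slots) {
        std::printf("%-10s -> %s\n", s.name.c_str(), s.in_register ? "register" : "stack");
      }
      return 0;
    }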


 class DeferredReferenceSetKeyedValue: public DeferredCode {
  public:
(...skipping 133 matching lines...)

   // Counter will be decremented in the deferred code. Placed here to avoid
   // having it in the instruction stream below where patching will occur.
   __ IncrementCounter(&Counters::keyed_load_inline, 1,
                       frame_->scratch0(), frame_->scratch1());

   // Load the key and receiver from the stack.
   bool key_is_known_smi = frame_->KnownSmiAt(0);
   Register key = frame_->PopToRegister();
   Register receiver = frame_->PopToRegister(key);
-  VirtualFrame::SpilledScope spilled(frame_);

   // The deferred code expects key and receiver in registers.
   DeferredReferenceGetKeyedValue* deferred =
       new DeferredReferenceGetKeyedValue(key, receiver);

   // Check that the receiver is a heap object.
   __ tst(receiver, Operand(kSmiTagMask));
   deferred->Branch(eq);

   // The following instructions are the part of the inlined load keyed
(...skipping 4604 matching lines...)
   __ bind(&string_add_runtime);
   __ TailCallRuntime(Runtime::kStringAdd, 2, 1);
 }


 #undef __

 } }  // namespace v8::internal

 #endif  // V8_TARGET_ARCH_ARM
