OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 3908 matching lines...) |
3919 __ bind(&done); | 3919 __ bind(&done); |
3920 | 3920 |
3921 context()->Plug(v0); | 3921 context()->Plug(v0); |
3922 } else { | 3922 } else { |
3923 // This expression cannot throw a reference error at the top level. | 3923 // This expression cannot throw a reference error at the top level. |
3924 VisitInCurrentContext(expr); | 3924 VisitInCurrentContext(expr); |
3925 } | 3925 } |
3926 } | 3926 } |
3927 | 3927 |
3928 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr, | 3928 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr, |
3929 Handle<String> check, | 3929 Handle<String> check) { |
3930 Label* if_true, | 3930 Label materialize_true, materialize_false; |
3931 Label* if_false, | 3931 Label* if_true = NULL; |
3932 Label* fall_through) { | 3932 Label* if_false = NULL; |
| 3933 Label* fall_through = NULL; |
| 3934 context()->PrepareTest(&materialize_true, &materialize_false, |
| 3935 &if_true, &if_false, &fall_through); |
| 3936 |
3933 { AccumulatorValueContext context(this); | 3937 { AccumulatorValueContext context(this); |
3934 VisitForTypeofValue(expr); | 3938 VisitForTypeofValue(expr); |
3935 } | 3939 } |
3936 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false); | 3940 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false); |
3937 | 3941 |
3938 if (check->Equals(isolate()->heap()->number_symbol())) { | 3942 if (check->Equals(isolate()->heap()->number_symbol())) { |
3939 __ JumpIfSmi(v0, if_true); | 3943 __ JumpIfSmi(v0, if_true); |
3940 __ lw(v0, FieldMemOperand(v0, HeapObject::kMapOffset)); | 3944 __ lw(v0, FieldMemOperand(v0, HeapObject::kMapOffset)); |
3941 __ LoadRoot(at, Heap::kHeapNumberMapRootIndex); | 3945 __ LoadRoot(at, Heap::kHeapNumberMapRootIndex); |
3942 Split(eq, v0, Operand(at), if_true, if_false, fall_through); | 3946 Split(eq, v0, Operand(at), if_true, if_false, fall_through); |
(...skipping 41 matching lines...) |
3984 __ Branch(if_false, lt, a1, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE)); | 3988 __ Branch(if_false, lt, a1, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE)); |
3985 __ lbu(a1, FieldMemOperand(v0, Map::kInstanceTypeOffset)); | 3989 __ lbu(a1, FieldMemOperand(v0, Map::kInstanceTypeOffset)); |
3986 __ Branch(if_false, gt, a1, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE)); | 3990 __ Branch(if_false, gt, a1, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE)); |
3987 // Check for undetectable objects => false. | 3991 // Check for undetectable objects => false. |
3988 __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset)); | 3992 __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset)); |
3989 __ And(a1, a1, Operand(1 << Map::kIsUndetectable)); | 3993 __ And(a1, a1, Operand(1 << Map::kIsUndetectable)); |
3990 Split(eq, a1, Operand(zero_reg), if_true, if_false, fall_through); | 3994 Split(eq, a1, Operand(zero_reg), if_true, if_false, fall_through); |
3991 } else { | 3995 } else { |
3992 if (if_false != fall_through) __ jmp(if_false); | 3996 if (if_false != fall_through) __ jmp(if_false); |
3993 } | 3997 } |
3994 } | 3998 context()->Plug(if_true, if_false); |
3995 | |
3996 | |
3997 void FullCodeGenerator::EmitLiteralCompareUndefined(Expression* expr, | |
3998 Label* if_true, | |
3999 Label* if_false, | |
4000 Label* fall_through) { | |
4001 VisitForAccumulatorValue(expr); | |
4002 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false); | |
4003 | |
4004 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); | |
4005 Split(eq, v0, Operand(at), if_true, if_false, fall_through); | |
4006 } | 3999 } |
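
Note (reviewer context, not part of the change): in this hunk EmitLiteralCompareTypeof now takes only the expression and the check string and prepares/plugs its own test context, while the old EmitLiteralCompareUndefined on the left disappears (its job is taken over by the EmitLiteralCompareNil helper further down). For illustration only, a minimal stand-alone C++ sketch, with made-up type and constant names, of the checks the emitted code above performs for typeof x == "number" and for the visible tail of the typeof x == "object" branch: a value is "number" when it is a Smi or its map is the heap-number map, and the visible "object" tail requires an instance type inside the non-callable spec-object range with the map's Map::kIsUndetectable bit clear.

// Stand-alone sketch of the typeof checks emitted above; not V8 code.
#include <cstdio>

struct FakeValue {
  bool is_smi;               // JumpIfSmi(v0, ...)
  bool has_heap_number_map;  // map == Heap::kHeapNumberMapRootIndex
  int instance_type;         // Map::kInstanceTypeOffset
  bool is_undetectable;      // Map::kIsUndetectable bit
};

// Illustrative stand-ins for FIRST/LAST_NONCALLABLE_SPEC_OBJECT_TYPE.
const int kFirstNoncallable = 100;
const int kLastNoncallable = 200;

bool TypeofIsNumber(const FakeValue& v) {
  return v.is_smi || v.has_heap_number_map;
}

bool TypeofIsObjectTail(const FakeValue& v) {
  // Visible tail of the "object" branch: instance type must fall in the
  // non-callable spec-object range and the object must not be undetectable.
  if (v.is_smi) return false;
  if (v.instance_type < kFirstNoncallable) return false;
  if (v.instance_type > kLastNoncallable) return false;
  return !v.is_undetectable;
}

int main() {
  FakeValue smi = {true, false, 0, false};
  FakeValue plain_object = {false, false, 150, false};
  std::printf("%d %d\n", TypeofIsNumber(smi), TypeofIsObjectTail(plain_object));
  return 0;
}
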
4007 | 4000 |
4008 | 4001 |
4009 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) { | 4002 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) { |
4010 Comment cmnt(masm_, "[ CompareOperation"); | 4003 Comment cmnt(masm_, "[ CompareOperation"); |
4011 SetSourcePosition(expr->position()); | 4004 SetSourcePosition(expr->position()); |
4012 | 4005 |
| 4006 // First we try a fast inlined version of the compare when one of |
| 4007 // the operands is a literal. |
| 4008 if (TryLiteralCompare(expr)) return; |
| 4009 |
4013 // Always perform the comparison for its control flow. Pack the result | 4010 // Always perform the comparison for its control flow. Pack the result |
4014 // into the expression's context after the comparison is performed. | 4011 // into the expression's context after the comparison is performed. |
4015 | |
4016 Label materialize_true, materialize_false; | 4012 Label materialize_true, materialize_false; |
4017 Label* if_true = NULL; | 4013 Label* if_true = NULL; |
4018 Label* if_false = NULL; | 4014 Label* if_false = NULL; |
4019 Label* fall_through = NULL; | 4015 Label* fall_through = NULL; |
4020 context()->PrepareTest(&materialize_true, &materialize_false, | 4016 context()->PrepareTest(&materialize_true, &materialize_false, |
4021 &if_true, &if_false, &fall_through); | 4017 &if_true, &if_false, &fall_through); |
4022 | 4018 |
4023 // First we try a fast inlined version of the compare when one of | |
4024 // the operands is a literal. | |
4025 if (TryLiteralCompare(expr, if_true, if_false, fall_through)) { | |
4026 context()->Plug(if_true, if_false); | |
4027 return; | |
4028 } | |
4029 | |
4030 Token::Value op = expr->op(); | 4019 Token::Value op = expr->op(); |
4031 VisitForStackValue(expr->left()); | 4020 VisitForStackValue(expr->left()); |
4032 switch (op) { | 4021 switch (op) { |
4033 case Token::IN: | 4022 case Token::IN: |
4034 VisitForStackValue(expr->right()); | 4023 VisitForStackValue(expr->right()); |
4035 __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION); | 4024 __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION); |
4036 PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL); | 4025 PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL); |
4037 __ LoadRoot(t0, Heap::kTrueValueRootIndex); | 4026 __ LoadRoot(t0, Heap::kTrueValueRootIndex); |
4038 Split(eq, v0, Operand(t0), if_true, if_false, fall_through); | 4027 Split(eq, v0, Operand(t0), if_true, if_false, fall_through); |
4039 break; | 4028 break; |
(...skipping 67 matching lines...) |
4107 Split(cc, v0, Operand(zero_reg), if_true, if_false, fall_through); | 4096 Split(cc, v0, Operand(zero_reg), if_true, if_false, fall_through); |
4108 } | 4097 } |
4109 } | 4098 } |
4110 | 4099 |
4111 // Convert the result of the comparison into one expected for this | 4100 // Convert the result of the comparison into one expected for this |
4112 // expression's context. | 4101 // expression's context. |
4113 context()->Plug(if_true, if_false); | 4102 context()->Plug(if_true, if_false); |
4114 } | 4103 } |
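
Note (reviewer context, not part of the change): the reordered call above implies that the platform-independent TryLiteralCompare(expr) no longer receives branch targets and that the Emit* helpers now prepare and plug their own test context. A hedged sketch of what that shared dispatcher plausibly looks like after this change; it is not part of this hunk, it is a fragment of the shared full-codegen code rather than a stand-alone program, and the IsLiteralCompare* predicate names are assumptions.

// Sketch only; the real dispatcher lives in the shared full-codegen code.
bool FullCodeGenerator::TryLiteralCompare(CompareOperation* expr) {
  Expression* sub_expr;
  Handle<String> check;
  if (expr->IsLiteralCompareTypeof(&sub_expr, &check)) {  // assumed predicate
    EmitLiteralCompareTypeof(sub_expr, check);            // sets up its own test context
    return true;
  }
  if (expr->IsLiteralCompareUndefined(&sub_expr)) {       // assumed predicate
    EmitLiteralCompareNil(expr, sub_expr, kUndefinedValue);
    return true;
  }
  if (expr->IsLiteralCompareNull(&sub_expr)) {            // assumed predicate
    EmitLiteralCompareNil(expr, sub_expr, kNullValue);
    return true;
  }
  return false;  // fall back to the generic compare path
}
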
4115 | 4104 |
4116 | 4105 |
4117 void FullCodeGenerator::EmitLiteralCompareNull(Expression* expr, | 4106 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr, |
4118 bool is_strict, | 4107 Expression* sub_expr, |
4119 Label* if_true, | 4108 NilValue nil) { |
4120 Label* if_false, | 4109 Label materialize_true, materialize_false; |
4121 Label* fall_through) { | 4110 Label* if_true = NULL; |
4122 VisitForAccumulatorValue(expr); | 4111 Label* if_false = NULL; |
| 4112 Label* fall_through = NULL; |
| 4113 context()->PrepareTest(&materialize_true, &materialize_false, |
| 4114 &if_true, &if_false, &fall_through); |
| 4115 |
| 4116 VisitForAccumulatorValue(sub_expr); |
4123 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false); | 4117 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false); |
| 4118 Heap::RootListIndex nil_value = nil == kNullValue ? |
| 4119 Heap::kNullValueRootIndex : |
| 4120 Heap::kUndefinedValueRootIndex; |
4124 __ mov(a0, result_register()); | 4121 __ mov(a0, result_register()); |
4125 __ LoadRoot(a1, Heap::kNullValueRootIndex); | 4122 __ LoadRoot(a1, nil_value); |
4126 if (is_strict) { | 4123 if (expr->op() == Token::EQ_STRICT) { |
4127 Split(eq, a0, Operand(a1), if_true, if_false, fall_through); | 4124 Split(eq, a0, Operand(a1), if_true, if_false, fall_through); |
4128 } else { | 4125 } else { |
| 4126 Heap::RootListIndex other_nil_value = nil == kNullValue ? |
| 4127 Heap::kUndefinedValueRootIndex : |
| 4128 Heap::kNullValueRootIndex; |
4129 __ Branch(if_true, eq, a0, Operand(a1)); | 4129 __ Branch(if_true, eq, a0, Operand(a1)); |
4130 __ LoadRoot(a1, Heap::kUndefinedValueRootIndex); | 4130 __ LoadRoot(a1, other_nil_value); |
4131 __ Branch(if_true, eq, a0, Operand(a1)); | 4131 __ Branch(if_true, eq, a0, Operand(a1)); |
4132 __ And(at, a0, Operand(kSmiTagMask)); | 4132 __ And(at, a0, Operand(kSmiTagMask)); |
4133 __ Branch(if_false, eq, at, Operand(zero_reg)); | 4133 __ Branch(if_false, eq, at, Operand(zero_reg)); |
4134 // It can be an undetectable object. | 4134 // It can be an undetectable object. |
4135 __ lw(a1, FieldMemOperand(a0, HeapObject::kMapOffset)); | 4135 __ lw(a1, FieldMemOperand(a0, HeapObject::kMapOffset)); |
4136 __ lbu(a1, FieldMemOperand(a1, Map::kBitFieldOffset)); | 4136 __ lbu(a1, FieldMemOperand(a1, Map::kBitFieldOffset)); |
4137 __ And(a1, a1, Operand(1 << Map::kIsUndetectable)); | 4137 __ And(a1, a1, Operand(1 << Map::kIsUndetectable)); |
4138 Split(ne, a1, Operand(zero_reg), if_true, if_false, fall_through); | 4138 Split(ne, a1, Operand(zero_reg), if_true, if_false, fall_through); |
4139 } | 4139 } |
| 4140 context()->Plug(if_true, if_false); |
4140 } | 4141 } |
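
Note (reviewer context, not part of the change): a minimal stand-alone C++ sketch, with made-up enum names, of the decision the unified nil compare above encodes. Strict equality matches only the requested nil root; sloppy equality accepts null, undefined, and undetectable objects, and rejects Smis outright.

// Stand-alone sketch of the branch logic above; not V8 code.
#include <cstdio>

enum class Kind { kNull, kUndefined, kSmi, kUndetectableObject, kOtherObject };

bool CompareNil(Kind value, bool strict, bool nil_is_null) {
  Kind nil = nil_is_null ? Kind::kNull : Kind::kUndefined;  // nil_value root
  if (strict) return value == nil;                          // Token::EQ_STRICT
  if (value == Kind::kNull || value == Kind::kUndefined) {
    return true;                                            // either nil root matches
  }
  if (value == Kind::kSmi) return false;                    // kSmiTagMask check
  return value == Kind::kUndetectableObject;                // Map::kIsUndetectable bit
}

int main() {
  std::printf("%d\n", CompareNil(Kind::kUndefined, false, true));  // sloppy x == null: 1
  std::printf("%d\n", CompareNil(Kind::kUndefined, true, true));   // strict x === null: 0
  return 0;
}
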
4141 | 4142 |
4142 | 4143 |
4143 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) { | 4144 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) { |
4144 __ lw(v0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); | 4145 __ lw(v0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); |
4145 context()->Plug(v0); | 4146 context()->Plug(v0); |
4146 } | 4147 } |
4147 | 4148 |
4148 | 4149 |
4149 Register FullCodeGenerator::result_register() { | 4150 Register FullCodeGenerator::result_register() { |
(...skipping 93 matching lines...) |
4243 *context_length = 0; | 4244 *context_length = 0; |
4244 return previous_; | 4245 return previous_; |
4245 } | 4246 } |
4246 | 4247 |
4247 | 4248 |
4248 #undef __ | 4249 #undef __ |
4249 | 4250 |
4250 } } // namespace v8::internal | 4251 } } // namespace v8::internal |
4251 | 4252 |
4252 #endif // V8_TARGET_ARCH_MIPS | 4253 #endif // V8_TARGET_ARCH_MIPS |