| OLD | NEW |
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 44 matching lines...) |
| 55 | 55 |
| 56 | 56 |
| 57 static unsigned GetPropertyId(Property* property) { | 57 static unsigned GetPropertyId(Property* property) { |
| 58 return property->id(); | 58 return property->id(); |
| 59 } | 59 } |
| 60 | 60 |
| 61 | 61 |
| 62 // A patch site is a location in the code which it is possible to patch. This | 62 // A patch site is a location in the code which it is possible to patch. This |
| 63 // class has a number of methods to emit the code which is patchable and the | 63 // class has a number of methods to emit the code which is patchable and the |
| 64 // method EmitPatchInfo to record a marker back to the patchable code. This | 64 // method EmitPatchInfo to record a marker back to the patchable code. This |
| 65 // marker is a andi at, rx, #yyy instruction, and x * 0x0000ffff + yyy (raw 16 | 65 // marker is an andi zero_reg, rx, #yyyy instruction, and rx * 0x0000ffff + yyyy |
| 66 // bit immediate value is used) is the delta from the pc to the first | 66 // (raw 16 bit immediate value is used) is the delta from the pc to the first |
| 67 // instruction of the patchable code. | 67 // instruction of the patchable code. |
| 68 // The marker instruction is effectively a NOP (dest is zero_reg) and will |
| 69 // never be emitted by normal code. |
| 68 class JumpPatchSite BASE_EMBEDDED { | 70 class JumpPatchSite BASE_EMBEDDED { |
| 69 public: | 71 public: |
| 70 explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) { | 72 explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) { |
| 71 #ifdef DEBUG | 73 #ifdef DEBUG |
| 72 info_emitted_ = false; | 74 info_emitted_ = false; |
| 73 #endif | 75 #endif |
| 74 } | 76 } |
| 75 | 77 |
| 76 ~JumpPatchSite() { | 78 ~JumpPatchSite() { |
| 77 ASSERT(patch_site_.is_bound() == info_emitted_); | 79 ASSERT(patch_site_.is_bound() == info_emitted_); |
| (...skipping 18 matching lines...) |
| 96 __ bind(&patch_site_); | 98 __ bind(&patch_site_); |
| 97 __ andi(at, reg, 0); | 99 __ andi(at, reg, 0); |
| 98 // Never taken before patched. | 100 // Never taken before patched. |
| 99 __ Branch(target, ne, at, Operand(zero_reg)); | 101 __ Branch(target, ne, at, Operand(zero_reg)); |
| 100 } | 102 } |
| 101 | 103 |
| 102 void EmitPatchInfo() { | 104 void EmitPatchInfo() { |
| 103 if (patch_site_.is_bound()) { | 105 if (patch_site_.is_bound()) { |
| 104 int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_); | 106 int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_); |
| 105 Register reg = Register::from_code(delta_to_patch_site / kImm16Mask); | 107 Register reg = Register::from_code(delta_to_patch_site / kImm16Mask); |
| 106 __ andi(at, reg, delta_to_patch_site % kImm16Mask); | 108 __ andi(zero_reg, reg, delta_to_patch_site % kImm16Mask); |
| 107 #ifdef DEBUG | 109 #ifdef DEBUG |
| 108 info_emitted_ = true; | 110 info_emitted_ = true; |
| 109 #endif | 111 #endif |
| 110 } else { | 112 } else { |
| 111 __ nop(); // Signals no inlined code. | 113 __ nop(); // Signals no inlined code. |
| 112 } | 114 } |
| 113 } | 115 } |
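
A worked example of the encoding implemented by EmitPatchInfo above (and described in the class comment) may help. This is a sketch only: the delta value is made up, and it assumes kImm16Mask == 0xffff, as the "rx * 0x0000ffff + yyyy" wording implies.

    // EmitPatchInfo splits the instruction-count delta across the rs register
    // code and the 16-bit immediate of the marker "andi zero_reg, rx, #yyyy".
    int delta_to_patch_site = 70000;                  // hypothetical delta
    int reg_code = delta_to_patch_site / kImm16Mask;  // 70000 / 0xffff == 1    -> rx
    int imm16    = delta_to_patch_site % kImm16Mask;  // 70000 % 0xffff == 4465 -> yyyy
    // The patching side recovers the same delta from the marker instruction:
    int decoded  = reg_code * kImm16Mask + imm16;     // == 70000

In the common case the delta fits in 16 bits, so reg_code is 0 and rx is zero_reg; either way the destination is zero_reg, which is what keeps the marker a NOP.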
| 114 | 116 |
| 115 private: | 117 private: |
| 116 MacroAssembler* masm_; | 118 MacroAssembler* masm_; |
| (...skipping 38 matching lines...) |
| 155 // function calls. | 157 // function calls. |
| 156 if (info->is_strict_mode() || info->is_native()) { | 158 if (info->is_strict_mode() || info->is_native()) { |
| 157 Label ok; | 159 Label ok; |
| 158 __ Branch(&ok, eq, t1, Operand(zero_reg)); | 160 __ Branch(&ok, eq, t1, Operand(zero_reg)); |
| 159 int receiver_offset = info->scope()->num_parameters() * kPointerSize; | 161 int receiver_offset = info->scope()->num_parameters() * kPointerSize; |
| 160 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex); | 162 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex); |
| 161 __ sw(a2, MemOperand(sp, receiver_offset)); | 163 __ sw(a2, MemOperand(sp, receiver_offset)); |
| 162 __ bind(&ok); | 164 __ bind(&ok); |
| 163 } | 165 } |
| 164 | 166 |
| 167 // Open a frame scope to indicate that there is a frame on the stack. The |
| 168 // MANUAL indicates that the scope shouldn't actually generate code to set up |
| 169 // the frame (that is done below). |
| 170 FrameScope frame_scope(masm_, StackFrame::MANUAL); |
| 171 |
| 165 int locals_count = info->scope()->num_stack_slots(); | 172 int locals_count = info->scope()->num_stack_slots(); |
| 166 | 173 |
| 167 __ Push(ra, fp, cp, a1); | 174 __ Push(ra, fp, cp, a1); |
| 168 if (locals_count > 0) { | 175 if (locals_count > 0) { |
| 169 // Load undefined value here, so the value is ready for the loop | 176 // Load undefined value here, so the value is ready for the loop |
| 170 // below. | 177 // below. |
| 171 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); | 178 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); |
| 172 } | 179 } |
| 173 // Adjust fp to point to caller's fp. | 180 // Adjust fp to point to caller's fp. |
| 174 __ Addu(fp, sp, Operand(2 * kPointerSize)); | 181 __ Addu(fp, sp, Operand(2 * kPointerSize)); |
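
For orientation, the slots those pushes create can be sketched as offsets from the fp established by the Addu above. The constant names below are illustrative only; the authoritative layout lives in the frame-constants headers, not in this diff.

    // Stack after "Push(ra, fp, cp, a1)" and "Addu(fp, sp, 2 * kPointerSize)":
    static const int kSavedRaOffset  =  1 * kPointerSize;  // caller's return address
    static const int kCallerFpOffset =  0 * kPointerSize;  // caller's fp; the new fp points here
    static const int kContextOffset  = -1 * kPointerSize;  // cp
    static const int kClosureOffset  = -2 * kPointerSize;  // a1 (the closure); sp points here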
| (...skipping 128 matching lines...) |
| 303 } | 310 } |
| 304 | 311 |
| 305 | 312 |
| 306 void FullCodeGenerator::ClearAccumulator() { | 313 void FullCodeGenerator::ClearAccumulator() { |
| 307 ASSERT(Smi::FromInt(0) == 0); | 314 ASSERT(Smi::FromInt(0) == 0); |
| 308 __ mov(v0, zero_reg); | 315 __ mov(v0, zero_reg); |
| 309 } | 316 } |
| 310 | 317 |
| 311 | 318 |
| 312 void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt) { | 319 void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt) { |
| 320 // The generated code is used in Deoptimizer::PatchStackCheckCodeAt so we need |
| 321 // to make sure it is constant. Branch may emit a skip-or-jump sequence |
| 322 // instead of the normal Branch. It seems that the "skip" part of that |
| 323 // sequence is about as long as this Branch would be so it is safe to ignore |
| 324 // that. |
| 325 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_); |
| 313 Comment cmnt(masm_, "[ Stack check"); | 326 Comment cmnt(masm_, "[ Stack check"); |
| 314 Label ok; | 327 Label ok; |
| 315 __ LoadRoot(t0, Heap::kStackLimitRootIndex); | 328 __ LoadRoot(t0, Heap::kStackLimitRootIndex); |
| 316 __ Branch(&ok, hs, sp, Operand(t0)); | 329 __ sltu(at, sp, t0); |
| 330 __ beq(at, zero_reg, &ok); |
| 331 // CallStub will emit a li t9, ... first, so it is safe to use the delay slot. |
| 317 StackCheckStub stub; | 332 StackCheckStub stub; |
| 333 __ CallStub(&stub); |
| 318 // Record a mapping of this PC offset to the OSR id. This is used to find | 334 // Record a mapping of this PC offset to the OSR id. This is used to find |
| 319 // the AST id from the unoptimized code in order to use it as a key into | 335 // the AST id from the unoptimized code in order to use it as a key into |
| 320 // the deoptimization input data found in the optimized code. | 336 // the deoptimization input data found in the optimized code. |
| 321 RecordStackCheck(stmt->OsrEntryId()); | 337 RecordStackCheck(stmt->OsrEntryId()); |
| 322 | 338 |
| 323 __ CallStub(&stub); | |
| 324 __ bind(&ok); | 339 __ bind(&ok); |
| 325 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS); | 340 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS); |
| 326 // Record a mapping of the OSR id to this PC. This is used if the OSR | 341 // Record a mapping of the OSR id to this PC. This is used if the OSR |
| 327 // entry becomes the target of a bailout. We don't expect it to be, but | 342 // entry becomes the target of a bailout. We don't expect it to be, but |
| 328 // we want it to work if it is. | 343 // we want it to work if it is. |
| 329 PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS); | 344 PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS); |
| 330 } | 345 } |
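
A minimal C++ sketch of the control flow the sltu/beq pair in EmitStackCheck emits (the names here are illustrative, not V8 API): the stub call is only reached when sp has dropped below the stack limit, and spelling the branch out as two fixed instructions keeps the sequence at the constant length the patching comment above depends on.

    #include <cstdint>

    static void StackCheckSketch(uintptr_t sp, uintptr_t stack_limit,
                                 void (*call_stack_check_stub)()) {
      bool below_limit = sp < stack_limit;   // sltu at, sp, t0
      if (below_limit) {                     // beq at, zero_reg, &ok (not taken on overflow)
        call_stack_check_stub();             // __ CallStub(&stub); its li t9, ... fills the delay slot
      }
      // &ok: execution continues here in either case.
    }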
| 331 | 346 |
| 332 | 347 |
| 333 void FullCodeGenerator::EmitReturnSequence() { | 348 void FullCodeGenerator::EmitReturnSequence() { |
| (...skipping 3580 matching lines...) |
| 3914 __ bind(&done); | 3929 __ bind(&done); |
| 3915 | 3930 |
| 3916 context()->Plug(v0); | 3931 context()->Plug(v0); |
| 3917 } else { | 3932 } else { |
| 3918 // This expression cannot throw a reference error at the top level. | 3933 // This expression cannot throw a reference error at the top level. |
| 3919 VisitInCurrentContext(expr); | 3934 VisitInCurrentContext(expr); |
| 3920 } | 3935 } |
| 3921 } | 3936 } |
| 3922 | 3937 |
| 3923 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr, | 3938 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr, |
| 3924 Handle<String> check, | 3939 Handle<String> check) { |
| 3925 Label* if_true, | 3940 Label materialize_true, materialize_false; |
| 3926 Label* if_false, | 3941 Label* if_true = NULL; |
| 3927 Label* fall_through) { | 3942 Label* if_false = NULL; |
| 3943 Label* fall_through = NULL; |
| 3944 context()->PrepareTest(&materialize_true, &materialize_false, |
| 3945 &if_true, &if_false, &fall_through); |
| 3946 |
| 3928 { AccumulatorValueContext context(this); | 3947 { AccumulatorValueContext context(this); |
| 3929 VisitForTypeofValue(expr); | 3948 VisitForTypeofValue(expr); |
| 3930 } | 3949 } |
| 3931 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false); | 3950 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false); |
| 3932 | 3951 |
| 3933 if (check->Equals(isolate()->heap()->number_symbol())) { | 3952 if (check->Equals(isolate()->heap()->number_symbol())) { |
| 3934 __ JumpIfSmi(v0, if_true); | 3953 __ JumpIfSmi(v0, if_true); |
| 3935 __ lw(v0, FieldMemOperand(v0, HeapObject::kMapOffset)); | 3954 __ lw(v0, FieldMemOperand(v0, HeapObject::kMapOffset)); |
| 3936 __ LoadRoot(at, Heap::kHeapNumberMapRootIndex); | 3955 __ LoadRoot(at, Heap::kHeapNumberMapRootIndex); |
| 3937 Split(eq, v0, Operand(at), if_true, if_false, fall_through); | 3956 Split(eq, v0, Operand(at), if_true, if_false, fall_through); |
| (...skipping 41 matching lines...) |
| 3979 __ Branch(if_false, lt, a1, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE)); | 3998 __ Branch(if_false, lt, a1, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE)); |
| 3980 __ lbu(a1, FieldMemOperand(v0, Map::kInstanceTypeOffset)); | 3999 __ lbu(a1, FieldMemOperand(v0, Map::kInstanceTypeOffset)); |
| 3981 __ Branch(if_false, gt, a1, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE)); | 4000 __ Branch(if_false, gt, a1, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE)); |
| 3982 // Check for undetectable objects => false. | 4001 // Check for undetectable objects => false. |
| 3983 __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset)); | 4002 __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset)); |
| 3984 __ And(a1, a1, Operand(1 << Map::kIsUndetectable)); | 4003 __ And(a1, a1, Operand(1 << Map::kIsUndetectable)); |
| 3985 Split(eq, a1, Operand(zero_reg), if_true, if_false, fall_through); | 4004 Split(eq, a1, Operand(zero_reg), if_true, if_false, fall_through); |
| 3986 } else { | 4005 } else { |
| 3987 if (if_false != fall_through) __ jmp(if_false); | 4006 if (if_false != fall_through) __ jmp(if_false); |
| 3988 } | 4007 } |
| 3989 } | 4008 context()->Plug(if_true, if_false); |
| 3990 | |
| 3991 | |
| 3992 void FullCodeGenerator::EmitLiteralCompareUndefined(Expression* expr, | |
| 3993 Label* if_true, | |
| 3994 Label* if_false, | |
| 3995 Label* fall_through) { | |
| 3996 VisitForAccumulatorValue(expr); | |
| 3997 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false); | |
| 3998 | |
| 3999 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); | |
| 4000 Split(eq, v0, Operand(at), if_true, if_false, fall_through); | |
| 4001 } | 4009 } |
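
Restated as a sketch, the two typeof arms visible in this hunk (the "number" check at the top and what appears to be the "object" check after the skipped lines) reduce to the following; the helper names are illustrative, not V8 API.

    // typeof x == "number": a smi, or a heap object carrying the heap-number map.
    static bool TypeofIsNumberSketch(bool is_smi, bool has_heap_number_map) {
      return is_smi || has_heap_number_map;
    }

    // typeof x == "object", as far as this hunk shows it: instance type inside
    // the non-callable spec-object range and the undetectable bit clear.
    static bool TypeofIsObjectSketch(int instance_type, bool is_undetectable,
                                     int first_noncallable, int last_noncallable) {
      return instance_type >= first_noncallable &&
             instance_type <= last_noncallable &&
             !is_undetectable;
    }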
| 4002 | 4010 |
| 4003 | 4011 |
| 4004 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) { | 4012 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) { |
| 4005 Comment cmnt(masm_, "[ CompareOperation"); | 4013 Comment cmnt(masm_, "[ CompareOperation"); |
| 4006 SetSourcePosition(expr->position()); | 4014 SetSourcePosition(expr->position()); |
| 4007 | 4015 |
| 4016 // First we try a fast inlined version of the compare when one of |
| 4017 // the operands is a literal. |
| 4018 if (TryLiteralCompare(expr)) return; |
| 4019 |
| 4008 // Always perform the comparison for its control flow. Pack the result | 4020 // Always perform the comparison for its control flow. Pack the result |
| 4009 // into the expression's context after the comparison is performed. | 4021 // into the expression's context after the comparison is performed. |
| 4010 | |
| 4011 Label materialize_true, materialize_false; | 4022 Label materialize_true, materialize_false; |
| 4012 Label* if_true = NULL; | 4023 Label* if_true = NULL; |
| 4013 Label* if_false = NULL; | 4024 Label* if_false = NULL; |
| 4014 Label* fall_through = NULL; | 4025 Label* fall_through = NULL; |
| 4015 context()->PrepareTest(&materialize_true, &materialize_false, | 4026 context()->PrepareTest(&materialize_true, &materialize_false, |
| 4016 &if_true, &if_false, &fall_through); | 4027 &if_true, &if_false, &fall_through); |
| 4017 | 4028 |
| 4018 // First we try a fast inlined version of the compare when one of | |
| 4019 // the operands is a literal. | |
| 4020 if (TryLiteralCompare(expr, if_true, if_false, fall_through)) { | |
| 4021 context()->Plug(if_true, if_false); | |
| 4022 return; | |
| 4023 } | |
| 4024 | |
| 4025 Token::Value op = expr->op(); | 4029 Token::Value op = expr->op(); |
| 4026 VisitForStackValue(expr->left()); | 4030 VisitForStackValue(expr->left()); |
| 4027 switch (op) { | 4031 switch (op) { |
| 4028 case Token::IN: | 4032 case Token::IN: |
| 4029 VisitForStackValue(expr->right()); | 4033 VisitForStackValue(expr->right()); |
| 4030 __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION); | 4034 __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION); |
| 4031 PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL); | 4035 PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL); |
| 4032 __ LoadRoot(t0, Heap::kTrueValueRootIndex); | 4036 __ LoadRoot(t0, Heap::kTrueValueRootIndex); |
| 4033 Split(eq, v0, Operand(t0), if_true, if_false, fall_through); | 4037 Split(eq, v0, Operand(t0), if_true, if_false, fall_through); |
| 4034 break; | 4038 break; |
| 4035 | 4039 |
| 4036 case Token::INSTANCEOF: { | 4040 case Token::INSTANCEOF: { |
| 4037 VisitForStackValue(expr->right()); | 4041 VisitForStackValue(expr->right()); |
| 4038 InstanceofStub stub(InstanceofStub::kNoFlags); | 4042 InstanceofStub stub(InstanceofStub::kNoFlags); |
| 4039 __ CallStub(&stub); | 4043 __ CallStub(&stub); |
| 4040 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false); | 4044 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false); |
| 4041 // The stub returns 0 for true. | 4045 // The stub returns 0 for true. |
| 4042 Split(eq, v0, Operand(zero_reg), if_true, if_false, fall_through); | 4046 Split(eq, v0, Operand(zero_reg), if_true, if_false, fall_through); |
| 4043 break; | 4047 break; |
| 4044 } | 4048 } |
| 4045 | 4049 |
| 4046 default: { | 4050 default: { |
| 4047 VisitForAccumulatorValue(expr->right()); | 4051 VisitForAccumulatorValue(expr->right()); |
| 4048 Condition cc = eq; | 4052 Condition cc = eq; |
| 4049 bool strict = false; | |
| 4050 switch (op) { | 4053 switch (op) { |
| 4051 case Token::EQ_STRICT: | 4054 case Token::EQ_STRICT: |
| 4052 strict = true; | |
| 4053 // Fall through. | |
| 4054 case Token::EQ: | 4055 case Token::EQ: |
| 4055 cc = eq; | 4056 cc = eq; |
| 4056 __ mov(a0, result_register()); | 4057 __ mov(a0, result_register()); |
| 4057 __ pop(a1); | 4058 __ pop(a1); |
| 4058 break; | 4059 break; |
| 4059 case Token::LT: | 4060 case Token::LT: |
| 4060 cc = lt; | 4061 cc = lt; |
| 4061 __ mov(a0, result_register()); | 4062 __ mov(a0, result_register()); |
| 4062 __ pop(a1); | 4063 __ pop(a1); |
| 4063 break; | 4064 break; |
| (...skipping 38 matching lines...) |
| 4102 Split(cc, v0, Operand(zero_reg), if_true, if_false, fall_through); | 4103 Split(cc, v0, Operand(zero_reg), if_true, if_false, fall_through); |
| 4103 } | 4104 } |
| 4104 } | 4105 } |
| 4105 | 4106 |
| 4106 // Convert the result of the comparison into one expected for this | 4107 // Convert the result of the comparison into one expected for this |
| 4107 // expression's context. | 4108 // expression's context. |
| 4108 context()->Plug(if_true, if_false); | 4109 context()->Plug(if_true, if_false); |
| 4109 } | 4110 } |
| 4110 | 4111 |
| 4111 | 4112 |
| 4112 void FullCodeGenerator::VisitCompareToNull(CompareToNull* expr) { | 4113 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr, |
| 4113 Comment cmnt(masm_, "[ CompareToNull"); | 4114 Expression* sub_expr, |
| 4115 NilValue nil) { |
| 4114 Label materialize_true, materialize_false; | 4116 Label materialize_true, materialize_false; |
| 4115 Label* if_true = NULL; | 4117 Label* if_true = NULL; |
| 4116 Label* if_false = NULL; | 4118 Label* if_false = NULL; |
| 4117 Label* fall_through = NULL; | 4119 Label* fall_through = NULL; |
| 4118 context()->PrepareTest(&materialize_true, &materialize_false, | 4120 context()->PrepareTest(&materialize_true, &materialize_false, |
| 4119 &if_true, &if_false, &fall_through); | 4121 &if_true, &if_false, &fall_through); |
| 4120 | 4122 |
| 4121 VisitForAccumulatorValue(expr->expression()); | 4123 VisitForAccumulatorValue(sub_expr); |
| 4122 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false); | 4124 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false); |
| 4125 Heap::RootListIndex nil_value = nil == kNullValue ? |
| 4126 Heap::kNullValueRootIndex : |
| 4127 Heap::kUndefinedValueRootIndex; |
| 4123 __ mov(a0, result_register()); | 4128 __ mov(a0, result_register()); |
| 4124 __ LoadRoot(a1, Heap::kNullValueRootIndex); | 4129 __ LoadRoot(a1, nil_value); |
| 4125 if (expr->is_strict()) { | 4130 if (expr->op() == Token::EQ_STRICT) { |
| 4126 Split(eq, a0, Operand(a1), if_true, if_false, fall_through); | 4131 Split(eq, a0, Operand(a1), if_true, if_false, fall_through); |
| 4127 } else { | 4132 } else { |
| 4133 Heap::RootListIndex other_nil_value = nil == kNullValue ? |
| 4134 Heap::kUndefinedValueRootIndex : |
| 4135 Heap::kNullValueRootIndex; |
| 4128 __ Branch(if_true, eq, a0, Operand(a1)); | 4136 __ Branch(if_true, eq, a0, Operand(a1)); |
| 4129 __ LoadRoot(a1, Heap::kUndefinedValueRootIndex); | 4137 __ LoadRoot(a1, other_nil_value); |
| 4130 __ Branch(if_true, eq, a0, Operand(a1)); | 4138 __ Branch(if_true, eq, a0, Operand(a1)); |
| 4131 __ And(at, a0, Operand(kSmiTagMask)); | 4139 __ And(at, a0, Operand(kSmiTagMask)); |
| 4132 __ Branch(if_false, eq, at, Operand(zero_reg)); | 4140 __ Branch(if_false, eq, at, Operand(zero_reg)); |
| 4133 // It can be an undetectable object. | 4141 // It can be an undetectable object. |
| 4134 __ lw(a1, FieldMemOperand(a0, HeapObject::kMapOffset)); | 4142 __ lw(a1, FieldMemOperand(a0, HeapObject::kMapOffset)); |
| 4135 __ lbu(a1, FieldMemOperand(a1, Map::kBitFieldOffset)); | 4143 __ lbu(a1, FieldMemOperand(a1, Map::kBitFieldOffset)); |
| 4136 __ And(a1, a1, Operand(1 << Map::kIsUndetectable)); | 4144 __ And(a1, a1, Operand(1 << Map::kIsUndetectable)); |
| 4137 Split(ne, a1, Operand(zero_reg), if_true, if_false, fall_through); | 4145 Split(ne, a1, Operand(zero_reg), if_true, if_false, fall_through); |
| 4138 } | 4146 } |
| 4139 context()->Plug(if_true, if_false); | 4147 context()->Plug(if_true, if_false); |
| (...skipping 103 matching lines...) |
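
The branch structure in EmitLiteralCompareNil above amounts to the following decision, sketched with an illustrative helper (not V8 API): strict equality accepts only the nil literal being compared against, while sloppy equality also accepts the other nil value and undetectable objects.

    static bool CompareToNilSketch(bool strict, bool nil_is_null,
                                   bool is_null, bool is_undefined,
                                   bool is_undetectable_object) {
      if (strict) {
        // Token::EQ_STRICT: only the exact nil value loaded into a1 matches.
        return nil_is_null ? is_null : is_undefined;
      }
      // Token::EQ: null, undefined and undetectable objects all compare equal.
      return is_null || is_undefined || is_undetectable_object;
    }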
| 4243 *context_length = 0; | 4251 *context_length = 0; |
| 4244 return previous_; | 4252 return previous_; |
| 4245 } | 4253 } |
| 4246 | 4254 |
| 4247 | 4255 |
| 4248 #undef __ | 4256 #undef __ |
| 4249 | 4257 |
| 4250 } } // namespace v8::internal | 4258 } } // namespace v8::internal |
| 4251 | 4259 |
| 4252 #endif // V8_TARGET_ARCH_MIPS | 4260 #endif // V8_TARGET_ARCH_MIPS |