Chromium Code Reviews

Side by Side Diff: src/mips/code-stubs-mips.cc

Issue 1693833002: Remove strong mode support from binary operations. (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Addressed comments. Created 4 years, 10 months ago
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #if V8_TARGET_ARCH_MIPS 5 #if V8_TARGET_ARCH_MIPS
6 6
7 #include "src/base/bits.h" 7 #include "src/base/bits.h"
8 #include "src/bootstrapper.h" 8 #include "src/bootstrapper.h"
9 #include "src/code-stubs.h" 9 #include "src/code-stubs.h"
10 #include "src/codegen.h" 10 #include "src/codegen.h"
(...skipping 73 matching lines...)
84 84
85 85
86 void InternalArrayNArgumentsConstructorStub::InitializeDescriptor( 86 void InternalArrayNArgumentsConstructorStub::InitializeDescriptor(
87 CodeStubDescriptor* descriptor) { 87 CodeStubDescriptor* descriptor) {
88 InitializeInternalArrayConstructorDescriptor(isolate(), descriptor, -1); 88 InitializeInternalArrayConstructorDescriptor(isolate(), descriptor, -1);
89 } 89 }
90 90
91 91
92 #define __ ACCESS_MASM(masm) 92 #define __ ACCESS_MASM(masm)
93 93
94
95 static void EmitIdenticalObjectComparison(MacroAssembler* masm, Label* slow, 94 static void EmitIdenticalObjectComparison(MacroAssembler* masm, Label* slow,
96 Condition cc, Strength strength); 95 Condition cc);
97 static void EmitSmiNonsmiComparison(MacroAssembler* masm, 96 static void EmitSmiNonsmiComparison(MacroAssembler* masm,
98 Register lhs, 97 Register lhs,
99 Register rhs, 98 Register rhs,
100 Label* rhs_not_nan, 99 Label* rhs_not_nan,
101 Label* slow, 100 Label* slow,
102 bool strict); 101 bool strict);
103 static void EmitStrictTwoHeapObjectCompare(MacroAssembler* masm, 102 static void EmitStrictTwoHeapObjectCompare(MacroAssembler* masm,
104 Register lhs, 103 Register lhs,
105 Register rhs); 104 Register rhs);
106 105
(...skipping 161 matching lines...)
268 267
269 __ Pop(scratch, scratch2, scratch3); 268 __ Pop(scratch, scratch2, scratch3);
270 __ Ret(); 269 __ Ret();
271 } 270 }
272 271
273 272
274 // Handle the case where the lhs and rhs are the same object. 273 // Handle the case where the lhs and rhs are the same object.
275 // Equality is almost reflexive (everything but NaN), so this is a test 274 // Equality is almost reflexive (everything but NaN), so this is a test
276 // for "identity and not NaN". 275 // for "identity and not NaN".
277 static void EmitIdenticalObjectComparison(MacroAssembler* masm, Label* slow, 276 static void EmitIdenticalObjectComparison(MacroAssembler* masm, Label* slow,
278 Condition cc, Strength strength) { 277 Condition cc) {
279 Label not_identical; 278 Label not_identical;
280 Label heap_number, return_equal; 279 Label heap_number, return_equal;
281 Register exp_mask_reg = t5; 280 Register exp_mask_reg = t5;
282 281
283 __ Branch(&not_identical, ne, a0, Operand(a1)); 282 __ Branch(&not_identical, ne, a0, Operand(a1));
284 283
285 __ li(exp_mask_reg, Operand(HeapNumber::kExponentMask)); 284 __ li(exp_mask_reg, Operand(HeapNumber::kExponentMask));
286 285
287 // Test for NaN. Sadly, we can't just compare to Factory::nan_value(), 286 // Test for NaN. Sadly, we can't just compare to Factory::nan_value(),
288 // so we do the second best thing - test it ourselves. 287 // so we do the second best thing - test it ourselves.
289 // They are both equal and they are not both Smis so both of them are not 288 // They are both equal and they are not both Smis so both of them are not
290 // Smis. If it's not a heap number, then return equal. 289 // Smis. If it's not a heap number, then return equal.
291 __ GetObjectType(a0, t4, t4); 290 __ GetObjectType(a0, t4, t4);
292 if (cc == less || cc == greater) { 291 if (cc == less || cc == greater) {
293 // Call runtime on identical JSObjects. 292 // Call runtime on identical JSObjects.
294 __ Branch(slow, greater, t4, Operand(FIRST_JS_RECEIVER_TYPE)); 293 __ Branch(slow, greater, t4, Operand(FIRST_JS_RECEIVER_TYPE));
295 // Call runtime on identical symbols since we need to throw a TypeError. 294 // Call runtime on identical symbols since we need to throw a TypeError.
296 __ Branch(slow, eq, t4, Operand(SYMBOL_TYPE)); 295 __ Branch(slow, eq, t4, Operand(SYMBOL_TYPE));
297 // Call runtime on identical SIMD values since we must throw a TypeError. 296 // Call runtime on identical SIMD values since we must throw a TypeError.
298 __ Branch(slow, eq, t4, Operand(SIMD128_VALUE_TYPE)); 297 __ Branch(slow, eq, t4, Operand(SIMD128_VALUE_TYPE));
299 if (is_strong(strength)) {
300 // Call the runtime on anything that is converted in the semantics, since
301 // we need to throw a TypeError. Smis have already been ruled out.
302 __ Branch(&return_equal, eq, t4, Operand(HEAP_NUMBER_TYPE));
303 __ And(t4, t4, Operand(kIsNotStringMask));
304 __ Branch(slow, ne, t4, Operand(zero_reg));
305 }
306 } else { 298 } else {
307 __ Branch(&heap_number, eq, t4, Operand(HEAP_NUMBER_TYPE)); 299 __ Branch(&heap_number, eq, t4, Operand(HEAP_NUMBER_TYPE));
308 // Comparing JS objects with <=, >= is complicated. 300 // Comparing JS objects with <=, >= is complicated.
309 if (cc != eq) { 301 if (cc != eq) {
310 __ Branch(slow, greater, t4, Operand(FIRST_JS_RECEIVER_TYPE)); 302 __ Branch(slow, greater, t4, Operand(FIRST_JS_RECEIVER_TYPE));
311 // Call runtime on identical symbols since we need to throw a TypeError. 303 // Call runtime on identical symbols since we need to throw a TypeError.
312 __ Branch(slow, eq, t4, Operand(SYMBOL_TYPE)); 304 __ Branch(slow, eq, t4, Operand(SYMBOL_TYPE));
313 // Call runtime on identical SIMD values since we must throw a TypeError. 305 // Call runtime on identical SIMD values since we must throw a TypeError.
314 __ Branch(slow, eq, t4, Operand(SIMD128_VALUE_TYPE)); 306 __ Branch(slow, eq, t4, Operand(SIMD128_VALUE_TYPE));
315 if (is_strong(strength)) {
316 // Call the runtime on anything that is converted in the semantics,
317 // since we need to throw a TypeError. Smis and heap numbers have
318 // already been ruled out.
319 __ And(t4, t4, Operand(kIsNotStringMask));
320 __ Branch(slow, ne, t4, Operand(zero_reg));
321 }
322 // Normally here we fall through to return_equal, but undefined is 307 // Normally here we fall through to return_equal, but undefined is
323 // special: (undefined == undefined) == true, but 308 // special: (undefined == undefined) == true, but
324 // (undefined <= undefined) == false! See ECMAScript 11.8.5. 309 // (undefined <= undefined) == false! See ECMAScript 11.8.5.
325 if (cc == less_equal || cc == greater_equal) { 310 if (cc == less_equal || cc == greater_equal) {
326 __ Branch(&return_equal, ne, t4, Operand(ODDBALL_TYPE)); 311 __ Branch(&return_equal, ne, t4, Operand(ODDBALL_TYPE));
327 __ LoadRoot(t2, Heap::kUndefinedValueRootIndex); 312 __ LoadRoot(t2, Heap::kUndefinedValueRootIndex);
328 __ Branch(&return_equal, ne, a0, Operand(t2)); 313 __ Branch(&return_equal, ne, a0, Operand(t2));
329 DCHECK(is_int16(GREATER) && is_int16(LESS)); 314 DCHECK(is_int16(GREATER) && is_int16(LESS));
330 __ Ret(USE_DELAY_SLOT); 315 __ Ret(USE_DELAY_SLOT);
331 if (cc == le) { 316 if (cc == le) {
(...skipping 274 matching lines...)
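
Note on the identity fast path above: identity cannot simply be reported as equality, because NaN compares unequal to itself, so after the type checks the stub falls through to a bit-level NaN test built around HeapNumber::kExponentMask (a double is NaN iff its exponent bits are all ones and its mantissa is non-zero). The same reasoning drives the undefined special case: for <= and >= the answer must be false even though the operands are identical, so that path must not reach return_equal. A minimal standalone sketch of the NaN test (plain C++, not V8 code; the masks here are the standard 64-bit IEEE 754 ones, whereas the stub inspects the heap number's high word):

#include <cassert>
#include <cstdint>
#include <cstring>
#include <limits>

// Returns true iff the double's bit pattern encodes a NaN: exponent all ones,
// mantissa non-zero. This mirrors the check the stub performs with
// HeapNumber::kExponentMask, but on the whole 64-bit encoding.
bool IsNaNByBits(double d) {
  std::uint64_t bits;
  std::memcpy(&bits, &d, sizeof bits);
  const std::uint64_t kExponentMask = 0x7FF0000000000000ULL;
  const std::uint64_t kMantissaMask = 0x000FFFFFFFFFFFFFULL;
  return (bits & kExponentMask) == kExponentMask && (bits & kMantissaMask) != 0;
}

int main() {
  double nan = std::numeric_limits<double>::quiet_NaN();
  assert(IsNaNByBits(nan));
  assert(!(nan == nan));   // identity does not imply equality for NaN
  assert(!(nan <= nan));   // ordered comparisons on NaN are false as well
  assert(!IsNaNByBits(1.5));
  return 0;
}
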
606 __ sra(a0, a0, 1); 591 __ sra(a0, a0, 1);
607 __ Ret(USE_DELAY_SLOT); 592 __ Ret(USE_DELAY_SLOT);
608 __ subu(v0, a1, a0); 593 __ subu(v0, a1, a0);
609 __ bind(&not_two_smis); 594 __ bind(&not_two_smis);
610 595
611 // NOTICE! This code is only reached after a smi-fast-case check, so 596 // NOTICE! This code is only reached after a smi-fast-case check, so
612 // it is certain that at least one operand isn't a smi. 597 // it is certain that at least one operand isn't a smi.
613 598
614 // Handle the case where the objects are identical. Either returns the answer 599 // Handle the case where the objects are identical. Either returns the answer
615 // or goes to slow. Only falls through if the objects were not identical. 600 // or goes to slow. Only falls through if the objects were not identical.
616 EmitIdenticalObjectComparison(masm, &slow, cc, strength()); 601 EmitIdenticalObjectComparison(masm, &slow, cc);
617 602
618 // If either is a Smi (we know that not both are), then they can only 603 // If either is a Smi (we know that not both are), then they can only
619 // be strictly equal if the other is a HeapNumber. 604 // be strictly equal if the other is a HeapNumber.
620 STATIC_ASSERT(kSmiTag == 0); 605 STATIC_ASSERT(kSmiTag == 0);
621 DCHECK_EQ(static_cast<Smi*>(0), Smi::FromInt(0)); 606 DCHECK_EQ(static_cast<Smi*>(0), Smi::FromInt(0));
622 __ And(t2, lhs, Operand(rhs)); 607 __ And(t2, lhs, Operand(rhs));
623 __ JumpIfNotSmi(t2, &not_smis, t0); 608 __ JumpIfNotSmi(t2, &not_smis, t0);
624 // One operand is a smi. EmitSmiNonsmiComparison generates code that can: 609 // One operand is a smi. EmitSmiNonsmiComparison generates code that can:
625 // 1) Return the answer. 610 // 1) Return the answer.
626 // 2) Go to slow. 611 // 2) Go to slow.
(...skipping 118 matching lines...)
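
The two-instruction check above (__ And(t2, lhs, Operand(rhs)) followed by JumpIfNotSmi) relies on the tagging scheme asserted by STATIC_ASSERT(kSmiTag == 0): a Smi has its low bit clear and a heap object pointer has it set, so the AND of two tagged words looks like a heap object only when both operands are heap objects. A standalone sketch of the idea (plain C++ with an illustrative, not authoritative, encoding):

#include <cassert>
#include <cstdint>

constexpr std::uintptr_t kSmiTagMask = 1;  // low bit distinguishes Smi from pointer
constexpr std::uintptr_t kSmiTag = 0;      // Smis carry a zero tag bit

bool IsSmi(std::uintptr_t tagged) { return (tagged & kSmiTagMask) == kSmiTag; }

// Mirrors "__ And(t2, lhs, rhs); JumpIfNotSmi(t2, &not_smis)": the AND looks
// like a Smi exactly when at least one input is a Smi.
bool AtLeastOneSmi(std::uintptr_t lhs, std::uintptr_t rhs) {
  return IsSmi(lhs & rhs);
}

int main() {
  std::uintptr_t smi = 42u << 1;      // hypothetical Smi encoding: value shifted, tag 0
  std::uintptr_t heap1 = 0x1000 | 1;  // hypothetical heap pointers with the tag bit set
  std::uintptr_t heap2 = 0x2000 | 1;
  assert(AtLeastOneSmi(smi, heap1));     // one Smi: the Smi/HeapNumber fast path applies
  assert(!AtLeastOneSmi(heap1, heap2));  // both heap objects: jump to not_smis
  return 0;
}
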
745 ncr = GREATER; 730 ncr = GREATER;
746 } else { 731 } else {
747 DCHECK(cc == gt || cc == ge); // Remaining cases. 732 DCHECK(cc == gt || cc == ge); // Remaining cases.
748 ncr = LESS; 733 ncr = LESS;
749 } 734 }
750 __ li(a0, Operand(Smi::FromInt(ncr))); 735 __ li(a0, Operand(Smi::FromInt(ncr)));
751 __ push(a0); 736 __ push(a0);
752 737
753 // Call the native; it returns -1 (less), 0 (equal), or 1 (greater) 738 // Call the native; it returns -1 (less), 0 (equal), or 1 (greater)
754 // tagged as a small integer. 739 // tagged as a small integer.
755 __ TailCallRuntime(is_strong(strength()) ? Runtime::kCompare_Strong 740 __ TailCallRuntime(Runtime::kCompare);
756 : Runtime::kCompare);
757 } 741 }
758 742
759 __ bind(&miss); 743 __ bind(&miss);
760 GenerateMiss(masm); 744 GenerateMiss(masm);
761 } 745 }
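
Before tail-calling Runtime::kCompare, the slow path above materializes an "ncr" (NaN comparison result) hint: GREATER for < and <=, LESS for > and >=. Assuming, as the surrounding diff suggests, that the runtime returns this hint when the operands are unordered, the hint is chosen so that every relational test involving NaN evaluates to false. A simplified model of that choice (plain C++, not the actual runtime):

#include <cassert>
#include <cmath>

enum CompareResult { LESS = -1, EQUAL = 0, GREATER = 1 };
enum Condition { lt, le, gt, ge };

// The hint the stub places in a0: deliberately the "wrong direction" for cc.
CompareResult NanHint(Condition cc) {
  return (cc == lt || cc == le) ? GREATER : LESS;
}

// Simplified stand-in for the runtime comparison of two numbers.
CompareResult Compare(double x, double y, CompareResult nan_hint) {
  if (std::isnan(x) || std::isnan(y)) return nan_hint;  // unordered case
  if (x < y) return LESS;
  if (x > y) return GREATER;
  return EQUAL;
}

int main() {
  double nan = std::nan("");
  assert(Compare(nan, 1.0, NanHint(lt)) != LESS);     // "NaN < 1"   -> false
  assert(Compare(1.0, nan, NanHint(le)) == GREATER);  // "1 <= NaN"  -> false
  assert(Compare(nan, nan, NanHint(gt)) != GREATER);  // "NaN > NaN" -> false
  return 0;
}
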
762 746
763 747
764 void StoreRegistersStateStub::Generate(MacroAssembler* masm) { 748 void StoreRegistersStateStub::Generate(MacroAssembler* masm) {
765 __ mov(t9, ra); 749 __ mov(t9, ra);
766 __ pop(ra); 750 __ pop(ra);
(...skipping 2304 matching lines...)
3071 __ TailCallStub(&stub); 3055 __ TailCallStub(&stub);
3072 } 3056 }
3073 3057
3074 3058
3075 void CompareICStub::GenerateBooleans(MacroAssembler* masm) { 3059 void CompareICStub::GenerateBooleans(MacroAssembler* masm) {
3076 DCHECK_EQ(CompareICState::BOOLEAN, state()); 3060 DCHECK_EQ(CompareICState::BOOLEAN, state());
3077 Label miss; 3061 Label miss;
3078 3062
3079 __ CheckMap(a1, a2, Heap::kBooleanMapRootIndex, &miss, DO_SMI_CHECK); 3063 __ CheckMap(a1, a2, Heap::kBooleanMapRootIndex, &miss, DO_SMI_CHECK);
3080 __ CheckMap(a0, a3, Heap::kBooleanMapRootIndex, &miss, DO_SMI_CHECK); 3064 __ CheckMap(a0, a3, Heap::kBooleanMapRootIndex, &miss, DO_SMI_CHECK);
3081 if (op() != Token::EQ_STRICT && is_strong(strength())) { 3065 if (!Token::IsEqualityOp(op())) {
3082 __ TailCallRuntime(Runtime::kThrowStrongModeImplicitConversion); 3066 __ lw(a1, FieldMemOperand(a1, Oddball::kToNumberOffset));
3083 } else { 3067 __ AssertSmi(a1);
3084 if (!Token::IsEqualityOp(op())) { 3068 __ lw(a0, FieldMemOperand(a0, Oddball::kToNumberOffset));
3085 __ lw(a1, FieldMemOperand(a1, Oddball::kToNumberOffset)); 3069 __ AssertSmi(a0);
3086 __ AssertSmi(a1);
3087 __ lw(a0, FieldMemOperand(a0, Oddball::kToNumberOffset));
3088 __ AssertSmi(a0);
3089 }
3090 __ Ret(USE_DELAY_SLOT);
3091 __ Subu(v0, a1, a0);
3092 } 3070 }
3071 __ Ret(USE_DELAY_SLOT);
3072 __ Subu(v0, a1, a0);
3093 3073
3094 __ bind(&miss); 3074 __ bind(&miss);
3095 GenerateMiss(masm); 3075 GenerateMiss(masm);
3096 } 3076 }
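
CompareICStub::GenerateBooleans above handles the relational operators by loading each oddball's cached ToNumber value (false is 0, true is 1, both Smis) and returning the difference of the two registers, so the sign of v0 encodes the ordering just like the generic comparison result. A small sketch (plain C++; which register holds which operand is inferred from the surrounding code and assumed here):

#include <cassert>

int BooleanToNumber(bool b) { return b ? 1 : 0; }  // mirrors the Oddball::kToNumberOffset load

// Mirrors "__ Subu(v0, a1, a0)": negative / zero / positive encodes the ordering.
int CompareBooleans(bool lhs, bool rhs) {
  return BooleanToNumber(lhs) - BooleanToNumber(rhs);
}

int main() {
  assert(CompareBooleans(true, true) == 0);  // equal
  assert(CompareBooleans(false, true) < 0);  // false < true
  assert(CompareBooleans(true, false) > 0);  // true > false
  return 0;
}
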
3097 3077
3098 3078
3099 void CompareICStub::GenerateSmis(MacroAssembler* masm) { 3079 void CompareICStub::GenerateSmis(MacroAssembler* masm) {
3100 DCHECK(state() == CompareICState::SMI); 3080 DCHECK(state() == CompareICState::SMI);
3101 Label miss; 3081 Label miss;
3102 __ Or(a2, a1, a0); 3082 __ Or(a2, a1, a0);
(...skipping 77 matching lines...)
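
GenerateSmis opens with the complementary trick: ORing the two tagged words produces a value whose tag bit is set if either operand is a heap object, so a single JumpIfNotSmi on the OR confirms that both operands are Smis before the subtraction fast path. A sketch under the same assumed encoding as above:

#include <cassert>
#include <cstdint>

// Mirrors "__ Or(a2, a1, a0); JumpIfNotSmi(a2, &miss)": the OR looks like a
// Smi only when both inputs are Smis (assumed one-bit tag, zero for Smis).
bool BothSmis(std::uintptr_t lhs, std::uintptr_t rhs) {
  return ((lhs | rhs) & 1) == 0;
}

int main() {
  std::uintptr_t smi_a = 3u << 1, smi_b = 7u << 1;  // hypothetical Smi encodings
  std::uintptr_t heap = 0x1000 | 1;                 // hypothetical tagged pointer
  assert(BothSmis(smi_a, smi_b));
  assert(!BothSmis(smi_a, heap));
  return 0;
}
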
3180 __ bind(&fpu_eq); 3160 __ bind(&fpu_eq);
3181 __ Ret(USE_DELAY_SLOT); 3161 __ Ret(USE_DELAY_SLOT);
3182 __ li(v0, Operand(EQUAL)); 3162 __ li(v0, Operand(EQUAL));
3183 3163
3184 __ bind(&fpu_lt); 3164 __ bind(&fpu_lt);
3185 __ Ret(USE_DELAY_SLOT); 3165 __ Ret(USE_DELAY_SLOT);
3186 __ li(v0, Operand(LESS)); 3166 __ li(v0, Operand(LESS));
3187 3167
3188 __ bind(&unordered); 3168 __ bind(&unordered);
3189 __ bind(&generic_stub); 3169 __ bind(&generic_stub);
3190 CompareICStub stub(isolate(), op(), strength(), CompareICState::GENERIC, 3170 CompareICStub stub(isolate(), op(), CompareICState::GENERIC,
3191 CompareICState::GENERIC, CompareICState::GENERIC); 3171 CompareICState::GENERIC, CompareICState::GENERIC);
3192 __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET); 3172 __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET);
3193 3173
3194 __ bind(&maybe_undefined1); 3174 __ bind(&maybe_undefined1);
3195 if (Token::IsOrderedRelationalCompareOp(op())) { 3175 if (Token::IsOrderedRelationalCompareOp(op())) {
3196 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); 3176 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
3197 __ Branch(&miss, ne, a0, Operand(at)); 3177 __ Branch(&miss, ne, a0, Operand(at));
3198 __ JumpIfSmi(a1, &unordered); 3178 __ JumpIfSmi(a1, &unordered);
3199 __ GetObjectType(a1, a2, a2); 3179 __ GetObjectType(a1, a2, a2);
3200 __ Branch(&maybe_undefined2, ne, a2, Operand(HEAP_NUMBER_TYPE)); 3180 __ Branch(&maybe_undefined2, ne, a2, Operand(HEAP_NUMBER_TYPE));
(...skipping 209 matching lines...)
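
The maybe_undefined paths above exist because, for the ordered relational operators, undefined converts to NaN, so a comparison against it can reuse the unordered FPU path and always comes out false. A tiny model of that rule (plain C++, not V8 code; a NaN double stands in for ToNumber(undefined)):

#include <cassert>
#include <cmath>

int main() {
  double undefined_as_number = std::nan("");  // ToNumber(undefined) is NaN
  assert(!(undefined_as_number < 1.0));       // undefined < 1  -> false
  assert(!(undefined_as_number >= 1.0));      // undefined >= 1 -> false
  assert(!(undefined_as_number <= undefined_as_number));  // undefined <= undefined -> false
  return 0;
}
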
3410 __ JumpIfSmi(a2, &miss); 3390 __ JumpIfSmi(a2, &miss);
3411 __ GetWeakValue(t0, cell); 3391 __ GetWeakValue(t0, cell);
3412 __ lw(a2, FieldMemOperand(a0, HeapObject::kMapOffset)); 3392 __ lw(a2, FieldMemOperand(a0, HeapObject::kMapOffset));
3413 __ lw(a3, FieldMemOperand(a1, HeapObject::kMapOffset)); 3393 __ lw(a3, FieldMemOperand(a1, HeapObject::kMapOffset));
3414 __ Branch(&miss, ne, a2, Operand(t0)); 3394 __ Branch(&miss, ne, a2, Operand(t0));
3415 __ Branch(&miss, ne, a3, Operand(t0)); 3395 __ Branch(&miss, ne, a3, Operand(t0));
3416 3396
3417 if (Token::IsEqualityOp(op())) { 3397 if (Token::IsEqualityOp(op())) {
3418 __ Ret(USE_DELAY_SLOT); 3398 __ Ret(USE_DELAY_SLOT);
3419 __ subu(v0, a0, a1); 3399 __ subu(v0, a0, a1);
3420 } else if (is_strong(strength())) {
3421 __ TailCallRuntime(Runtime::kThrowStrongModeImplicitConversion);
3422 } else { 3400 } else {
3423 if (op() == Token::LT || op() == Token::LTE) { 3401 if (op() == Token::LT || op() == Token::LTE) {
3424 __ li(a2, Operand(Smi::FromInt(GREATER))); 3402 __ li(a2, Operand(Smi::FromInt(GREATER)));
3425 } else { 3403 } else {
3426 __ li(a2, Operand(Smi::FromInt(LESS))); 3404 __ li(a2, Operand(Smi::FromInt(LESS)));
3427 } 3405 }
3428 __ Push(a1, a0, a2); 3406 __ Push(a1, a0, a2);
3429 __ TailCallRuntime(Runtime::kCompare); 3407 __ TailCallRuntime(Runtime::kCompare);
3430 } 3408 }
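
In the known-receiver case above, two JS receivers compare equal (strictly or loosely) exactly when they are the same object, so for the equality operators the stub simply returns the difference of the two tagged pointers: zero means equal, anything else means not equal. The relational operators still fall through to Runtime::kCompare with the same kind of hint as before. A standalone sketch of the equality fast path (plain C++, hypothetical types):

#include <cassert>
#include <cstdint>

struct Receiver { int payload; };  // hypothetical stand-in for a heap object

// Mirrors "__ subu(v0, a0, a1)": the difference of the two (tagged) addresses.
std::intptr_t CompareKnownReceivers(const Receiver* lhs, const Receiver* rhs) {
  return reinterpret_cast<std::intptr_t>(lhs) - reinterpret_cast<std::intptr_t>(rhs);
}

int main() {
  Receiver a{1}, b{1};
  assert(CompareKnownReceivers(&a, &a) == 0);  // same object      -> equal
  assert(CompareKnownReceivers(&a, &b) != 0);  // distinct objects -> not equal
  return 0;
}
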
3431 3409
(...skipping 2251 matching lines...)
5683 return_value_operand, NULL); 5661 return_value_operand, NULL);
5684 } 5662 }
5685 5663
5686 5664
5687 #undef __ 5665 #undef __
5688 5666
5689 } // namespace internal 5667 } // namespace internal
5690 } // namespace v8 5668 } // namespace v8
5691 5669
5692 #endif // V8_TARGET_ARCH_MIPS 5670 #endif // V8_TARGET_ARCH_MIPS