Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/v8.h" | 5 #include "src/v8.h" |
| 6 | 6 |
| 7 #if V8_TARGET_ARCH_ARM64 | 7 #if V8_TARGET_ARCH_ARM64 |
| 8 | 8 |
| 9 #include "src/bootstrapper.h" | 9 #include "src/bootstrapper.h" |
| 10 #include "src/code-stubs.h" | 10 #include "src/code-stubs.h" |
| (...skipping 205 matching lines...) | |
| 216 __ Cmp(right, left); | 216 __ Cmp(right, left); |
| 217 __ B(ne, &not_identical); | 217 __ B(ne, &not_identical); |
| 218 | 218 |
| 219 // Test for NaN. Sadly, we can't just compare to factory::nan_value(), | 219 // Test for NaN. Sadly, we can't just compare to factory::nan_value(), |
| 220 // so we do the second best thing - test it ourselves. | 220 // so we do the second best thing - test it ourselves. |
| 221 // They are both equal and they are not both Smis so both of them are not | 221 // They are both equal and they are not both Smis so both of them are not |
| 222 // Smis. If it's not a heap number, then return equal. | 222 // Smis. If it's not a heap number, then return equal. |
| 223 if ((cond == lt) || (cond == gt)) { | 223 if ((cond == lt) || (cond == gt)) { |
| 224 __ JumpIfObjectType(right, scratch, scratch, FIRST_SPEC_OBJECT_TYPE, slow, | 224 __ JumpIfObjectType(right, scratch, scratch, FIRST_SPEC_OBJECT_TYPE, slow, |
| 225 ge); | 225 ge); |
| 226 } else if (cond == eq) { | |
| 227 __ JumpIfHeapNumber(right, &heap_number); | |
| 226 } else { | 228 } else { |
| 227 Register right_type = scratch; | 229 Register right_type = scratch; |
| 228 __ JumpIfObjectType(right, right_type, right_type, HEAP_NUMBER_TYPE, | 230 __ JumpIfObjectType(right, right_type, right_type, HEAP_NUMBER_TYPE, |
ulan 2014/09/24 08:21:41
Forgot to replace this with JumpIfHeapNumber?
vincent.belliard 2014/09/24 08:25:41
We need right_type when it's not a heap number. U
ulan 2014/09/24 08:37:04
I see, thanks for explaining.
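For readers following the thread above: on the relational (<=, >=) path, the instance type that JumpIfObjectType loads into right_type is reused by the two later comparisons against FIRST_SPEC_OBJECT_TYPE and ODDBALL_TYPE, which the map-only JumpIfHeapNumber check would not supply. Below is a minimal plain-C++ sketch of that control flow; the enum values and function names are illustrative placeholders, not V8 definitions.

```cpp
// Sketch only: models the le/ge branch for identical non-smi operands.
// InstanceType values here are placeholders, not V8's real layout.
#include <cstdint>

enum class InstanceType : uint16_t {
  HEAP_NUMBER_TYPE = 10,
  ODDBALL_TYPE = 20,
  FIRST_SPEC_OBJECT_TYPE = 0x80,
};

enum Outcome {
  HANDLE_AS_HEAP_NUMBER,  // branch to &heap_number
  TAKE_SLOW_PATH,         // JS objects: B(ge, slow)
  RETURN_EQUAL,           // fall through to &return_equal
  RETURN_NOT_EQUAL        // undefined <= / >= undefined must fail (GREATER/LESS)
};

Outcome RelationalIdenticalPath(InstanceType right_type, bool right_is_undefined) {
  if (right_type == InstanceType::HEAP_NUMBER_TYPE) return HANDLE_AS_HEAP_NUMBER;
  if (right_type >= InstanceType::FIRST_SPEC_OBJECT_TYPE) return TAKE_SLOW_PATH;
  // right_type is needed a second time here, which is why this path keeps
  // JumpIfObjectType instead of the map-only JumpIfHeapNumber.
  if (right_type != InstanceType::ODDBALL_TYPE || !right_is_undefined) return RETURN_EQUAL;
  return RETURN_NOT_EQUAL;
}
```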
| 229 &heap_number); | 231 &heap_number); |
| 230 // Comparing JS objects with <=, >= is complicated. | 232 // Comparing JS objects with <=, >= is complicated. |
| 231 if (cond != eq) { | 233 __ Cmp(right_type, FIRST_SPEC_OBJECT_TYPE); |
| 232 __ Cmp(right_type, FIRST_SPEC_OBJECT_TYPE); | 234 __ B(ge, slow); |
| 233 __ B(ge, slow); | 235 // Normally here we fall through to return_equal, but undefined is |
| 234 // Normally here we fall through to return_equal, but undefined is | 236 // special: (undefined == undefined) == true, but |
| 235 // special: (undefined == undefined) == true, but | 237 // (undefined <= undefined) == false! See ECMAScript 11.8.5. |
| 236 // (undefined <= undefined) == false! See ECMAScript 11.8.5. | 238 if ((cond == le) || (cond == ge)) { |
| 237 if ((cond == le) || (cond == ge)) { | 239 __ Cmp(right_type, ODDBALL_TYPE); |
| 238 __ Cmp(right_type, ODDBALL_TYPE); | 240 __ B(ne, &return_equal); |
| 239 __ B(ne, &return_equal); | 241 __ JumpIfNotRoot(right, Heap::kUndefinedValueRootIndex, &return_equal); |
| 240 __ JumpIfNotRoot(right, Heap::kUndefinedValueRootIndex, &return_equal); | 242 if (cond == le) { |
| 241 if (cond == le) { | 243 // undefined <= undefined should fail. |
| 242 // undefined <= undefined should fail. | 244 __ Mov(result, GREATER); |
| 243 __ Mov(result, GREATER); | 245 } else { |
| 244 } else { | 246 // undefined >= undefined should fail. |
| 245 // undefined >= undefined should fail. | 247 __ Mov(result, LESS); |
| 246 __ Mov(result, LESS); | |
| 247 } | |
| 248 __ Ret(); | |
| 249 } | 248 } |
| 249 __ Ret(); | |
| 250 } | 250 } |
| 251 } | 251 } |
| 252 | 252 |
| 253 __ Bind(&return_equal); | 253 __ Bind(&return_equal); |
| 254 if (cond == lt) { | 254 if (cond == lt) { |
| 255 __ Mov(result, GREATER); // Things aren't less than themselves. | 255 __ Mov(result, GREATER); // Things aren't less than themselves. |
| 256 } else if (cond == gt) { | 256 } else if (cond == gt) { |
| 257 __ Mov(result, LESS); // Things aren't greater than themselves. | 257 __ Mov(result, LESS); // Things aren't greater than themselves. |
| 258 } else { | 258 } else { |
| 259 __ Mov(result, EQUAL); // Things are <=, >=, ==, === themselves. | 259 __ Mov(result, EQUAL); // Things are <=, >=, ==, === themselves. |
| (...skipping 83 matching lines...) | |
| 343 scratch, kIsNotStringMask | kIsNotInternalizedMask, &return_not_equal); | 343 scratch, kIsNotStringMask | kIsNotInternalizedMask, &return_not_equal); |
| 344 } | 344 } |
| 345 | 345 |
| 346 | 346 |
| 347 // See call site for description. | 347 // See call site for description. |
| 348 static void EmitSmiNonsmiComparison(MacroAssembler* masm, | 348 static void EmitSmiNonsmiComparison(MacroAssembler* masm, |
| 349 Register left, | 349 Register left, |
| 350 Register right, | 350 Register right, |
| 351 FPRegister left_d, | 351 FPRegister left_d, |
| 352 FPRegister right_d, | 352 FPRegister right_d, |
| 353 Register scratch, | |
| 354 Label* slow, | 353 Label* slow, |
| 355 bool strict) { | 354 bool strict) { |
| 356 DCHECK(!AreAliased(left, right, scratch)); | |
| 357 DCHECK(!AreAliased(left_d, right_d)); | 355 DCHECK(!AreAliased(left_d, right_d)); |
| 358 DCHECK((left.is(x0) && right.is(x1)) || | 356 DCHECK((left.is(x0) && right.is(x1)) || |
| 359 (right.is(x0) && left.is(x1))); | 357 (right.is(x0) && left.is(x1))); |
| 360 Register result = x0; | 358 Register result = x0; |
| 361 | 359 |
| 362 Label right_is_smi, done; | 360 Label right_is_smi, done; |
| 363 __ JumpIfSmi(right, &right_is_smi); | 361 __ JumpIfSmi(right, &right_is_smi); |
| 364 | 362 |
| 365 // Left is the smi. Check whether right is a heap number. | 363 // Left is the smi. Check whether right is a heap number. |
| 366 if (strict) { | 364 if (strict) { |
| 367 // If right is not a number and left is a smi, then strict equality cannot | 365 // If right is not a number and left is a smi, then strict equality cannot |
| 368 // succeed. Return non-equal. | 366 // succeed. Return non-equal. |
| 369 Label is_heap_number; | 367 Label is_heap_number; |
| 370 __ JumpIfObjectType(right, scratch, scratch, HEAP_NUMBER_TYPE, | 368 __ JumpIfHeapNumber(right, &is_heap_number); |
| 371 &is_heap_number); | |
| 372 // Register right is a non-zero pointer, which is a valid NOT_EQUAL result. | 369 // Register right is a non-zero pointer, which is a valid NOT_EQUAL result. |
| 373 if (!right.is(result)) { | 370 if (!right.is(result)) { |
| 374 __ Mov(result, NOT_EQUAL); | 371 __ Mov(result, NOT_EQUAL); |
| 375 } | 372 } |
| 376 __ Ret(); | 373 __ Ret(); |
| 377 __ Bind(&is_heap_number); | 374 __ Bind(&is_heap_number); |
| 378 } else { | 375 } else { |
| 379 // Smi compared non-strictly with a non-smi, non-heap-number. Call the | 376 // Smi compared non-strictly with a non-smi, non-heap-number. Call the |
| 380 // runtime. | 377 // runtime. |
| 381 __ JumpIfNotObjectType(right, scratch, scratch, HEAP_NUMBER_TYPE, slow); | 378 __ JumpIfNotHeapNumber(right, slow); |
| 382 } | 379 } |
| 383 | 380 |
| 384 // Left is the smi. Right is a heap number. Load right value into right_d, and | 381 // Left is the smi. Right is a heap number. Load right value into right_d, and |
| 385 // convert left smi into double in left_d. | 382 // convert left smi into double in left_d. |
| 386 __ Ldr(right_d, FieldMemOperand(right, HeapNumber::kValueOffset)); | 383 __ Ldr(right_d, FieldMemOperand(right, HeapNumber::kValueOffset)); |
| 387 __ SmiUntagToDouble(left_d, left); | 384 __ SmiUntagToDouble(left_d, left); |
| 388 __ B(&done); | 385 __ B(&done); |
| 389 | 386 |
| 390 __ Bind(&right_is_smi); | 387 __ Bind(&right_is_smi); |
| 391 // Right is a smi. Check whether the non-smi left is a heap number. | 388 // Right is a smi. Check whether the non-smi left is a heap number. |
| 392 if (strict) { | 389 if (strict) { |
| 393 // If left is not a number and right is a smi then strict equality cannot | 390 // If left is not a number and right is a smi then strict equality cannot |
| 394 // succeed. Return non-equal. | 391 // succeed. Return non-equal. |
| 395 Label is_heap_number; | 392 Label is_heap_number; |
| 396 __ JumpIfObjectType(left, scratch, scratch, HEAP_NUMBER_TYPE, | 393 __ JumpIfHeapNumber(left, &is_heap_number); |
| 397 &is_heap_number); | |
| 398 // Register left is a non-zero pointer, which is a valid NOT_EQUAL result. | 394 // Register left is a non-zero pointer, which is a valid NOT_EQUAL result. |
| 399 if (!left.is(result)) { | 395 if (!left.is(result)) { |
| 400 __ Mov(result, NOT_EQUAL); | 396 __ Mov(result, NOT_EQUAL); |
| 401 } | 397 } |
| 402 __ Ret(); | 398 __ Ret(); |
| 403 __ Bind(&is_heap_number); | 399 __ Bind(&is_heap_number); |
| 404 } else { | 400 } else { |
| 405 // Smi compared non-strictly with a non-smi, non-heap-number. Call the | 401 // Smi compared non-strictly with a non-smi, non-heap-number. Call the |
| 406 // runtime. | 402 // runtime. |
| 407 __ JumpIfNotObjectType(left, scratch, scratch, HEAP_NUMBER_TYPE, slow); | 403 __ JumpIfNotHeapNumber(left, slow); |
| 408 } | 404 } |
| 409 | 405 |
| 410 // Right is the smi. Left is a heap number. Load left value into left_d, and | 406 // Right is the smi. Left is a heap number. Load left value into left_d, and |
| 411 // convert right smi into double in right_d. | 407 // convert right smi into double in right_d. |
| 412 __ Ldr(left_d, FieldMemOperand(left, HeapNumber::kValueOffset)); | 408 __ Ldr(left_d, FieldMemOperand(left, HeapNumber::kValueOffset)); |
| 413 __ SmiUntagToDouble(right_d, right); | 409 __ SmiUntagToDouble(right_d, right); |
| 414 | 410 |
| 415 // Fall through to both_loaded_as_doubles. | 411 // Fall through to both_loaded_as_doubles. |
| 416 __ Bind(&done); | 412 __ Bind(&done); |
| 417 } | 413 } |
| (...skipping 47 matching lines...) | |
| 465 __ Ldrb(right_bitfield, FieldMemOperand(right_map, Map::kBitFieldOffset)); | 461 __ Ldrb(right_bitfield, FieldMemOperand(right_map, Map::kBitFieldOffset)); |
| 466 __ Ldrb(left_bitfield, FieldMemOperand(left_map, Map::kBitFieldOffset)); | 462 __ Ldrb(left_bitfield, FieldMemOperand(left_map, Map::kBitFieldOffset)); |
| 467 __ And(result, right_bitfield, left_bitfield); | 463 __ And(result, right_bitfield, left_bitfield); |
| 468 __ And(result, result, 1 << Map::kIsUndetectable); | 464 __ And(result, result, 1 << Map::kIsUndetectable); |
| 469 __ Eor(result, result, 1 << Map::kIsUndetectable); | 465 __ Eor(result, result, 1 << Map::kIsUndetectable); |
| 470 __ Ret(); | 466 __ Ret(); |
| 471 } | 467 } |
| 472 | 468 |
| 473 | 469 |
| 474 static void CompareICStub_CheckInputType(MacroAssembler* masm, Register input, | 470 static void CompareICStub_CheckInputType(MacroAssembler* masm, Register input, |
| 475 Register scratch, | |
| 476 CompareICState::State expected, | 471 CompareICState::State expected, |
| 477 Label* fail) { | 472 Label* fail) { |
| 478 Label ok; | 473 Label ok; |
| 479 if (expected == CompareICState::SMI) { | 474 if (expected == CompareICState::SMI) { |
| 480 __ JumpIfNotSmi(input, fail); | 475 __ JumpIfNotSmi(input, fail); |
| 481 } else if (expected == CompareICState::NUMBER) { | 476 } else if (expected == CompareICState::NUMBER) { |
| 482 __ JumpIfSmi(input, &ok); | 477 __ JumpIfSmi(input, &ok); |
| 483 __ CheckMap(input, scratch, Heap::kHeapNumberMapRootIndex, fail, | 478 __ JumpIfNotHeapNumber(input, fail); |
| 484 DONT_DO_SMI_CHECK); | |
| 485 } | 479 } |
| 486 // We could be strict about internalized/non-internalized here, but as long as | 480 // We could be strict about internalized/non-internalized here, but as long as |
| 487 // hydrogen doesn't care, the stub doesn't have to care either. | 481 // hydrogen doesn't care, the stub doesn't have to care either. |
| 488 __ Bind(&ok); | 482 __ Bind(&ok); |
| 489 } | 483 } |
| 490 | 484 |
| 491 | 485 |
| 492 void CompareICStub::GenerateGeneric(MacroAssembler* masm) { | 486 void CompareICStub::GenerateGeneric(MacroAssembler* masm) { |
| 493 Register lhs = x1; | 487 Register lhs = x1; |
| 494 Register rhs = x0; | 488 Register rhs = x0; |
| 495 Register result = x0; | 489 Register result = x0; |
| 496 Condition cond = GetCondition(); | 490 Condition cond = GetCondition(); |
| 497 | 491 |
| 498 Label miss; | 492 Label miss; |
| 499 CompareICStub_CheckInputType(masm, lhs, x2, left(), &miss); | 493 CompareICStub_CheckInputType(masm, lhs, left(), &miss); |
| 500 CompareICStub_CheckInputType(masm, rhs, x3, right(), &miss); | 494 CompareICStub_CheckInputType(masm, rhs, right(), &miss); |
| 501 | 495 |
| 502 Label slow; // Call builtin. | 496 Label slow; // Call builtin. |
| 503 Label not_smis, both_loaded_as_doubles; | 497 Label not_smis, both_loaded_as_doubles; |
| 504 Label not_two_smis, smi_done; | 498 Label not_two_smis, smi_done; |
| 505 __ JumpIfEitherNotSmi(lhs, rhs, &not_two_smis); | 499 __ JumpIfEitherNotSmi(lhs, rhs, &not_two_smis); |
| 506 __ SmiUntag(lhs); | 500 __ SmiUntag(lhs); |
| 507 __ Sub(result, lhs, Operand::UntagSmi(rhs)); | 501 __ Sub(result, lhs, Operand::UntagSmi(rhs)); |
| 508 __ Ret(); | 502 __ Ret(); |
| 509 | 503 |
| 510 __ Bind(&not_two_smis); | 504 __ Bind(&not_two_smis); |
| (...skipping 12 matching lines...) | |
| 523 // Exactly one operand is a smi. EmitSmiNonsmiComparison generates code that | 517 // Exactly one operand is a smi. EmitSmiNonsmiComparison generates code that |
| 524 // can: | 518 // can: |
| 525 // 1) Return the answer. | 519 // 1) Return the answer. |
| 526 // 2) Branch to the slow case. | 520 // 2) Branch to the slow case. |
| 527 // 3) Fall through to both_loaded_as_doubles. | 521 // 3) Fall through to both_loaded_as_doubles. |
| 528 // In case 3, we have found out that we were dealing with a number-number | 522 // In case 3, we have found out that we were dealing with a number-number |
| 529 // comparison. The double values of the numbers have been loaded, right into | 523 // comparison. The double values of the numbers have been loaded, right into |
| 530 // rhs_d, left into lhs_d. | 524 // rhs_d, left into lhs_d. |
| 531 FPRegister rhs_d = d0; | 525 FPRegister rhs_d = d0; |
| 532 FPRegister lhs_d = d1; | 526 FPRegister lhs_d = d1; |
| 533 EmitSmiNonsmiComparison(masm, lhs, rhs, lhs_d, rhs_d, x10, &slow, strict()); | 527 EmitSmiNonsmiComparison(masm, lhs, rhs, lhs_d, rhs_d, &slow, strict()); |
| 534 | 528 |
| 535 __ Bind(&both_loaded_as_doubles); | 529 __ Bind(&both_loaded_as_doubles); |
| 536 // The arguments have been converted to doubles and stored in rhs_d and | 530 // The arguments have been converted to doubles and stored in rhs_d and |
| 537 // lhs_d. | 531 // lhs_d. |
| 538 Label nan; | 532 Label nan; |
| 539 __ Fcmp(lhs_d, rhs_d); | 533 __ Fcmp(lhs_d, rhs_d); |
| 540 __ B(vs, &nan); // Overflow flag set if either is NaN. | 534 __ B(vs, &nan); // Overflow flag set if either is NaN. |
| 541 STATIC_ASSERT((LESS == -1) && (EQUAL == 0) && (GREATER == 1)); | 535 STATIC_ASSERT((LESS == -1) && (EQUAL == 0) && (GREATER == 1)); |
| 542 __ Cset(result, gt); // gt => 1, otherwise (lt, eq) => 0 (EQUAL). | 536 __ Cset(result, gt); // gt => 1, otherwise (lt, eq) => 0 (EQUAL). |
| 543 __ Csinv(result, result, xzr, ge); // lt => -1, gt => 1, eq => 0. | 537 __ Csinv(result, result, xzr, ge); // lt => -1, gt => 1, eq => 0. |
| (...skipping 2589 matching lines...) | |
| 3133 } | 3127 } |
| 3134 | 3128 |
| 3135 | 3129 |
| 3136 void StringCharCodeAtGenerator::GenerateSlow( | 3130 void StringCharCodeAtGenerator::GenerateSlow( |
| 3137 MacroAssembler* masm, | 3131 MacroAssembler* masm, |
| 3138 const RuntimeCallHelper& call_helper) { | 3132 const RuntimeCallHelper& call_helper) { |
| 3139 __ Abort(kUnexpectedFallthroughToCharCodeAtSlowCase); | 3133 __ Abort(kUnexpectedFallthroughToCharCodeAtSlowCase); |
| 3140 | 3134 |
| 3141 __ Bind(&index_not_smi_); | 3135 __ Bind(&index_not_smi_); |
| 3142 // If index is a heap number, try converting it to an integer. | 3136 // If index is a heap number, try converting it to an integer. |
| 3143 __ CheckMap(index_, | 3137 __ JumpIfNotHeapNumber(index_, index_not_number_); |
| 3144 result_, | |
| 3145 Heap::kHeapNumberMapRootIndex, | |
| 3146 index_not_number_, | |
| 3147 DONT_DO_SMI_CHECK); | |
| 3148 call_helper.BeforeCall(masm); | 3138 call_helper.BeforeCall(masm); |
| 3149 // Save object_ on the stack and pass index_ as argument for runtime call. | 3139 // Save object_ on the stack and pass index_ as argument for runtime call. |
| 3150 __ Push(object_, index_); | 3140 __ Push(object_, index_); |
| 3151 if (index_flags_ == STRING_INDEX_IS_NUMBER) { | 3141 if (index_flags_ == STRING_INDEX_IS_NUMBER) { |
| 3152 __ CallRuntime(Runtime::kNumberToIntegerMapMinusZero, 1); | 3142 __ CallRuntime(Runtime::kNumberToIntegerMapMinusZero, 1); |
| 3153 } else { | 3143 } else { |
| 3154 DCHECK(index_flags_ == STRING_INDEX_IS_ARRAY_INDEX); | 3144 DCHECK(index_flags_ == STRING_INDEX_IS_ARRAY_INDEX); |
| 3155 // NumberToSmi discards numbers that are not exact integers. | 3145 // NumberToSmi discards numbers that are not exact integers. |
| 3156 __ CallRuntime(Runtime::kNumberToSmi, 1); | 3146 __ CallRuntime(Runtime::kNumberToSmi, 1); |
| 3157 } | 3147 } |
| (...skipping 100 matching lines...) | |
| 3258 } | 3248 } |
| 3259 if (right() == CompareICState::SMI) { | 3249 if (right() == CompareICState::SMI) { |
| 3260 __ JumpIfNotSmi(rhs, &miss); | 3250 __ JumpIfNotSmi(rhs, &miss); |
| 3261 } | 3251 } |
| 3262 | 3252 |
| 3263 __ SmiUntagToDouble(rhs_d, rhs, kSpeculativeUntag); | 3253 __ SmiUntagToDouble(rhs_d, rhs, kSpeculativeUntag); |
| 3264 __ SmiUntagToDouble(lhs_d, lhs, kSpeculativeUntag); | 3254 __ SmiUntagToDouble(lhs_d, lhs, kSpeculativeUntag); |
| 3265 | 3255 |
| 3266 // Load rhs if it's a heap number. | 3256 // Load rhs if it's a heap number. |
| 3267 __ JumpIfSmi(rhs, &handle_lhs); | 3257 __ JumpIfSmi(rhs, &handle_lhs); |
| 3268 __ CheckMap(rhs, x10, Heap::kHeapNumberMapRootIndex, &maybe_undefined1, | 3258 __ JumpIfNotHeapNumber(rhs, &maybe_undefined1); |
| 3269 DONT_DO_SMI_CHECK); | |
| 3270 __ Ldr(rhs_d, FieldMemOperand(rhs, HeapNumber::kValueOffset)); | 3259 __ Ldr(rhs_d, FieldMemOperand(rhs, HeapNumber::kValueOffset)); |
| 3271 | 3260 |
| 3272 // Load lhs if it's a heap number. | 3261 // Load lhs if it's a heap number. |
| 3273 __ Bind(&handle_lhs); | 3262 __ Bind(&handle_lhs); |
| 3274 __ JumpIfSmi(lhs, &values_in_d_regs); | 3263 __ JumpIfSmi(lhs, &values_in_d_regs); |
| 3275 __ CheckMap(lhs, x10, Heap::kHeapNumberMapRootIndex, &maybe_undefined2, | 3264 __ JumpIfNotHeapNumber(lhs, &maybe_undefined2); |
| 3276 DONT_DO_SMI_CHECK); | |
| 3277 __ Ldr(lhs_d, FieldMemOperand(lhs, HeapNumber::kValueOffset)); | 3265 __ Ldr(lhs_d, FieldMemOperand(lhs, HeapNumber::kValueOffset)); |
| 3278 | 3266 |
| 3279 __ Bind(&values_in_d_regs); | 3267 __ Bind(&values_in_d_regs); |
| 3280 __ Fcmp(lhs_d, rhs_d); | 3268 __ Fcmp(lhs_d, rhs_d); |
| 3281 __ B(vs, &unordered); // Overflow flag set if either is NaN. | 3269 __ B(vs, &unordered); // Overflow flag set if either is NaN. |
| 3282 STATIC_ASSERT((LESS == -1) && (EQUAL == 0) && (GREATER == 1)); | 3270 STATIC_ASSERT((LESS == -1) && (EQUAL == 0) && (GREATER == 1)); |
| 3283 __ Cset(result, gt); // gt => 1, otherwise (lt, eq) => 0 (EQUAL). | 3271 __ Cset(result, gt); // gt => 1, otherwise (lt, eq) => 0 (EQUAL). |
| 3284 __ Csinv(result, result, xzr, ge); // lt => -1, gt => 1, eq => 0. | 3272 __ Csinv(result, result, xzr, ge); // lt => -1, gt => 1, eq => 0. |
| 3285 __ Ret(); | 3273 __ Ret(); |
| 3286 | 3274 |
| 3287 __ Bind(&unordered); | 3275 __ Bind(&unordered); |
| 3288 CompareICStub stub(isolate(), op(), CompareICState::GENERIC, | 3276 CompareICStub stub(isolate(), op(), CompareICState::GENERIC, |
| 3289 CompareICState::GENERIC, CompareICState::GENERIC); | 3277 CompareICState::GENERIC, CompareICState::GENERIC); |
| 3290 __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET); | 3278 __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET); |
| 3291 | 3279 |
| 3292 __ Bind(&maybe_undefined1); | 3280 __ Bind(&maybe_undefined1); |
| 3293 if (Token::IsOrderedRelationalCompareOp(op())) { | 3281 if (Token::IsOrderedRelationalCompareOp(op())) { |
| 3294 __ JumpIfNotRoot(rhs, Heap::kUndefinedValueRootIndex, &miss); | 3282 __ JumpIfNotRoot(rhs, Heap::kUndefinedValueRootIndex, &miss); |
| 3295 __ JumpIfSmi(lhs, &unordered); | 3283 __ JumpIfSmi(lhs, &unordered); |
| 3296 __ JumpIfNotObjectType(lhs, x10, x10, HEAP_NUMBER_TYPE, &maybe_undefined2); | 3284 __ JumpIfNotHeapNumber(lhs, &maybe_undefined2); |
| 3297 __ B(&unordered); | 3285 __ B(&unordered); |
| 3298 } | 3286 } |
| 3299 | 3287 |
| 3300 __ Bind(&maybe_undefined2); | 3288 __ Bind(&maybe_undefined2); |
| 3301 if (Token::IsOrderedRelationalCompareOp(op())) { | 3289 if (Token::IsOrderedRelationalCompareOp(op())) { |
| 3302 __ JumpIfRoot(lhs, Heap::kUndefinedValueRootIndex, &unordered); | 3290 __ JumpIfRoot(lhs, Heap::kUndefinedValueRootIndex, &unordered); |
| 3303 } | 3291 } |
| 3304 | 3292 |
| 3305 __ Bind(&miss); | 3293 __ Bind(&miss); |
| 3306 GenerateMiss(masm); | 3294 GenerateMiss(masm); |
| (...skipping 1765 matching lines...) | |
| 5072 MemOperand(fp, 6 * kPointerSize), | 5060 MemOperand(fp, 6 * kPointerSize), |
| 5073 NULL); | 5061 NULL); |
| 5074 } | 5062 } |
| 5075 | 5063 |
| 5076 | 5064 |
| 5077 #undef __ | 5065 #undef __ |
| 5078 | 5066 |
| 5079 } } // namespace v8::internal | 5067 } } // namespace v8::internal |
| 5080 | 5068 |
| 5081 #endif // V8_TARGET_ARCH_ARM64 | 5069 #endif // V8_TARGET_ARCH_ARM64 |
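As an aside on the unchanged comparison tail used in both GenerateGeneric and the NUMBER CompareIC handler: after Fcmp routes NaN operands away via the V flag, the Cset/Csinv pair folds the remaining flags into the LESS/EQUAL/GREATER convention. A standalone model of that pattern in plain C++ (an illustration, not V8 or ARM64 code):

```cpp
// Models: Cset(result, gt); Csinv(result, result, xzr, ge);
// Assumes NaN operands were already branched to the &nan / &unordered path.
#include <cstdint>

int64_t ThreeWayCompare(double lhs, double rhs) {
  int64_t result = (lhs > rhs) ? 1 : 0;     // Cset(result, gt): gt => 1, else 0
  if (!(lhs >= rhs)) result = ~int64_t{0};  // Csinv(..., xzr, ge): lt => ~0 == -1
  return result;                            // LESS == -1, EQUAL == 0, GREATER == 1
}
```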