OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 51 matching lines...)
62 __ ldr(scratch1, FieldMemOperand(operand, HeapObject::kMapOffset)); | 62 __ ldr(scratch1, FieldMemOperand(operand, HeapObject::kMapOffset)); |
63 __ LoadRoot(scratch2, Heap::kHeapNumberMapRootIndex); | 63 __ LoadRoot(scratch2, Heap::kHeapNumberMapRootIndex); |
64 __ cmp(scratch1, scratch2); | 64 __ cmp(scratch1, scratch2); |
65 __ b(ne, not_a_heap_number); | 65 __ b(ne, not_a_heap_number); |
66 } | 66 } |
67 | 67 |
68 | 68 |
69 void ToNumberStub::Generate(MacroAssembler* masm) { | 69 void ToNumberStub::Generate(MacroAssembler* masm) { |
70 // The ToNumber stub takes one argument in r0. | 70 // The ToNumber stub takes one argument in r0. |
71 Label check_heap_number, call_builtin; | 71 Label check_heap_number, call_builtin; |
72 __ tst(r0, Operand(kSmiTagMask)); | 72 __ JumpIfNotSmi(r0, &check_heap_number); |
73 __ b(ne, &check_heap_number); | |
74 __ Ret(); | 73 __ Ret(); |
75 | 74 |
76 __ bind(&check_heap_number); | 75 __ bind(&check_heap_number); |
77 EmitCheckForHeapNumber(masm, r0, r1, ip, &call_builtin); | 76 EmitCheckForHeapNumber(masm, r0, r1, ip, &call_builtin); |
78 __ Ret(); | 77 __ Ret(); |
79 | 78 |
80 __ bind(&call_builtin); | 79 __ bind(&call_builtin); |
81 __ push(r0); | 80 __ push(r0); |
82 __ InvokeBuiltin(Builtins::TO_NUMBER, JUMP_FUNCTION); | 81 __ InvokeBuiltin(Builtins::TO_NUMBER, JUMP_FUNCTION); |
83 } | 82 } |
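
The mechanical change throughout this CL is folding the two-instruction smi check (tst against kSmiTagMask followed by a conditional branch) into the MacroAssembler helpers JumpIfSmi and JumpIfNotSmi. A minimal sketch of what the helpers are assumed to expand to, inferred only from the instruction pairs they replace in this diff (the real definitions live in macro-assembler-arm and are not shown here):

  void MacroAssembler::JumpIfSmi(Register value, Label* smi_label) {
    // kSmiTag == 0, so a smi has a clear tag bit and tst sets the Z flag.
    tst(value, Operand(kSmiTagMask));
    b(eq, smi_label);
  }

  void MacroAssembler::JumpIfNotSmi(Register value, Label* not_smi_label) {
    tst(value, Operand(kSmiTagMask));
    b(ne, not_smi_label);
  }
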
(...skipping 938 matching lines...)
1022 static void EmitSmiNonsmiComparison(MacroAssembler* masm, | 1021 static void EmitSmiNonsmiComparison(MacroAssembler* masm, |
1023 Register lhs, | 1022 Register lhs, |
1024 Register rhs, | 1023 Register rhs, |
1025 Label* lhs_not_nan, | 1024 Label* lhs_not_nan, |
1026 Label* slow, | 1025 Label* slow, |
1027 bool strict) { | 1026 bool strict) { |
1028 ASSERT((lhs.is(r0) && rhs.is(r1)) || | 1027 ASSERT((lhs.is(r0) && rhs.is(r1)) || |
1029 (lhs.is(r1) && rhs.is(r0))); | 1028 (lhs.is(r1) && rhs.is(r0))); |
1030 | 1029 |
1031 Label rhs_is_smi; | 1030 Label rhs_is_smi; |
1032 __ tst(rhs, Operand(kSmiTagMask)); | 1031 __ JumpIfSmi(rhs, &rhs_is_smi); |
1033 __ b(eq, &rhs_is_smi); | |
1034 | 1032 |
1035 // Lhs is a Smi. Check whether the rhs is a heap number. | 1033 // Lhs is a Smi. Check whether the rhs is a heap number. |
1036 __ CompareObjectType(rhs, r4, r4, HEAP_NUMBER_TYPE); | 1034 __ CompareObjectType(rhs, r4, r4, HEAP_NUMBER_TYPE); |
1037 if (strict) { | 1035 if (strict) { |
1038 // If rhs is not a number and lhs is a Smi then strict equality cannot | 1036 // If rhs is not a number and lhs is a Smi then strict equality cannot |
1039 // succeed. Return non-equal. | 1037 // succeed. Return non-equal. |
1040 // If rhs is r0 then there is already a non-zero value in it. | 1038 // If rhs is r0 then there is already a non-zero value in it. |
1041 if (!rhs.is(r0)) { | 1039 if (!rhs.is(r0)) { |
1042 __ mov(r0, Operand(NOT_EQUAL), LeaveCC, ne); | 1040 __ mov(r0, Operand(NOT_EQUAL), LeaveCC, ne); |
1043 } | 1041 } |
(...skipping 407 matching lines...)
1451 void CompareStub::Generate(MacroAssembler* masm) { | 1449 void CompareStub::Generate(MacroAssembler* masm) { |
1452 ASSERT((lhs_.is(r0) && rhs_.is(r1)) || | 1450 ASSERT((lhs_.is(r0) && rhs_.is(r1)) || |
1453 (lhs_.is(r1) && rhs_.is(r0))); | 1451 (lhs_.is(r1) && rhs_.is(r0))); |
1454 | 1452 |
1455 Label slow; // Call builtin. | 1453 Label slow; // Call builtin. |
1456 Label not_smis, both_loaded_as_doubles, lhs_not_nan; | 1454 Label not_smis, both_loaded_as_doubles, lhs_not_nan; |
1457 | 1455 |
1458 if (include_smi_compare_) { | 1456 if (include_smi_compare_) { |
1459 Label not_two_smis, smi_done; | 1457 Label not_two_smis, smi_done; |
1460 __ orr(r2, r1, r0); | 1458 __ orr(r2, r1, r0); |
1461 __ tst(r2, Operand(kSmiTagMask)); | 1459 __ JumpIfNotSmi(r2, ¬_two_smis); |
1462 __ b(ne, ¬_two_smis); | |
1463 __ mov(r1, Operand(r1, ASR, 1)); | 1460 __ mov(r1, Operand(r1, ASR, 1)); |
1464 __ sub(r0, r1, Operand(r0, ASR, 1)); | 1461 __ sub(r0, r1, Operand(r0, ASR, 1)); |
1465 __ Ret(); | 1462 __ Ret(); |
1466 __ bind(¬_two_smis); | 1463 __ bind(¬_two_smis); |
1467 } else if (FLAG_debug_code) { | 1464 } else if (FLAG_debug_code) { |
1468 __ orr(r2, r1, r0); | 1465 __ orr(r2, r1, r0); |
1469 __ tst(r2, Operand(kSmiTagMask)); | 1466 __ tst(r2, Operand(kSmiTagMask)); |
1470 __ Assert(ne, "CompareStub: unexpected smi operands."); | 1467 __ Assert(ne, "CompareStub: unexpected smi operands."); |
1471 } | 1468 } |
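
The orr/JumpIfNotSmi pair above is the usual both-operands-are-smis test: with a zero smi tag, OR-ing the two tagged words leaves the low bit clear only when both low bits are clear, i.e. only when both values are smis. A standalone C++ illustration of the bit trick (not V8 code; the values are made up):

  #include <cstdint>
  #include <cstdio>

  int main() {
    const uint32_t kSmiTagMask = 1;    // one tag bit, kSmiTag == 0
    uint32_t smi_a = 7u << 1;          // tagged smi 7
    uint32_t smi_b = 3u << 1;          // tagged smi 3
    uint32_t heap_ptr = 0x1235;        // heap pointers have the low bit set
    std::printf("%d\n", ((smi_a | smi_b) & kSmiTagMask) == 0);     // 1: both smis
    std::printf("%d\n", ((smi_a | heap_ptr) & kSmiTagMask) == 0);  // 0: one is not
    return 0;
  }
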
1472 | 1469 |
1473 // NOTICE! This code is only reached after a smi-fast-case check, so | 1470 // NOTICE! This code is only reached after a smi-fast-case check, so |
1474 // it is certain that at least one operand isn't a smi. | 1471 // it is certain that at least one operand isn't a smi. |
1475 | 1472 |
1476 // Handle the case where the objects are identical. Either returns the answer | 1473 // Handle the case where the objects are identical. Either returns the answer |
1477 // or goes to slow. Only falls through if the objects were not identical. | 1474 // or goes to slow. Only falls through if the objects were not identical. |
1478 EmitIdenticalObjectComparison(masm, &slow, cc_, never_nan_nan_); | 1475 EmitIdenticalObjectComparison(masm, &slow, cc_, never_nan_nan_); |
1479 | 1476 |
1480 // If either is a Smi (we know that not both are), then they can only | 1477 // If either is a Smi (we know that not both are), then they can only |
1481 // be strictly equal if the other is a HeapNumber. | 1478 // be strictly equal if the other is a HeapNumber. |
1482 STATIC_ASSERT(kSmiTag == 0); | 1479 STATIC_ASSERT(kSmiTag == 0); |
1483 ASSERT_EQ(0, Smi::FromInt(0)); | 1480 ASSERT_EQ(0, Smi::FromInt(0)); |
1484 __ and_(r2, lhs_, Operand(rhs_)); | 1481 __ and_(r2, lhs_, Operand(rhs_)); |
1485 __ tst(r2, Operand(kSmiTagMask)); | 1482 __ JumpIfNotSmi(r2, ¬_smis); |
1486 __ b(ne, ¬_smis); | |
1487 // One operand is a smi. EmitSmiNonsmiComparison generates code that can: | 1483 // One operand is a smi. EmitSmiNonsmiComparison generates code that can: |
1488 // 1) Return the answer. | 1484 // 1) Return the answer. |
1489 // 2) Go to slow. | 1485 // 2) Go to slow. |
1490 // 3) Fall through to both_loaded_as_doubles. | 1486 // 3) Fall through to both_loaded_as_doubles. |
1491 // 4) Jump to lhs_not_nan. | 1487 // 4) Jump to lhs_not_nan. |
1492 // In cases 3 and 4 we have found out we were dealing with a number-number | 1488 // In cases 3 and 4 we have found out we were dealing with a number-number |
1493 // comparison. If VFP3 is supported the double values of the numbers have | 1489 // comparison. If VFP3 is supported the double values of the numbers have |
1494 // been loaded into d7 and d6. Otherwise, the double values have been loaded | 1490 // been loaded into d7 and d6. Otherwise, the double values have been loaded |
1495 // into r0, r1, r2, and r3. | 1491 // into r0, r1, r2, and r3. |
1496 EmitSmiNonsmiComparison(masm, lhs_, rhs_, &lhs_not_nan, &slow, strict_); | 1492 EmitSmiNonsmiComparison(masm, lhs_, rhs_, &lhs_not_nan, &slow, strict_); |
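
The and_/JumpIfNotSmi pair a few lines up is the complementary test: AND-ing the tagged words leaves the low bit clear whenever at least one operand is a smi, so the jump is taken only when neither is a smi. Because the identical-object case and the two-smi fast case were handled earlier, falling through means exactly one smi and one heap object, which is what EmitSmiNonsmiComparison expects. A standalone C++ illustration (not V8 code):

  #include <cassert>
  #include <cstdint>

  bool AtLeastOneSmi(uint32_t lhs, uint32_t rhs) {
    const uint32_t kSmiTagMask = 1;          // one tag bit, kSmiTag == 0
    return ((lhs & rhs) & kSmiTagMask) == 0;
  }

  int main() {
    uint32_t smi = 5u << 1;    // tagged smi 5
    uint32_t heap = 0x2001;    // heap pointer, low bit set
    assert(AtLeastOneSmi(smi, heap));
    assert(!AtLeastOneSmi(heap, heap));
    return 0;
  }
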
(...skipping 943 matching lines...)
2440 Label not_smis; | 2436 Label not_smis; |
2441 | 2437 |
2442 Register left = r1; | 2438 Register left = r1; |
2443 Register right = r0; | 2439 Register right = r0; |
2444 Register scratch1 = r7; | 2440 Register scratch1 = r7; |
2445 Register scratch2 = r9; | 2441 Register scratch2 = r9; |
2446 | 2442 |
2447 // Perform combined smi check on both operands. | 2443 // Perform combined smi check on both operands. |
2448 __ orr(scratch1, left, Operand(right)); | 2444 __ orr(scratch1, left, Operand(right)); |
2449 STATIC_ASSERT(kSmiTag == 0); | 2445 STATIC_ASSERT(kSmiTag == 0); |
2450 __ tst(scratch1, Operand(kSmiTagMask)); | 2446 __ JumpIfNotSmi(scratch1, ¬_smis); |
2451 __ b(ne, ¬_smis); | |
2452 | 2447 |
2453 // If the smi-smi operation results in a smi, a return is generated. | 2448 // If the smi-smi operation results in a smi, a return is generated. |
2454 GenerateSmiSmiOperation(masm); | 2449 GenerateSmiSmiOperation(masm); |
2455 | 2450 |
2456 // If heap number results are possible, generate the result in an allocated | 2451 // If heap number results are possible, generate the result in an allocated |
2457 // heap number. | 2452 // heap number. |
2458 if (allow_heapnumber_results == ALLOW_HEAPNUMBER_RESULTS) { | 2453 if (allow_heapnumber_results == ALLOW_HEAPNUMBER_RESULTS) { |
2459 GenerateFPOperation(masm, true, use_runtime, gc_required); | 2454 GenerateFPOperation(masm, true, use_runtime, gc_required); |
2460 } | 2455 } |
2461 __ bind(¬_smis); | 2456 __ bind(¬_smis); |
(...skipping 1614 matching lines...)
4076 ExternalReference address_of_regexp_stack_memory_size = | 4071 ExternalReference address_of_regexp_stack_memory_size = |
4077 ExternalReference::address_of_regexp_stack_memory_size(isolate); | 4072 ExternalReference::address_of_regexp_stack_memory_size(isolate); |
4078 __ mov(r0, Operand(address_of_regexp_stack_memory_size)); | 4073 __ mov(r0, Operand(address_of_regexp_stack_memory_size)); |
4079 __ ldr(r0, MemOperand(r0, 0)); | 4074 __ ldr(r0, MemOperand(r0, 0)); |
4080 __ tst(r0, Operand(r0)); | 4075 __ tst(r0, Operand(r0)); |
4081 __ b(eq, &runtime); | 4076 __ b(eq, &runtime); |
4082 | 4077 |
4083 // Check that the first argument is a JSRegExp object. | 4078 // Check that the first argument is a JSRegExp object. |
4084 __ ldr(r0, MemOperand(sp, kJSRegExpOffset)); | 4079 __ ldr(r0, MemOperand(sp, kJSRegExpOffset)); |
4085 STATIC_ASSERT(kSmiTag == 0); | 4080 STATIC_ASSERT(kSmiTag == 0); |
4086 __ tst(r0, Operand(kSmiTagMask)); | 4081 __ JumpIfSmi(r0, &runtime); |
4087 __ b(eq, &runtime); | |
4088 __ CompareObjectType(r0, r1, r1, JS_REGEXP_TYPE); | 4082 __ CompareObjectType(r0, r1, r1, JS_REGEXP_TYPE); |
4089 __ b(ne, &runtime); | 4083 __ b(ne, &runtime); |
4090 | 4084 |
4091 // Check that the RegExp has been compiled (data contains a fixed array). | 4085 // Check that the RegExp has been compiled (data contains a fixed array). |
4092 __ ldr(regexp_data, FieldMemOperand(r0, JSRegExp::kDataOffset)); | 4086 __ ldr(regexp_data, FieldMemOperand(r0, JSRegExp::kDataOffset)); |
4093 if (FLAG_debug_code) { | 4087 if (FLAG_debug_code) { |
4094 __ tst(regexp_data, Operand(kSmiTagMask)); | 4088 __ tst(regexp_data, Operand(kSmiTagMask)); |
4095 __ Check(ne, "Unexpected type for RegExp data, FixedArray expected"); | 4089 __ Check(ne, "Unexpected type for RegExp data, FixedArray expected"); |
4096 __ CompareObjectType(regexp_data, r0, r0, FIXED_ARRAY_TYPE); | 4090 __ CompareObjectType(regexp_data, r0, r0, FIXED_ARRAY_TYPE); |
4097 __ Check(eq, "Unexpected type for RegExp data, FixedArray expected"); | 4091 __ Check(eq, "Unexpected type for RegExp data, FixedArray expected"); |
(...skipping 15 matching lines...)
4113 STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1); | 4107 STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1); |
4114 __ add(r2, r2, Operand(2)); // r2 was a smi. | 4108 __ add(r2, r2, Operand(2)); // r2 was a smi. |
4115 // Check that the static offsets vector buffer is large enough. | 4109 // Check that the static offsets vector buffer is large enough. |
4116 __ cmp(r2, Operand(OffsetsVector::kStaticOffsetsVectorSize)); | 4110 __ cmp(r2, Operand(OffsetsVector::kStaticOffsetsVectorSize)); |
4117 __ b(hi, &runtime); | 4111 __ b(hi, &runtime); |
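
A note on the add of 2 above: each capture group contributes two offsets (start and end) and the whole match contributes two more, so c capture groups need (c + 1) * 2 slots in the offsets vector. With kSmiTag == 0 and kSmiTagSize + kSmiShiftSize == 1, the smi encoding of c is c << 1 == 2c (presumably what the elided lines above loaded into r2), and 2c + 2 == (c + 1) * 2, so a single add of 2 yields the slot count without untagging.
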
4118 | 4112 |
4119 // r2: Number of capture registers | 4113 // r2: Number of capture registers |
4120 // regexp_data: RegExp data (FixedArray) | 4114 // regexp_data: RegExp data (FixedArray) |
4121 // Check that the second argument is a string. | 4115 // Check that the second argument is a string. |
4122 __ ldr(subject, MemOperand(sp, kSubjectOffset)); | 4116 __ ldr(subject, MemOperand(sp, kSubjectOffset)); |
4123 __ tst(subject, Operand(kSmiTagMask)); | 4117 __ JumpIfSmi(subject, &runtime); |
4124 __ b(eq, &runtime); | |
4125 Condition is_string = masm->IsObjectStringType(subject, r0); | 4118 Condition is_string = masm->IsObjectStringType(subject, r0); |
4126 __ b(NegateCondition(is_string), &runtime); | 4119 __ b(NegateCondition(is_string), &runtime); |
4127 // Get the length of the string to r3. | 4120 // Get the length of the string to r3. |
4128 __ ldr(r3, FieldMemOperand(subject, String::kLengthOffset)); | 4121 __ ldr(r3, FieldMemOperand(subject, String::kLengthOffset)); |
4129 | 4122 |
4130 // r2: Number of capture registers | 4123 // r2: Number of capture registers |
4131 // r3: Length of subject string as a smi | 4124 // r3: Length of subject string as a smi |
4132 // subject: Subject string | 4125 // subject: Subject string |
4133 // regexp_data: RegExp data (FixedArray) | 4126 // regexp_data: RegExp data (FixedArray) |
4134 // Check that the third argument is a positive smi less than the subject | 4127 // Check that the third argument is a positive smi less than the subject |
4135 // string length. A negative value will be greater (unsigned comparison). | 4128 // string length. A negative value will be greater (unsigned comparison). |
4136 __ ldr(r0, MemOperand(sp, kPreviousIndexOffset)); | 4129 __ ldr(r0, MemOperand(sp, kPreviousIndexOffset)); |
4137 __ tst(r0, Operand(kSmiTagMask)); | 4130 __ JumpIfNotSmi(r0, &runtime); |
4138 __ b(ne, &runtime); | |
4139 __ cmp(r3, Operand(r0)); | 4131 __ cmp(r3, Operand(r0)); |
4140 __ b(ls, &runtime); | 4132 __ b(ls, &runtime); |
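
The single unsigned comparison above works because both words are identically tagged smis, so comparing the raw tagged values preserves their order, and because a negative smi reinterpreted as an unsigned word is huge, so one branch rejects both out-of-range and negative indices. A standalone C++ illustration (not V8 code):

  #include <cstdint>
  #include <cstdio>

  int main() {
    auto smi = [](int32_t n) { return static_cast<uint32_t>(n) << 1; };  // kSmiTag == 0
    uint32_t length = smi(10);
    std::printf("%d\n", length > smi(3));    // 1: valid previous index
    std::printf("%d\n", length > smi(12));   // 0: past the end -> runtime
    std::printf("%d\n", length > smi(-1));   // 0: negative looks huge unsigned -> runtime
    return 0;
  }
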
4141 | 4133 |
4142 // r2: Number of capture registers | 4134 // r2: Number of capture registers |
4143 // subject: Subject string | 4135 // subject: Subject string |
4144 // regexp_data: RegExp data (FixedArray) | 4136 // regexp_data: RegExp data (FixedArray) |
4145 // Check that the fourth object is a JSArray object. | 4137 // Check that the fourth object is a JSArray object. |
4146 __ ldr(r0, MemOperand(sp, kLastMatchInfoOffset)); | 4138 __ ldr(r0, MemOperand(sp, kLastMatchInfoOffset)); |
4147 __ tst(r0, Operand(kSmiTagMask)); | 4139 __ JumpIfSmi(r0, &runtime); |
4148 __ b(eq, &runtime); | |
4149 __ CompareObjectType(r0, r1, r1, JS_ARRAY_TYPE); | 4140 __ CompareObjectType(r0, r1, r1, JS_ARRAY_TYPE); |
4150 __ b(ne, &runtime); | 4141 __ b(ne, &runtime); |
4151 // Check that the JSArray is in fast case. | 4142 // Check that the JSArray is in fast case. |
4152 __ ldr(last_match_info_elements, | 4143 __ ldr(last_match_info_elements, |
4153 FieldMemOperand(r0, JSArray::kElementsOffset)); | 4144 FieldMemOperand(r0, JSArray::kElementsOffset)); |
4154 __ ldr(r0, FieldMemOperand(last_match_info_elements, HeapObject::kMapOffset)); | 4145 __ ldr(r0, FieldMemOperand(last_match_info_elements, HeapObject::kMapOffset)); |
4155 __ LoadRoot(ip, Heap::kFixedArrayMapRootIndex); | 4146 __ LoadRoot(ip, Heap::kFixedArrayMapRootIndex); |
4156 __ cmp(r0, ip); | 4147 __ cmp(r0, ip); |
4157 __ b(ne, &runtime); | 4148 __ b(ne, &runtime); |
4158 // Check that the last match info has space for the capture registers and the | 4149 // Check that the last match info has space for the capture registers and the |
(...skipping 239 matching lines...)
4398 | 4389 |
4399 void RegExpConstructResultStub::Generate(MacroAssembler* masm) { | 4390 void RegExpConstructResultStub::Generate(MacroAssembler* masm) { |
4400 const int kMaxInlineLength = 100; | 4391 const int kMaxInlineLength = 100; |
4401 Label slowcase; | 4392 Label slowcase; |
4402 Label done; | 4393 Label done; |
4403 Factory* factory = masm->isolate()->factory(); | 4394 Factory* factory = masm->isolate()->factory(); |
4404 | 4395 |
4405 __ ldr(r1, MemOperand(sp, kPointerSize * 2)); | 4396 __ ldr(r1, MemOperand(sp, kPointerSize * 2)); |
4406 STATIC_ASSERT(kSmiTag == 0); | 4397 STATIC_ASSERT(kSmiTag == 0); |
4407 STATIC_ASSERT(kSmiTagSize == 1); | 4398 STATIC_ASSERT(kSmiTagSize == 1); |
4408 __ tst(r1, Operand(kSmiTagMask)); | 4399 __ JumpIfNotSmi(r1, &slowcase); |
4409 __ b(ne, &slowcase); | |
4410 __ cmp(r1, Operand(Smi::FromInt(kMaxInlineLength))); | 4400 __ cmp(r1, Operand(Smi::FromInt(kMaxInlineLength))); |
4411 __ b(hi, &slowcase); | 4401 __ b(hi, &slowcase); |
4412 // Smi-tagging is equivalent to multiplying by 2. | 4402 // Smi-tagging is equivalent to multiplying by 2. |
4413 // Allocate RegExpResult followed by FixedArray with size computed from the length in r1. | 4403 // Allocate RegExpResult followed by FixedArray with size computed from the length in r1. |
4414 // JSArray: [Map][empty properties][Elements][Length-smi][index][input] | 4404 // JSArray: [Map][empty properties][Elements][Length-smi][index][input] |
4415 // Elements: [Map][Length][..elements..] | 4405 // Elements: [Map][Length][..elements..] |
4416 // Size of JSArray with two in-object properties and the header of a | 4406 // Size of JSArray with two in-object properties and the header of a |
4417 // FixedArray. | 4407 // FixedArray. |
4418 int objects_size = | 4408 int objects_size = |
4419 (JSRegExpResult::kSize + FixedArray::kHeaderSize) / kPointerSize; | 4409 (JSRegExpResult::kSize + FixedArray::kHeaderSize) / kPointerSize; |
(...skipping 860 matching lines...)
5280 __ b(lt, &runtime); | 5270 __ b(lt, &runtime); |
5281 | 5271 |
5282 // r2: length | 5272 // r2: length |
5283 // r3: from index (untagged smi) | 5273 // r3: from index (untagged smi) |
5284 // r6 (a.k.a. to): to (smi) | 5274 // r6 (a.k.a. to): to (smi) |
5285 // r7 (a.k.a. from): from offset (smi) | 5275 // r7 (a.k.a. from): from offset (smi) |
5286 | 5276 |
5287 // Make sure the first argument is a sequential (or flat) string. | 5277 // Make sure the first argument is a sequential (or flat) string. |
5288 __ ldr(r5, MemOperand(sp, kStringOffset)); | 5278 __ ldr(r5, MemOperand(sp, kStringOffset)); |
5289 STATIC_ASSERT(kSmiTag == 0); | 5279 STATIC_ASSERT(kSmiTag == 0); |
5290 __ tst(r5, Operand(kSmiTagMask)); | 5280 __ JumpIfSmi(r5, &runtime); |
5291 __ b(eq, &runtime); | |
5292 Condition is_string = masm->IsObjectStringType(r5, r1); | 5281 Condition is_string = masm->IsObjectStringType(r5, r1); |
5293 __ b(NegateCondition(is_string), &runtime); | 5282 __ b(NegateCondition(is_string), &runtime); |
5294 | 5283 |
5295 // r1: instance type | 5284 // r1: instance type |
5296 // r2: length | 5285 // r2: length |
5297 // r3: from index (untagged smi) | 5286 // r3: from index (untagged smi) |
5298 // r5: string | 5287 // r5: string |
5299 // r6 (a.k.a. to): to (smi) | 5288 // r6 (a.k.a. to): to (smi) |
5300 // r7 (a.k.a. from): from offset (smi) | 5289 // r7 (a.k.a. from): from offset (smi) |
5301 Label seq_string; | 5290 Label seq_string; |
(...skipping 616 matching lines...)
5918 __ str(arg, MemOperand(sp, stack_offset)); | 5907 __ str(arg, MemOperand(sp, stack_offset)); |
5919 | 5908 |
5920 __ bind(&done); | 5909 __ bind(&done); |
5921 } | 5910 } |
5922 | 5911 |
5923 | 5912 |
5924 void ICCompareStub::GenerateSmis(MacroAssembler* masm) { | 5913 void ICCompareStub::GenerateSmis(MacroAssembler* masm) { |
5925 ASSERT(state_ == CompareIC::SMIS); | 5914 ASSERT(state_ == CompareIC::SMIS); |
5926 Label miss; | 5915 Label miss; |
5927 __ orr(r2, r1, r0); | 5916 __ orr(r2, r1, r0); |
5928 __ tst(r2, Operand(kSmiTagMask)); | 5917 __ JumpIfNotSmi(r2, &miss); |
5929 __ b(ne, &miss); | |
5930 | 5918 |
5931 if (GetCondition() == eq) { | 5919 if (GetCondition() == eq) { |
5932 // For equality we do not care about the sign of the result. | 5920 // For equality we do not care about the sign of the result. |
5933 __ sub(r0, r0, r1, SetCC); | 5921 __ sub(r0, r0, r1, SetCC); |
5934 } else { | 5922 } else { |
5935 // Untag before subtracting to avoid handling overflow. | 5923 // Untag before subtracting to avoid handling overflow. |
5936 __ SmiUntag(r1); | 5924 __ SmiUntag(r1); |
5937 __ sub(r0, r1, SmiUntagOperand(r0)); | 5925 __ sub(r0, r1, SmiUntagOperand(r0)); |
5938 } | 5926 } |
5939 __ Ret(); | 5927 __ Ret(); |
5940 | 5928 |
5941 __ bind(&miss); | 5929 __ bind(&miss); |
5942 GenerateMiss(masm); | 5930 GenerateMiss(masm); |
5943 } | 5931 } |
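
On the two branches in GenerateSmis: for equality only zero versus non-zero matters, so subtracting the tagged words is fine even if it wraps; for an ordered comparison the sign matters, and subtracting the tagged (doubled) values can overflow 32 bits, so the operands are untagged first. A standalone C++ illustration of the overflow case (not V8 code):

  #include <cstdint>
  #include <cstdio>

  int main() {
    int32_t big = (1 << 30) - 1;       // largest smi value
    int32_t small = -(1 << 30);        // smallest smi value
    int32_t tagged_big = big * 2;      // tagged representation (kSmiTag == 0)
    int32_t tagged_small = small * 2;
    // Subtracting the tagged words wraps and reports the wrong sign...
    int32_t tagged_diff = static_cast<int32_t>(
        static_cast<uint32_t>(tagged_big) - static_cast<uint32_t>(tagged_small));
    // ...while subtracting the untagged values stays in range.
    int32_t untagged_diff = big - small;
    std::printf("tagged: %d, untagged: %d\n", tagged_diff, untagged_diff);
    return 0;
  }
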
5944 | 5932 |
5945 | 5933 |
5946 void ICCompareStub::GenerateHeapNumbers(MacroAssembler* masm) { | 5934 void ICCompareStub::GenerateHeapNumbers(MacroAssembler* masm) { |
5947 ASSERT(state_ == CompareIC::HEAP_NUMBERS); | 5935 ASSERT(state_ == CompareIC::HEAP_NUMBERS); |
5948 | 5936 |
5949 Label generic_stub; | 5937 Label generic_stub; |
5950 Label unordered; | 5938 Label unordered; |
5951 Label miss; | 5939 Label miss; |
5952 __ and_(r2, r1, Operand(r0)); | 5940 __ and_(r2, r1, Operand(r0)); |
5953 __ tst(r2, Operand(kSmiTagMask)); | 5941 __ JumpIfSmi(r2, &generic_stub); |
5954 __ b(eq, &generic_stub); | |
5955 | 5942 |
5956 __ CompareObjectType(r0, r2, r2, HEAP_NUMBER_TYPE); | 5943 __ CompareObjectType(r0, r2, r2, HEAP_NUMBER_TYPE); |
5957 __ b(ne, &miss); | 5944 __ b(ne, &miss); |
5958 __ CompareObjectType(r1, r2, r2, HEAP_NUMBER_TYPE); | 5945 __ CompareObjectType(r1, r2, r2, HEAP_NUMBER_TYPE); |
5959 __ b(ne, &miss); | 5946 __ b(ne, &miss); |
5960 | 5947 |
5961 // Inline the double comparison, falling back to the general compare | 5948 // Inline the double comparison, falling back to the general compare |
5962 // stub if NaN is involved or VFP3 is unsupported. | 5949 // stub if NaN is involved or VFP3 is unsupported. |
5963 if (CpuFeatures::IsSupported(VFP3)) { | 5950 if (CpuFeatures::IsSupported(VFP3)) { |
5964 CpuFeatures::Scope scope(VFP3); | 5951 CpuFeatures::Scope scope(VFP3); |
(...skipping 128 matching lines...)
6093 | 6080 |
6094 __ bind(&miss); | 6081 __ bind(&miss); |
6095 GenerateMiss(masm); | 6082 GenerateMiss(masm); |
6096 } | 6083 } |
6097 | 6084 |
6098 | 6085 |
6099 void ICCompareStub::GenerateObjects(MacroAssembler* masm) { | 6086 void ICCompareStub::GenerateObjects(MacroAssembler* masm) { |
6100 ASSERT(state_ == CompareIC::OBJECTS); | 6087 ASSERT(state_ == CompareIC::OBJECTS); |
6101 Label miss; | 6088 Label miss; |
6102 __ and_(r2, r1, Operand(r0)); | 6089 __ and_(r2, r1, Operand(r0)); |
6103 __ tst(r2, Operand(kSmiTagMask)); | 6090 __ JumpIfSmi(r2, &miss); |
6104 __ b(eq, &miss); | |
6105 | 6091 |
6106 __ CompareObjectType(r0, r2, r2, JS_OBJECT_TYPE); | 6092 __ CompareObjectType(r0, r2, r2, JS_OBJECT_TYPE); |
6107 __ b(ne, &miss); | 6093 __ b(ne, &miss); |
6108 __ CompareObjectType(r1, r2, r2, JS_OBJECT_TYPE); | 6094 __ CompareObjectType(r1, r2, r2, JS_OBJECT_TYPE); |
6109 __ b(ne, &miss); | 6095 __ b(ne, &miss); |
6110 | 6096 |
6111 ASSERT(GetCondition() == eq); | 6097 ASSERT(GetCondition() == eq); |
6112 __ sub(r0, r0, Operand(r1)); | 6098 __ sub(r0, r0, Operand(r1)); |
6113 __ Ret(); | 6099 __ Ret(); |
6114 | 6100 |
(...skipping 282 matching lines...)
6397 __ mov(result, Operand(0)); | 6383 __ mov(result, Operand(0)); |
6398 __ Ret(); | 6384 __ Ret(); |
6399 } | 6385 } |
6400 | 6386 |
6401 | 6387 |
6402 #undef __ | 6388 #undef __ |
6403 | 6389 |
6404 } } // namespace v8::internal | 6390 } } // namespace v8::internal |
6405 | 6391 |
6406 #endif // V8_TARGET_ARCH_ARM | 6392 #endif // V8_TARGET_ARCH_ARM |