OLD | NEW |
1 // Copyright 2006-2009 the V8 project authors. All rights reserved. | 1 // Copyright 2006-2009 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 503 matching lines...)
514 static void GenerateCallFunction(MacroAssembler* masm, | 514 static void GenerateCallFunction(MacroAssembler* masm, |
515 Object* object, | 515 Object* object, |
516 const ParameterCount& arguments, | 516 const ParameterCount& arguments, |
517 Label* miss) { | 517 Label* miss) { |
518 // ----------- S t a t e ------------- | 518 // ----------- S t a t e ------------- |
519 // -- r0: receiver | 519 // -- r0: receiver |
520 // -- r1: function to call | 520 // -- r1: function to call |
521 // ----------------------------------- | 521 // ----------------------------------- |
522 | 522 |
523 // Check that the function really is a function. | 523 // Check that the function really is a function. |
524 __ BranchOnSmi(r1, miss); | 524 __ JumpIfSmi(r1, miss); |
525 __ CompareObjectType(r1, r3, r3, JS_FUNCTION_TYPE); | 525 __ CompareObjectType(r1, r3, r3, JS_FUNCTION_TYPE); |
526 __ b(ne, miss); | 526 __ b(ne, miss); |
527 | 527 |
528 // Patch the receiver on the stack with the global proxy if | 528 // Patch the receiver on the stack with the global proxy if |
529 // necessary. | 529 // necessary. |
530 if (object->IsGlobalObject()) { | 530 if (object->IsGlobalObject()) { |
531 __ ldr(r3, FieldMemOperand(r0, GlobalObject::kGlobalReceiverOffset)); | 531 __ ldr(r3, FieldMemOperand(r0, GlobalObject::kGlobalReceiverOffset)); |
532 __ str(r3, MemOperand(sp, arguments.immediate() * kPointerSize)); | 532 __ str(r3, MemOperand(sp, arguments.immediate() * kPointerSize)); |
533 } | 533 } |
534 | 534 |
(...skipping 118 matching lines...)
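
The hunk above shows the core of this change: BranchOnSmi/BranchOnNotSmi become JumpIfSmi/JumpIfNotSmi. The check itself is unchanged: on 32-bit ARM, V8 smis carry a zero tag in the low bit (kSmiTag == 0, kSmiTagSize == 1), so the helper only has to test that bit and branch. A minimal standalone sketch of the predicate being tested (illustrative names, not the V8 API):

#include <cassert>
#include <cstdint>

// Mirrors V8's 32-bit smi encoding: one tag bit, value shifted left by one,
// and the tag for smis is 0, so a tagged smi always has its low bit clear.
const intptr_t kSmiTag = 0;
const int kSmiTagSize = 1;
const intptr_t kSmiTagMask = (1 << kSmiTagSize) - 1;

// The test JumpIfSmi/JumpIfNotSmi perform before branching: AND with the
// tag mask and compare against kSmiTag (a tst plus conditional branch on ARM).
bool IsSmi(intptr_t tagged) { return (tagged & kSmiTagMask) == kSmiTag; }

intptr_t TagSmi(int32_t value) {
  return static_cast<intptr_t>(value) << kSmiTagSize;
}

int main() {
  assert(IsSmi(TagSmi(42)));       // smi: low bit clear
  assert(!IsSmi(TagSmi(42) | 1));  // heap object pointers have the low bit set
  return 0;
}
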
653 LookupResult* lookup, | 653 LookupResult* lookup, |
654 Register receiver, | 654 Register receiver, |
655 Register scratch1, | 655 Register scratch1, |
656 Register scratch2, | 656 Register scratch2, |
657 Register scratch3, | 657 Register scratch3, |
658 Label* miss) { | 658 Label* miss) { |
659 ASSERT(holder->HasNamedInterceptor()); | 659 ASSERT(holder->HasNamedInterceptor()); |
660 ASSERT(!holder->GetNamedInterceptor()->getter()->IsUndefined()); | 660 ASSERT(!holder->GetNamedInterceptor()->getter()->IsUndefined()); |
661 | 661 |
662 // Check that the receiver isn't a smi. | 662 // Check that the receiver isn't a smi. |
663 __ BranchOnSmi(receiver, miss); | 663 __ JumpIfSmi(receiver, miss); |
664 | 664 |
665 CallOptimization optimization(lookup); | 665 CallOptimization optimization(lookup); |
666 | 666 |
667 if (optimization.is_constant_call()) { | 667 if (optimization.is_constant_call()) { |
668 CompileCacheable(masm, | 668 CompileCacheable(masm, |
669 object, | 669 object, |
670 receiver, | 670 receiver, |
671 scratch1, | 671 scratch1, |
672 scratch2, | 672 scratch2, |
673 scratch3, | 673 scratch3, |
(...skipping 562 matching lines...)
1236 Register name_reg, | 1236 Register name_reg, |
1237 Register scratch1, | 1237 Register scratch1, |
1238 Register scratch2, | 1238 Register scratch2, |
1239 Register scratch3, | 1239 Register scratch3, |
1240 String* name, | 1240 String* name, |
1241 Label* miss) { | 1241 Label* miss) { |
1242 ASSERT(interceptor_holder->HasNamedInterceptor()); | 1242 ASSERT(interceptor_holder->HasNamedInterceptor()); |
1243 ASSERT(!interceptor_holder->GetNamedInterceptor()->getter()->IsUndefined()); | 1243 ASSERT(!interceptor_holder->GetNamedInterceptor()->getter()->IsUndefined()); |
1244 | 1244 |
1245 // Check that the receiver isn't a smi. | 1245 // Check that the receiver isn't a smi. |
1246 __ BranchOnSmi(receiver, miss); | 1246 __ JumpIfSmi(receiver, miss); |
1247 | 1247 |
1248 // So far the most popular follow ups for interceptor loads are FIELD | 1248 // So far the most popular follow ups for interceptor loads are FIELD |
1249 // and CALLBACKS, so inline only them, other cases may be added | 1249 // and CALLBACKS, so inline only them, other cases may be added |
1250 // later. | 1250 // later. |
1251 bool compile_followup_inline = false; | 1251 bool compile_followup_inline = false; |
1252 if (lookup->IsProperty() && lookup->IsCacheable()) { | 1252 if (lookup->IsProperty() && lookup->IsCacheable()) { |
1253 if (lookup->type() == FIELD) { | 1253 if (lookup->type() == FIELD) { |
1254 compile_followup_inline = true; | 1254 compile_followup_inline = true; |
1255 } else if (lookup->type() == CALLBACKS && | 1255 } else if (lookup->type() == CALLBACKS && |
1256 lookup->GetCallbackObject()->IsAccessorInfo() && | 1256 lookup->GetCallbackObject()->IsAccessorInfo() && |
(...skipping 247 matching lines...)
1504 | 1504 |
1505 GenerateNameCheck(name, &miss); | 1505 GenerateNameCheck(name, &miss); |
1506 | 1506 |
1507 Register receiver = r1; | 1507 Register receiver = r1; |
1508 | 1508 |
1509 // Get the receiver from the stack | 1509 // Get the receiver from the stack |
1510 const int argc = arguments().immediate(); | 1510 const int argc = arguments().immediate(); |
1511 __ ldr(receiver, MemOperand(sp, argc * kPointerSize)); | 1511 __ ldr(receiver, MemOperand(sp, argc * kPointerSize)); |
1512 | 1512 |
1513 // Check that the receiver isn't a smi. | 1513 // Check that the receiver isn't a smi. |
1514 __ BranchOnSmi(receiver, &miss); | 1514 __ JumpIfSmi(receiver, &miss); |
1515 | 1515 |
1516 // Check that the maps haven't changed. | 1516 // Check that the maps haven't changed. |
1517 CheckPrototypes(JSObject::cast(object), receiver, | 1517 CheckPrototypes(JSObject::cast(object), receiver, |
1518 holder, r3, r0, r4, name, &miss); | 1518 holder, r3, r0, r4, name, &miss); |
1519 | 1519 |
1520 if (argc == 0) { | 1520 if (argc == 0) { |
1521 // Nothing to do, just return the length. | 1521 // Nothing to do, just return the length. |
1522 __ ldr(r0, FieldMemOperand(receiver, JSArray::kLengthOffset)); | 1522 __ ldr(r0, FieldMemOperand(receiver, JSArray::kLengthOffset)); |
1523 __ Drop(argc + 1); | 1523 __ Drop(argc + 1); |
1524 __ Ret(); | 1524 __ Ret(); |
(...skipping 33 matching lines...)
1558 __ ldr(r4, MemOperand(sp, (argc - 1) * kPointerSize)); | 1558 __ ldr(r4, MemOperand(sp, (argc - 1) * kPointerSize)); |
1559 // We may need a register containing the address end_elements below, | 1559 // We may need a register containing the address end_elements below, |
1560 // so write back the value in end_elements. | 1560 // so write back the value in end_elements. |
1561 __ add(end_elements, elements, | 1561 __ add(end_elements, elements, |
1562 Operand(r0, LSL, kPointerSizeLog2 - kSmiTagSize)); | 1562 Operand(r0, LSL, kPointerSizeLog2 - kSmiTagSize)); |
1563 const int kEndElementsOffset = | 1563 const int kEndElementsOffset = |
1564 FixedArray::kHeaderSize - kHeapObjectTag - argc * kPointerSize; | 1564 FixedArray::kHeaderSize - kHeapObjectTag - argc * kPointerSize; |
1565 __ str(r4, MemOperand(end_elements, kEndElementsOffset, PreIndex)); | 1565 __ str(r4, MemOperand(end_elements, kEndElementsOffset, PreIndex)); |
1566 | 1566 |
1567 // Check for a smi. | 1567 // Check for a smi. |
1568 __ BranchOnNotSmi(r4, &with_write_barrier); | 1568 __ JumpIfNotSmi(r4, &with_write_barrier); |
1569 __ bind(&exit); | 1569 __ bind(&exit); |
1570 __ Drop(argc + 1); | 1570 __ Drop(argc + 1); |
1571 __ Ret(); | 1571 __ Ret(); |
1572 | 1572 |
1573 __ bind(&with_write_barrier); | 1573 __ bind(&with_write_barrier); |
1574 __ InNewSpace(elements, r4, eq, &exit); | 1574 __ InNewSpace(elements, r4, eq, &exit); |
1575 __ RecordWriteHelper(elements, end_elements, r4); | 1575 __ RecordWriteHelper(elements, end_elements, r4); |
1576 __ Drop(argc + 1); | 1576 __ Drop(argc + 1); |
1577 __ Ret(); | 1577 __ Ret(); |
1578 | 1578 |
(...skipping 86 matching lines...)
1665 Register receiver = r1; | 1665 Register receiver = r1; |
1666 Register elements = r3; | 1666 Register elements = r3; |
1667 | 1667 |
1668 GenerateNameCheck(name, &miss); | 1668 GenerateNameCheck(name, &miss); |
1669 | 1669 |
1670 // Get the receiver from the stack | 1670 // Get the receiver from the stack |
1671 const int argc = arguments().immediate(); | 1671 const int argc = arguments().immediate(); |
1672 __ ldr(receiver, MemOperand(sp, argc * kPointerSize)); | 1672 __ ldr(receiver, MemOperand(sp, argc * kPointerSize)); |
1673 | 1673 |
1674 // Check that the receiver isn't a smi. | 1674 // Check that the receiver isn't a smi. |
1675 __ BranchOnSmi(receiver, &miss); | 1675 __ JumpIfSmi(receiver, &miss); |
1676 | 1676 |
1677 // Check that the maps haven't changed. | 1677 // Check that the maps haven't changed. |
1678 CheckPrototypes(JSObject::cast(object), | 1678 CheckPrototypes(JSObject::cast(object), |
1679 receiver, holder, elements, r4, r0, name, &miss); | 1679 receiver, holder, elements, r4, r0, name, &miss); |
1680 | 1680 |
1681 // Get the elements array of the object. | 1681 // Get the elements array of the object. |
1682 __ ldr(elements, FieldMemOperand(receiver, JSArray::kElementsOffset)); | 1682 __ ldr(elements, FieldMemOperand(receiver, JSArray::kElementsOffset)); |
1683 | 1683 |
1684 // Check that the elements are in fast mode and writable. | 1684 // Check that the elements are in fast mode and writable. |
1685 __ CheckMap(elements, r0, Heap::kFixedArrayMapRootIndex, &call_builtin, true); | 1685 __ CheckMap(elements, r0, Heap::kFixedArrayMapRootIndex, &call_builtin, true); |
(...skipping 316 matching lines...)
2002 // arguments, bail out to the regular call. | 2002 // arguments, bail out to the regular call. |
2003 if (!object->IsJSObject() || argc != 1) return Heap::undefined_value(); | 2003 if (!object->IsJSObject() || argc != 1) return Heap::undefined_value(); |
2004 | 2004 |
2005 Label miss, slow; | 2005 Label miss, slow; |
2006 GenerateNameCheck(name, &miss); | 2006 GenerateNameCheck(name, &miss); |
2007 | 2007 |
2008 if (cell == NULL) { | 2008 if (cell == NULL) { |
2009 __ ldr(r1, MemOperand(sp, 1 * kPointerSize)); | 2009 __ ldr(r1, MemOperand(sp, 1 * kPointerSize)); |
2010 | 2010 |
2011 STATIC_ASSERT(kSmiTag == 0); | 2011 STATIC_ASSERT(kSmiTag == 0); |
2012 __ BranchOnSmi(r1, &miss); | 2012 __ JumpIfSmi(r1, &miss); |
2013 | 2013 |
2014 CheckPrototypes(JSObject::cast(object), r1, holder, r0, r3, r4, name, | 2014 CheckPrototypes(JSObject::cast(object), r1, holder, r0, r3, r4, name, |
2015 &miss); | 2015 &miss); |
2016 } else { | 2016 } else { |
2017 ASSERT(cell->value() == function); | 2017 ASSERT(cell->value() == function); |
2018 GenerateGlobalReceiverCheck(JSObject::cast(object), holder, name, &miss); | 2018 GenerateGlobalReceiverCheck(JSObject::cast(object), holder, name, &miss); |
2019 GenerateLoadFunctionFromCell(cell, function, &miss); | 2019 GenerateLoadFunctionFromCell(cell, function, &miss); |
2020 } | 2020 } |
2021 | 2021 |
2022 // Load the (only) argument into r0. | 2022 // Load the (only) argument into r0. |
(...skipping 138 matching lines...)
2161 GenerateGlobalReceiverCheck(JSObject::cast(object), holder, name, &miss); | 2161 GenerateGlobalReceiverCheck(JSObject::cast(object), holder, name, &miss); |
2162 GenerateLoadFunctionFromCell(cell, function, &miss); | 2162 GenerateLoadFunctionFromCell(cell, function, &miss); |
2163 } | 2163 } |
2164 | 2164 |
2165 // Load the (only) argument into r0. | 2165 // Load the (only) argument into r0. |
2166 __ ldr(r0, MemOperand(sp, 0 * kPointerSize)); | 2166 __ ldr(r0, MemOperand(sp, 0 * kPointerSize)); |
2167 | 2167 |
2168 // Check if the argument is a smi. | 2168 // Check if the argument is a smi. |
2169 Label not_smi; | 2169 Label not_smi; |
2170 STATIC_ASSERT(kSmiTag == 0); | 2170 STATIC_ASSERT(kSmiTag == 0); |
2171 __ BranchOnNotSmi(r0, ¬_smi); | 2171 __ JumpIfNotSmi(r0, ¬_smi); |
2172 | 2172 |
2173 // Do bitwise not or do nothing depending on the sign of the | 2173 // Do bitwise not or do nothing depending on the sign of the |
2174 // argument. | 2174 // argument. |
2175 __ eor(r1, r0, Operand(r0, ASR, kBitsPerInt - 1)); | 2175 __ eor(r1, r0, Operand(r0, ASR, kBitsPerInt - 1)); |
2176 | 2176 |
2177 // Add 1 or do nothing depending on the sign of the argument. | 2177 // Add 1 or do nothing depending on the sign of the argument. |
2178 __ sub(r0, r1, Operand(r0, ASR, kBitsPerInt - 1), SetCC); | 2178 __ sub(r0, r1, Operand(r0, ASR, kBitsPerInt - 1), SetCC); |
2179 | 2179 |
2180 // If the result is still negative, go to the slow case. | 2180 // If the result is still negative, go to the slow case. |
2181 // This only happens for the most negative smi. | 2181 // This only happens for the most negative smi. |
(...skipping 1172 matching lines...)
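
The Math.abs stub in the hunk above uses the classic branchless absolute-value identity: XOR with the sign word (0 or all ones, obtained by an arithmetic shift right by 31) flips the bits of a negative number, and subtracting the sign then adds the final 1 of the two's-complement negation. A small standalone illustration of the same identity (the stub operates on tagged smis, but the algebra is identical):

#include <cassert>
#include <cstdint>

// Branchless abs, as in the generated code above:
//   sign = x >> 31            (0 for x >= 0, -1 for x < 0)
//   abs  = (x ^ sign) - sign  (x unchanged, or ~x + 1 == -x)
int32_t BranchlessAbs(int32_t x) {
  int32_t sign = x >> 31;  // arithmetic shift, like ASR #31
  // Do the arithmetic in uint32_t so INT32_MIN does not trigger signed overflow.
  uint32_t result = (static_cast<uint32_t>(x) ^ static_cast<uint32_t>(sign)) -
                    static_cast<uint32_t>(sign);
  return static_cast<int32_t>(result);
}

int main() {
  assert(BranchlessAbs(5) == 5);
  assert(BranchlessAbs(-5) == 5);
  // The most negative value stays negative, which is why the stub checks the
  // sign of the result and falls through to the slow case in that situation.
  assert(BranchlessAbs(INT32_MIN) == INT32_MIN);
  return 0;
}
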
3354 // -- lr : return address | 3354 // -- lr : return address |
3355 // -- r0 : key | 3355 // -- r0 : key |
3356 // -- r1 : receiver | 3356 // -- r1 : receiver |
3357 // ----------------------------------- | 3357 // ----------------------------------- |
3358 Label slow, failed_allocation; | 3358 Label slow, failed_allocation; |
3359 | 3359 |
3360 Register key = r0; | 3360 Register key = r0; |
3361 Register receiver = r1; | 3361 Register receiver = r1; |
3362 | 3362 |
3363 // Check that the object isn't a smi | 3363 // Check that the object isn't a smi |
3364 __ BranchOnSmi(receiver, &slow); | 3364 __ JumpIfSmi(receiver, &slow); |
3365 | 3365 |
3366 // Check that the key is a smi. | 3366 // Check that the key is a smi. |
3367 __ BranchOnNotSmi(key, &slow); | 3367 __ JumpIfNotSmi(key, &slow); |
3368 | 3368 |
3369 // Check that the object is a JS object. Load map into r2. | 3369 // Check that the object is a JS object. Load map into r2. |
3370 __ CompareObjectType(receiver, r2, r3, FIRST_JS_OBJECT_TYPE); | 3370 __ CompareObjectType(receiver, r2, r3, FIRST_JS_OBJECT_TYPE); |
3371 __ b(lt, &slow); | 3371 __ b(lt, &slow); |
3372 | 3372 |
3373 // Check that the receiver does not require access checks. We need | 3373 // Check that the receiver does not require access checks. We need |
3374 // to check this explicitly since this generic stub does not perform | 3374 // to check this explicitly since this generic stub does not perform |
3375 // map checks. | 3375 // map checks. |
3376 __ ldrb(r3, FieldMemOperand(r2, Map::kBitFieldOffset)); | 3376 __ ldrb(r3, FieldMemOperand(r2, Map::kBitFieldOffset)); |
3377 __ tst(r3, Operand(1 << Map::kIsAccessCheckNeeded)); | 3377 __ tst(r3, Operand(1 << Map::kIsAccessCheckNeeded)); |
(...skipping 260 matching lines...)
3638 // ----------------------------------- | 3638 // ----------------------------------- |
3639 Label slow, check_heap_number; | 3639 Label slow, check_heap_number; |
3640 | 3640 |
3641 // Register usage. | 3641 // Register usage. |
3642 Register value = r0; | 3642 Register value = r0; |
3643 Register key = r1; | 3643 Register key = r1; |
3644 Register receiver = r2; | 3644 Register receiver = r2; |
3645 // r3 mostly holds the elements array or the destination external array. | 3645 // r3 mostly holds the elements array or the destination external array. |
3646 | 3646 |
3647 // Check that the object isn't a smi. | 3647 // Check that the object isn't a smi. |
3648 __ BranchOnSmi(receiver, &slow); | 3648 __ JumpIfSmi(receiver, &slow); |
3649 | 3649 |
3650 // Check that the object is a JS object. Load map into r3. | 3650 // Check that the object is a JS object. Load map into r3. |
3651 __ CompareObjectType(receiver, r3, r4, FIRST_JS_OBJECT_TYPE); | 3651 __ CompareObjectType(receiver, r3, r4, FIRST_JS_OBJECT_TYPE); |
3652 __ b(le, &slow); | 3652 __ b(le, &slow); |
3653 | 3653 |
3654 // Check that the receiver does not require access checks. We need | 3654 // Check that the receiver does not require access checks. We need |
3655 // to do this because this generic stub does not perform map checks. | 3655 // to do this because this generic stub does not perform map checks. |
3656 __ ldrb(ip, FieldMemOperand(r3, Map::kBitFieldOffset)); | 3656 __ ldrb(ip, FieldMemOperand(r3, Map::kBitFieldOffset)); |
3657 __ tst(ip, Operand(1 << Map::kIsAccessCheckNeeded)); | 3657 __ tst(ip, Operand(1 << Map::kIsAccessCheckNeeded)); |
3658 __ b(ne, &slow); | 3658 __ b(ne, &slow); |
3659 | 3659 |
3660 // Check that the key is a smi. | 3660 // Check that the key is a smi. |
3661 __ BranchOnNotSmi(key, &slow); | 3661 __ JumpIfNotSmi(key, &slow); |
3662 | 3662 |
3663 // Check that the elements array is the appropriate type of ExternalArray. | 3663 // Check that the elements array is the appropriate type of ExternalArray. |
3664 __ ldr(r3, FieldMemOperand(receiver, JSObject::kElementsOffset)); | 3664 __ ldr(r3, FieldMemOperand(receiver, JSObject::kElementsOffset)); |
3665 __ ldr(r4, FieldMemOperand(r3, HeapObject::kMapOffset)); | 3665 __ ldr(r4, FieldMemOperand(r3, HeapObject::kMapOffset)); |
3666 __ LoadRoot(ip, Heap::RootIndexForExternalArrayType(array_type)); | 3666 __ LoadRoot(ip, Heap::RootIndexForExternalArrayType(array_type)); |
3667 __ cmp(r4, ip); | 3667 __ cmp(r4, ip); |
3668 __ b(ne, &slow); | 3668 __ b(ne, &slow); |
3669 | 3669 |
3670 // Check that the index is in range. | 3670 // Check that the index is in range. |
3671 __ mov(r4, Operand(key, ASR, kSmiTagSize)); // Untag the index. | 3671 __ mov(r4, Operand(key, ASR, kSmiTagSize)); // Untag the index. |
3672 __ ldr(ip, FieldMemOperand(r3, ExternalArray::kLengthOffset)); | 3672 __ ldr(ip, FieldMemOperand(r3, ExternalArray::kLengthOffset)); |
3673 __ cmp(r4, ip); | 3673 __ cmp(r4, ip); |
3674 // Unsigned comparison catches both negative and too-large values. | 3674 // Unsigned comparison catches both negative and too-large values. |
3675 __ b(hs, &slow); | 3675 __ b(hs, &slow); |
3676 | 3676 |
3677 // Handle both smis and HeapNumbers in the fast path. Go to the | 3677 // Handle both smis and HeapNumbers in the fast path. Go to the |
3678 // runtime for all other kinds of values. | 3678 // runtime for all other kinds of values. |
3679 // r3: external array. | 3679 // r3: external array. |
3680 // r4: key (integer). | 3680 // r4: key (integer). |
3681 __ BranchOnNotSmi(value, &check_heap_number); | 3681 __ JumpIfNotSmi(value, &check_heap_number); |
3682 __ mov(r5, Operand(value, ASR, kSmiTagSize)); // Untag the value. | 3682 __ mov(r5, Operand(value, ASR, kSmiTagSize)); // Untag the value. |
3683 __ ldr(r3, FieldMemOperand(r3, ExternalArray::kExternalPointerOffset)); | 3683 __ ldr(r3, FieldMemOperand(r3, ExternalArray::kExternalPointerOffset)); |
3684 | 3684 |
3685 // r3: base pointer of external storage. | 3685 // r3: base pointer of external storage. |
3686 // r4: key (integer). | 3686 // r4: key (integer). |
3687 // r5: value (integer). | 3687 // r5: value (integer). |
3688 switch (array_type) { | 3688 switch (array_type) { |
3689 case kExternalByteArray: | 3689 case kExternalByteArray: |
3690 case kExternalUnsignedByteArray: | 3690 case kExternalUnsignedByteArray: |
3691 __ strb(r5, MemOperand(r3, r4, LSL, 0)); | 3691 __ strb(r5, MemOperand(r3, r4, LSL, 0)); |
(...skipping 232 matching lines...)
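
One idiom in the hunk above is worth spelling out: after untagging the key with an arithmetic shift right by kSmiTagSize, the bounds check relies on a single unsigned comparison (b hs) to reject negative indices as well as indices past the end, because a negative index reinterpreted as unsigned becomes a very large value. A standalone sketch of that bounds check (illustrative, not V8 code):

#include <cassert>
#include <cstdint>

// One unsigned compare replaces "index >= 0 && index < length": a negative
// signed index reinterpreted as unsigned wraps to a huge number, so it fails
// the same "lower than length" test that catches too-large indices.
bool InBounds(int32_t index, uint32_t length) {
  return static_cast<uint32_t>(index) < length;  // inverse of b(hs, &slow)
}

int main() {
  assert(InBounds(0, 4));
  assert(InBounds(3, 4));
  assert(!InBounds(4, 4));   // too large
  assert(!InBounds(-1, 4));  // negative wraps to 0xFFFFFFFF
  return 0;
}
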
3924 | 3924 |
3925 return GetCode(flags); | 3925 return GetCode(flags); |
3926 } | 3926 } |
3927 | 3927 |
3928 | 3928 |
3929 #undef __ | 3929 #undef __ |
3930 | 3930 |
3931 } } // namespace v8::internal | 3931 } } // namespace v8::internal |
3932 | 3932 |
3933 #endif // V8_TARGET_ARCH_ARM | 3933 #endif // V8_TARGET_ARCH_ARM |