Chromium Code Reviews

Side by Side Diff: src/mips/code-stubs-mips.cc

Issue 6993057: Version 3.4.2 (Closed) Base URL: http://v8.googlecode.com/svn/trunk/
Patch Set: Created 9 years, 6 months ago
  // Copyright 2011 the V8 project authors. All rights reserved.
  // Redistribution and use in source and binary forms, with or without
  // modification, are permitted provided that the following conditions are
  // met:
  //
  //   * Redistributions of source code must retain the above copyright
  //     notice, this list of conditions and the following disclaimer.
  //   * Redistributions in binary form must reproduce the above
  //     copyright notice, this list of conditions and the following
  //     disclaimer in the documentation and/or other materials provided
(...skipping 654 matching lines...)
  // Save FCSR.
  __ cfc1(scratch1, FCSR);
  // Disable FPU exceptions.
  __ ctc1(zero_reg, FCSR);
  __ trunc_w_d(single_scratch, double_dst);
  // Retrieve FCSR.
  __ cfc1(scratch2, FCSR);
  // Restore FCSR.
  __ ctc1(scratch1, FCSR);

- // Check for inexact conversion.
- __ srl(scratch2, scratch2, kFCSRFlagShift);
- __ And(scratch2, scratch2, (kFCSRFlagMask | kFCSRInexactFlagBit));
+ // Check for inexact conversion or exception.
+ __ And(scratch2, scratch2, kFCSRFlagMask);

  // Jump to not_int32 if the operation did not succeed.
  __ Branch(not_int32, ne, scratch2, Operand(zero_reg));

  if (destination == kCoreRegisters) {
    __ Move(dst1, dst2, double_dst);
  }

} else {
  ASSERT(!scratch1.is(object) && !scratch2.is(object));
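The FCSR sequence above is the MIPS idiom for an exactness check: clear the status flags, truncate, and inspect the flags afterward; any bit under kFCSRFlagMask means the double was not exactly a 32-bit integer. Below is a portable C++ sketch of the same check using <cfenv> rather than V8 or MIPS specifics; TruncateDoubleToInt32Exact is a hypothetical helper, not V8 API.

#include <cfenv>
#include <cmath>
#include <cstdint>

// Truncate toward zero and report whether the conversion was exact.
// lrint() raises FE_INEXACT when its result differs from the input and
// FE_INVALID when the value is unrepresentable -- the same conditions the
// stub reads out of FCSR before branching to not_int32.
bool TruncateDoubleToInt32Exact(double input, int32_t* out) {
  std::fesetround(FE_TOWARDZERO);      // trunc_w_d rounds toward zero
  std::feclearexcept(FE_ALL_EXCEPT);   // analogue of ctc1(zero_reg, FCSR)
  long result = std::lrint(input);     // analogue of trunc_w_d
  bool exact = !std::fetestexcept(FE_INEXACT | FE_INVALID | FE_OVERFLOW) &&
               result >= INT32_MIN && result <= INT32_MAX;
  *out = static_cast<int32_t>(result);
  return exact;                        // false => the stub would jump away
}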
(...skipping 62 matching lines...)
  // Save FCSR.
  __ cfc1(scratch1, FCSR);
  // Disable FPU exceptions.
  __ ctc1(zero_reg, FCSR);
  __ trunc_w_d(double_scratch, double_scratch);
  // Retrieve FCSR.
  __ cfc1(scratch2, FCSR);
  // Restore FCSR.
  __ ctc1(scratch1, FCSR);

- // Check for inexact conversion.
- __ srl(scratch2, scratch2, kFCSRFlagShift);
- __ And(scratch2, scratch2, (kFCSRFlagMask | kFCSRInexactFlagBit));
+ // Check for inexact conversion or exception.
+ __ And(scratch2, scratch2, kFCSRFlagMask);

  // Jump to not_int32 if the operation did not succeed.
  __ Branch(not_int32, ne, scratch2, Operand(zero_reg));
  // Get the result in the destination register.
  __ mfc1(dst, double_scratch);

} else {
  // Load the double value in the destination registers.
  __ lw(scratch2, FieldMemOperand(object, HeapNumber::kExponentOffset));
  __ lw(scratch1, FieldMemOperand(object, HeapNumber::kMantissaOffset));
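The else-branch above reads the HeapNumber payload as two 32-bit words. As a reminder of what those words are, here is a sketch assuming a little-endian layout; DoubleWords and Decompose are illustrative names, not V8 code.

#include <cstdint>
#include <cstring>

// An IEEE-754 double as the two words the lw instructions load: the
// "exponent" word is the high half (sign, 11 exponent bits, mantissa top),
// the "mantissa" word is the low 32 bits of the mantissa.
struct DoubleWords {
  uint32_t mantissa;  // loaded from HeapNumber::kMantissaOffset
  uint32_t exponent;  // loaded from HeapNumber::kExponentOffset
};

DoubleWords Decompose(double value) {
  DoubleWords words;
  std::memcpy(&words, &value, sizeof(words));  // little-endian assumption
  return words;
}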
(...skipping 205 matching lines...)
  // we now know they test equal.
  if (cc != eq || !never_nan_nan) {
    __ li(exp_mask_reg, Operand(HeapNumber::kExponentMask));

    // Test for NaN. Sadly, we can't just compare to factory->nan_value(),
    // so we do the second best thing - test it ourselves.
    // They are both equal and they are not both Smis so both of them are not
    // Smis. If it's not a heap number, then return equal.
    if (cc == less || cc == greater) {
      __ GetObjectType(a0, t4, t4);
-     __ Branch(slow, greater, t4, Operand(FIRST_JS_OBJECT_TYPE));
+     __ Branch(slow, greater, t4, Operand(FIRST_SPEC_OBJECT_TYPE));
    } else {
      __ GetObjectType(a0, t4, t4);
      __ Branch(&heap_number, eq, t4, Operand(HEAP_NUMBER_TYPE));
      // Comparing JS objects with <=, >= is complicated.
      if (cc != eq) {
-       __ Branch(slow, greater, t4, Operand(FIRST_JS_OBJECT_TYPE));
+       __ Branch(slow, greater, t4, Operand(FIRST_SPEC_OBJECT_TYPE));
        // Normally here we fall through to return_equal, but undefined is
        // special: (undefined == undefined) == true, but
        // (undefined <= undefined) == false! See ECMAScript 11.8.5.
        if (cc == less_equal || cc == greater_equal) {
          __ Branch(&return_equal, ne, t4, Operand(ODDBALL_TYPE));
          __ LoadRoot(t2, Heap::kUndefinedValueRootIndex);
          __ Branch(&return_equal, ne, a0, Operand(t2));
          if (cc == le) {
            // undefined <= undefined should fail.
            __ li(v0, Operand(GREATER));
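The undefined-vs-undefined special case above follows ECMAScript 11.8.5: relational comparison converts undefined to NaN, and every relational comparison involving NaN is false, even though undefined == undefined is true. Returning GREATER for cc == le (and LESS for cc == ge) is what makes the caller's <= / >= test fail. A minimal C++ illustration of the underlying NaN behavior, illustrative only and not V8 code:

#include <cassert>
#include <cmath>

int main() {
  double undef_as_nan = std::nan("");       // ES5 11.8.5 maps undefined to NaN
  assert(!(undef_as_nan <= undef_as_nan));  // relational with NaN: always false
  assert(!(undef_as_nan >= undef_as_nan));
  // Equality takes a different path: undefined == undefined is true, which is
  // why the stub normally falls through to return_equal.
  return 0;
}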
(...skipping 297 matching lines...)
    __ bind(&less_than);
    __ li(v0, Operand(LESS));
    __ Ret();
  }
}


static void EmitStrictTwoHeapObjectCompare(MacroAssembler* masm,
                                           Register lhs,
                                           Register rhs) {
- // If either operand is a JSObject or an oddball value, then they are
+ // If either operand is a JS object or an oddball value, then they are
  // not equal since their pointers are different.
  // There is no test for undetectability in strict equality.
- STATIC_ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
+ STATIC_ASSERT(LAST_TYPE == LAST_CALLABLE_SPEC_OBJECT_TYPE);
  Label first_non_object;
  // Get the type of the first operand into a2 and compare it with
- // FIRST_JS_OBJECT_TYPE.
+ // FIRST_SPEC_OBJECT_TYPE.
  __ GetObjectType(lhs, a2, a2);
- __ Branch(&first_non_object, less, a2, Operand(FIRST_JS_OBJECT_TYPE));
+ __ Branch(&first_non_object, less, a2, Operand(FIRST_SPEC_OBJECT_TYPE));

  // Return non-zero.
  Label return_not_equal;
  __ bind(&return_not_equal);
  __ li(v0, Operand(1));
  __ Ret();

  __ bind(&first_non_object);
  // Check for oddballs: true, false, null, undefined.
  __ Branch(&return_not_equal, eq, a2, Operand(ODDBALL_TYPE));

  __ GetObjectType(rhs, a3, a3);
- __ Branch(&return_not_equal, greater, a3, Operand(FIRST_JS_OBJECT_TYPE));
+ __ Branch(&return_not_equal, greater, a3, Operand(FIRST_SPEC_OBJECT_TYPE));

  // Check for oddballs: true, false, null, undefined.
  __ Branch(&return_not_equal, eq, a3, Operand(ODDBALL_TYPE));

  // Now that we have the types we might as well check for symbol-symbol.
  // Ensure that no non-strings have the symbol bit set.
  STATIC_ASSERT(LAST_TYPE < kNotStringTag + kIsSymbolMask);
  STATIC_ASSERT(kSymbolTag != 0);
  __ And(t2, a2, Operand(a3));
  __ And(t0, t2, Operand(kIsSymbolMask));
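The two STATIC_ASSERTs are what make the single AND-and-branch test sound: every symbol instance type has the kIsSymbolMask bit set and no non-string type can have it, so AND-ing the two instance types leaves the bit set iff both operands are symbols. A self-contained sketch with an illustrative mask value, not the real V8 constant:

#include <cstdint>

constexpr uint32_t kIsSymbolMaskSketch = 0x40;  // illustrative, not V8's value

// One AND of the two instance types replaces two separate type checks.
bool BothAreSymbols(uint32_t lhs_type, uint32_t rhs_type) {
  return ((lhs_type & rhs_type) & kIsSymbolMaskSketch) != 0;
}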
(...skipping 55 matching lines...)
  __ Branch(not_both_strings, ge, a3, Operand(FIRST_NONSTRING_TYPE));
  __ And(at, a3, Operand(kIsSymbolMask));
  __ Branch(possible_strings, eq, at, Operand(zero_reg));

  // Both are symbols. We already checked they weren't the same pointer
  // so they are not equal.
  __ li(v0, Operand(1));  // Non-zero indicates not equal.
  __ Ret();

  __ bind(&object_test);
- __ Branch(not_both_strings, lt, a2, Operand(FIRST_JS_OBJECT_TYPE));
+ __ Branch(not_both_strings, lt, a2, Operand(FIRST_SPEC_OBJECT_TYPE));
  __ GetObjectType(rhs, a2, a3);
- __ Branch(not_both_strings, lt, a3, Operand(FIRST_JS_OBJECT_TYPE));
+ __ Branch(not_both_strings, lt, a3, Operand(FIRST_SPEC_OBJECT_TYPE));

  // If both objects are undetectable, they are equal. Otherwise, they
  // are not equal, since they are different objects and an object is not
  // equal to undefined.
  __ lw(a3, FieldMemOperand(lhs, HeapObject::kMapOffset));
  __ lbu(a2, FieldMemOperand(a2, Map::kBitFieldOffset));
  __ lbu(a3, FieldMemOperand(a3, Map::kBitFieldOffset));
  __ and_(a0, a2, a3);
  __ And(a0, a0, Operand(1 << Map::kIsUndetectable));
  __ Xor(v0, a0, Operand(1 << Map::kIsUndetectable));
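The and_/And/Xor triple above computes "equal iff both undetectable" without a branch: AND-ing the two bit fields keeps kIsUndetectable only when both maps have it, and XOR-ing with the mask then yields zero (the "equal" result in v0) exactly in that case. A sketch with an illustrative bit position, not the real Map layout:

#include <cstdint>

constexpr uint32_t kIsUndetectableSketch = 1u << 4;  // illustrative position

// Returns 0 ("equal") iff both bit fields have the undetectable bit set.
uint32_t UndetectableCompare(uint32_t lhs_bits, uint32_t rhs_bits) {
  uint32_t both = (lhs_bits & rhs_bits) & kIsUndetectableSketch;
  return both ^ kIsUndetectableSketch;
}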
(...skipping 355 matching lines...)
  __ And(scratch0, scratch0, Operand(1 << Map::kIsUndetectable));
  __ Branch(&false_result, eq, scratch0, Operand(1 << Map::kIsUndetectable));

  // JavaScript object => true.
  __ lw(scratch0, FieldMemOperand(tos_, HeapObject::kMapOffset));
  __ lbu(scratch0, FieldMemOperand(scratch0, Map::kInstanceTypeOffset));

  // "tos_" is a register and contains a non-zero value.
  // Hence we implicitly return true if the greater than
  // condition is satisfied.
- __ Ret(gt, scratch0, Operand(FIRST_JS_OBJECT_TYPE));
+ __ Ret(gt, scratch0, Operand(FIRST_SPEC_OBJECT_TYPE));

  // Check for string.
  __ lw(scratch0, FieldMemOperand(tos_, HeapObject::kMapOffset));
  __ lbu(scratch0, FieldMemOperand(scratch0, Map::kInstanceTypeOffset));
  // "tos_" is a register and contains a non-zero value.
  // Hence we implicitly return true if the greater than
  // condition is satisfied.
  __ Ret(gt, scratch0, Operand(FIRST_NONSTRING_TYPE));

  // String value => false iff empty, i.e., length is zero.
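Taken together, this ToBoolean path encodes: undetectable object => false, any other JS object => true, string => true iff non-empty. A plain-C++ summary of those rules, illustrative and not V8 internals:

#include <string>

enum class HeapKind { kUndetectable, kSpecObject, kString };

// Truthiness of a heap object as the stub computes it.
bool ToBooleanSketch(HeapKind kind, const std::string& str = "") {
  switch (kind) {
    case HeapKind::kUndetectable: return false;  // e.g. document.all
    case HeapKind::kSpecObject:   return true;
    case HeapKind::kString:       return !str.empty();
  }
  return false;  // unreachable
}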
(...skipping 161 matching lines...)

void UnaryOpStub::GenerateHeapNumberStubBitNot(MacroAssembler* masm) {
  Label non_smi, slow;
  GenerateSmiCodeBitNot(masm, &non_smi);
  __ bind(&non_smi);
  GenerateHeapNumberCodeBitNot(masm, &slow);
  __ bind(&slow);
  GenerateTypeTransition(masm);
}

+
void UnaryOpStub::GenerateHeapNumberCodeSub(MacroAssembler* masm,
                                            Label* slow) {
  EmitCheckForHeapNumber(masm, a0, a1, t2, slow);
  // a0 is a heap number. Get a new heap number in a1.
  if (mode_ == UNARY_OVERWRITE) {
    __ lw(a2, FieldMemOperand(a0, HeapNumber::kExponentOffset));
    __ Xor(a2, a2, Operand(HeapNumber::kSignMask));  // Flip sign.
    __ sw(a2, FieldMemOperand(a0, HeapNumber::kExponentOffset));
  } else {
    Label slow_allocate_heapnumber, heapnumber_allocated;
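The UNARY_OVERWRITE branch above negates a heap number by XOR-ing the sign bit in the high word rather than doing any FP arithmetic, which also behaves correctly for NaN, infinities, and signed zero. A portable sketch of the same trick on a whole double; NegateByBitFlip is an illustrative helper, not V8 code:

#include <cstdint>
#include <cstring>

double NegateByBitFlip(double value) {
  uint64_t bits;
  std::memcpy(&bits, &value, sizeof(bits));
  bits ^= uint64_t{1} << 63;  // the sign bit -- HeapNumber::kSignMask's position
  std::memcpy(&value, &bits, sizeof(bits));
  return value;
}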
(...skipping 791 matching lines...)
  // Save FCSR.
  __ cfc1(scratch1, FCSR);
  // Disable FPU exceptions.
  __ ctc1(zero_reg, FCSR);
  __ trunc_w_d(single_scratch, f10);
  // Retrieve FCSR.
  __ cfc1(scratch2, FCSR);
  // Restore FCSR.
  __ ctc1(scratch1, FCSR);

- // Check for inexact conversion.
- __ srl(scratch2, scratch2, kFCSRFlagShift);
+ // Check for inexact conversion or exception.
  __ And(scratch2, scratch2, kFCSRFlagMask);

  if (result_type_ <= BinaryOpIC::INT32) {
    // If scratch2 != 0, result does not fit in a 32-bit integer.
    __ Branch(&transition, ne, scratch2, Operand(zero_reg));
  }

  // Check if the result fits in a smi.
  __ mfc1(scratch1, single_scratch);
  __ Addu(scratch2, scratch1, Operand(0x40000000));
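The Addu with 0x40000000 that the chunk ends on is the standard smi range check: a smi holds a 31-bit signed value, i.e. [-0x40000000, 0x3FFFFFFF], and adding 0x40000000 maps exactly that range onto the non-negative 32-bit values, so a set sign bit after the add means "does not fit" -- one add and one sign test instead of two comparisons. Sketch with an illustrative helper name:

#include <cstdint>

// True iff value fits in a 31-bit signed smi.
bool FitsInSmi(int32_t value) {
  uint32_t biased = static_cast<uint32_t>(value) + 0x40000000u;
  return (biased & 0x80000000u) == 0;  // sign clear <=> in [-2^30, 2^30 - 1]
}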
(...skipping 1911 matching lines...)
  __ Branch(&slow, ne, a2, Operand(JS_FUNCTION_TYPE));

  // Fast-case: Invoke the function now.
  // a1: pushed function
  ParameterCount actual(argc_);

  if (ReceiverMightBeImplicit()) {
    Label call_as_function;
    __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
    __ Branch(&call_as_function, eq, t0, Operand(at));
-   __ InvokeFunction(a1, actual, JUMP_FUNCTION);
+   __ InvokeFunction(a1,
+                     actual,
+                     JUMP_FUNCTION,
+                     NullCallWrapper(),
+                     CALL_AS_METHOD);
    __ bind(&call_as_function);
  }
  __ InvokeFunction(a1,
                    actual,
                    JUMP_FUNCTION,
                    NullCallWrapper(),
                    CALL_AS_FUNCTION);

  // Slow-case: Non-function called.
  __ bind(&slow);
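The substantive change in this chunk is the call kind: the hole in the receiver slot means no receiver was supplied, which selects the CALL_AS_FUNCTION invoke (implicit global receiver), while an explicit receiver now takes the CALL_AS_METHOD invoke added above. A one-function summary of that dispatch, with illustrative types rather than V8's:

enum CallKind { CALL_AS_METHOD, CALL_AS_FUNCTION };

CallKind ChooseCallKind(bool receiver_is_the_hole) {
  return receiver_is_the_hole ? CALL_AS_FUNCTION : CALL_AS_METHOD;
}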
(...skipping 1639 matching lines...)
  __ LeaveInternalFrame();
  // Compute the entry point of the rewritten stub.
  __ Addu(a2, v0, Operand(Code::kHeaderSize - kHeapObjectTag));
  // Restore registers.
  __ pop(ra);
  __ pop(a0);
  __ pop(a1);
  __ Jump(a2);
}

+

void DirectCEntryStub::Generate(MacroAssembler* masm) {
  // No need to pop or drop anything, LeaveExitFrame will restore the old
  // stack, thus dropping the allocated space for the return value.
  // The saved ra is after the reserved stack space for the 4 args.
  __ lw(t9, MemOperand(sp, kCArgsSlotsSize));

  if (FLAG_debug_code && EnableSlowAsserts()) {
    // In case of an error the return address may point to a memory area
    // filled with kZapValue by the GC.
    // Dereference the address and check for this.
    __ lw(t0, MemOperand(t9));
    __ Assert(ne, "Received invalid return address.", t0,
              Operand(reinterpret_cast<uint32_t>(kZapValue)));
  }
  __ Jump(t9);
}


void DirectCEntryStub::GenerateCall(MacroAssembler* masm,
                                    ExternalReference function) {
  __ li(t9, Operand(function));
  this->GenerateCall(masm, t9);
}

+
void DirectCEntryStub::GenerateCall(MacroAssembler* masm,
                                    Register target) {
  __ Move(t9, target);
  __ AssertStackIsAligned();
  // Allocate space for arg slots.
  __ Subu(sp, sp, kCArgsSlotsSize);

  // Block the trampoline pool through the whole function to make sure the
  // number of generated instructions is constant.
  Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm);
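BlockTrampolinePoolScope is an RAII guard: while it is alive the assembler may not emit a trampoline pool, so the protected sequence assembles to a fixed number of instructions, which the surrounding code relies on to compute a return address. A minimal self-contained sketch of the idiom; the class and method names below are assumptions, not V8's real interface:

// Hypothetical assembler with a pool-blocking depth counter.
class SketchAssembler {
 public:
  void StartBlockPool() { ++pool_block_depth_; }
  void EndBlockPool()   { --pool_block_depth_; }
  bool PoolBlocked() const { return pool_block_depth_ > 0; }
 private:
  int pool_block_depth_ = 0;
};

// Scope guard: pool emission is blocked for the guard's lifetime.
class BlockPoolScope {
 public:
  explicit BlockPoolScope(SketchAssembler* assm) : assm_(assm) {
    assm_->StartBlockPool();
  }
  ~BlockPoolScope() { assm_->EndBlockPool(); }
 private:
  SketchAssembler* assm_;
};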
(...skipping 258 matching lines...)
  __ mov(result, zero_reg);
  __ Ret();
}


#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_MIPS