| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 2926 matching lines...) |
| 2937 // respect to register content between debug and release mode. | 2937 // respect to register content between debug and release mode. |
| 2938 lw(t9, MemOperand(topaddr)); | 2938 lw(t9, MemOperand(topaddr)); |
| 2939 Check(eq, kUnexpectedAllocationTop, result, Operand(t9)); | 2939 Check(eq, kUnexpectedAllocationTop, result, Operand(t9)); |
| 2940 } | 2940 } |
| 2941 // Load allocation limit into t9. Result already contains allocation top. | 2941 // Load allocation limit into t9. Result already contains allocation top. |
| 2942 lw(t9, MemOperand(topaddr, limit - top)); | 2942 lw(t9, MemOperand(topaddr, limit - top)); |
| 2943 } | 2943 } |
| 2944 | 2944 |
| 2945 if ((flags & DOUBLE_ALIGNMENT) != 0) { | 2945 if ((flags & DOUBLE_ALIGNMENT) != 0) { |
| 2946 // Align the next allocation. Storing the filler map without checking top is | 2946 // Align the next allocation. Storing the filler map without checking top is |
| 2947 // always safe because the limit of the heap is always aligned. | 2947 // safe in new-space because the limit of the heap is aligned there. |
| 2948 ASSERT((flags & PRETENURE_OLD_POINTER_SPACE) == 0); | 2948 ASSERT((flags & PRETENURE_OLD_POINTER_SPACE) == 0); |
| 2949 ASSERT(kPointerAlignment * 2 == kDoubleAlignment); | 2949 ASSERT(kPointerAlignment * 2 == kDoubleAlignment); |
| 2950 And(scratch2, result, Operand(kDoubleAlignmentMask)); | 2950 And(scratch2, result, Operand(kDoubleAlignmentMask)); |
| 2951 Label aligned; | 2951 Label aligned; |
| 2952 Branch(&aligned, eq, scratch2, Operand(zero_reg)); | 2952 Branch(&aligned, eq, scratch2, Operand(zero_reg)); |
| 2953 if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) { |
| 2954 Branch(gc_required, Ugreater_equal, result, Operand(t9)); |
| 2955 } |
| 2953 li(scratch2, Operand(isolate()->factory()->one_pointer_filler_map())); | 2956 li(scratch2, Operand(isolate()->factory()->one_pointer_filler_map())); |
| 2954 sw(scratch2, MemOperand(result)); | 2957 sw(scratch2, MemOperand(result)); |
| 2955 Addu(result, result, Operand(kDoubleSize / 2)); | 2958 Addu(result, result, Operand(kDoubleSize / 2)); |
| 2956 bind(&aligned); | 2959 bind(&aligned); |
| 2957 } | 2960 } |
| 2958 | 2961 |
| 2959 // Calculate new top and bail out if new space is exhausted. Use result | 2962 // Calculate new top and bail out if new space is exhausted. Use result |
| 2960 // to calculate the new top. | 2963 // to calculate the new top. |
| 2961 Addu(scratch2, result, Operand(object_size)); | 2964 Addu(scratch2, result, Operand(object_size)); |
| 2962 Branch(gc_required, Ugreater, scratch2, Operand(t9)); | 2965 Branch(gc_required, Ugreater, scratch2, Operand(t9)); |
| (...skipping 58 matching lines...) |
| 3021 // respect to register content between debug and release mode. | 3024 // respect to register content between debug and release mode. |
| 3022 lw(t9, MemOperand(topaddr)); | 3025 lw(t9, MemOperand(topaddr)); |
| 3023 Check(eq, kUnexpectedAllocationTop, result, Operand(t9)); | 3026 Check(eq, kUnexpectedAllocationTop, result, Operand(t9)); |
| 3024 } | 3027 } |
| 3025 // Load allocation limit into t9. Result already contains allocation top. | 3028 // Load allocation limit into t9. Result already contains allocation top. |
| 3026 lw(t9, MemOperand(topaddr, limit - top)); | 3029 lw(t9, MemOperand(topaddr, limit - top)); |
| 3027 } | 3030 } |
| 3028 | 3031 |
| 3029 if ((flags & DOUBLE_ALIGNMENT) != 0) { | 3032 if ((flags & DOUBLE_ALIGNMENT) != 0) { |
| 3030 // Align the next allocation. Storing the filler map without checking top is | 3033 // Align the next allocation. Storing the filler map without checking top is |
| 3031 // always safe because the limit of the heap is always aligned. | 3034 // safe in new-space because the limit of the heap is aligned there. |
| 3032 ASSERT((flags & PRETENURE_OLD_POINTER_SPACE) == 0); | 3035 ASSERT((flags & PRETENURE_OLD_POINTER_SPACE) == 0); |
| 3033 ASSERT(kPointerAlignment * 2 == kDoubleAlignment); | 3036 ASSERT(kPointerAlignment * 2 == kDoubleAlignment); |
| 3034 And(scratch2, result, Operand(kDoubleAlignmentMask)); | 3037 And(scratch2, result, Operand(kDoubleAlignmentMask)); |
| 3035 Label aligned; | 3038 Label aligned; |
| 3036 Branch(&aligned, eq, scratch2, Operand(zero_reg)); | 3039 Branch(&aligned, eq, scratch2, Operand(zero_reg)); |
| 3040 if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) { |
| 3041 Branch(gc_required, Ugreater_equal, result, Operand(t9)); |
| 3042 } |
| 3037 li(scratch2, Operand(isolate()->factory()->one_pointer_filler_map())); | 3043 li(scratch2, Operand(isolate()->factory()->one_pointer_filler_map())); |
| 3038 sw(scratch2, MemOperand(result)); | 3044 sw(scratch2, MemOperand(result)); |
| 3039 Addu(result, result, Operand(kDoubleSize / 2)); | 3045 Addu(result, result, Operand(kDoubleSize / 2)); |
| 3040 bind(&aligned); | 3046 bind(&aligned); |
| 3041 } | 3047 } |
| 3042 | 3048 |
| 3043 // Calculate new top and bail out if new space is exhausted. Use result | 3049 // Calculate new top and bail out if new space is exhausted. Use result |
| 3044 // to calculate the new top. Object size may be in words so a shift is | 3050 // to calculate the new top. Object size may be in words so a shift is |
| 3045 // required to get the number of bytes. | 3051 // required to get the number of bytes. |
| 3046 if ((flags & SIZE_IN_WORDS) != 0) { | 3052 if ((flags & SIZE_IN_WORDS) != 0) { |
| (...skipping 2548 matching lines...) |
| 5595 opcode == BGTZL); | 5601 opcode == BGTZL); |
| 5596 opcode = (cond == eq) ? BEQ : BNE; | 5602 opcode = (cond == eq) ? BEQ : BNE; |
| 5597 instr = (instr & ~kOpcodeMask) | opcode; | 5603 instr = (instr & ~kOpcodeMask) | opcode; |
| 5598 masm_.emit(instr); | 5604 masm_.emit(instr); |
| 5599 } | 5605 } |
| 5600 | 5606 |
| 5601 | 5607 |
| 5602 } } // namespace v8::internal | 5608 } } // namespace v8::internal |
| 5603 | 5609 |
| 5604 #endif // V8_TARGET_ARCH_MIPS | 5610 #endif // V8_TARGET_ARCH_MIPS |
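Note on the change above (commentary, not part of the diff): in both `Allocate()` variants, when `DOUBLE_ALIGNMENT` is requested and the allocation top is not double-word aligned, the code stores a one-pointer filler map and bumps the top by one word. In new-space the allocation limit is itself double aligned, so an unaligned top below the limit always has room for that filler word; in old data space the limit is only word aligned, so the patch adds a `result >= limit` bail-out to `gc_required` before the filler store. Below is a minimal standalone C++ sketch of that logic, not V8 code; the names `AlignAllocationTop`, `kFillerMarker`, `kWordSize`, and `limit_is_aligned` are illustrative assumptions, not real V8 identifiers.

```cpp
// Minimal sketch (not V8 code) of the allocation-top alignment logic this
// patch changes. All identifiers here are illustrative stand-ins.
#include <cstdint>
#include <cstring>
#include <iostream>
#include <vector>

constexpr std::size_t kWordSize = 4;                 // MIPS32 pointer size
constexpr std::size_t kDoubleAlign = 2 * kWordSize;  // stand-in for kDoubleAlignment
constexpr std::uint32_t kFillerMarker = 0xdeadbeef;  // stand-in for the one-pointer filler map

// Returns the (possibly adjusted) allocation top, or nullptr when the caller
// must take the GC path. `limit_is_aligned` models new-space (true) versus
// old data space (false), where the limit need not be double aligned.
std::uint8_t* AlignAllocationTop(std::uint8_t* top, std::uint8_t* limit,
                                 bool limit_is_aligned) {
  if (reinterpret_cast<std::uintptr_t>(top) % kDoubleAlign == 0) {
    return top;  // already double aligned, nothing to do
  }
  // In old data space the limit may not be double aligned, so the filler word
  // could land on or past the limit; check before storing.
  if (!limit_is_aligned && top >= limit) {
    return nullptr;  // gc_required
  }
  std::memcpy(top, &kFillerMarker, sizeof(kFillerMarker));  // write the filler word
  return top + kWordSize;
}

int main() {
  std::vector<std::uint8_t> space(4 * kDoubleAlign);
  std::uint8_t* base = space.data();
  // Misaligned top that has already reached an unaligned limit: the
  // old-data-space case must bail out instead of writing the filler.
  std::uint8_t* top = base + kWordSize;
  std::uint8_t* limit = base + kWordSize;
  std::cout << (AlignAllocationTop(top, limit, /*limit_is_aligned=*/false)
                    ? "aligned"
                    : "gc_required")
            << "\n";
  return 0;
}
```

The `top >= limit` bail-out in the sketch corresponds to the `Branch(gc_required, Ugreater_equal, result, Operand(t9))` lines added in the diff, where t9 holds the allocation limit.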