OLD | NEW |
---|---|
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include <limits.h> // For LONG_MIN, LONG_MAX. | 5 #include <limits.h> // For LONG_MIN, LONG_MAX. |
6 | 6 |
7 #if V8_TARGET_ARCH_MIPS | 7 #if V8_TARGET_ARCH_MIPS |
8 | 8 |
9 #include "src/base/bits.h" | 9 #include "src/base/bits.h" |
10 #include "src/base/division-by-constant.h" | 10 #include "src/base/division-by-constant.h" |
(...skipping 4067 matching lines...) | |
4078 } | 4078 } |
4079 | 4079 |
4080 | 4080 |
4081 void MacroAssembler::Allocate(int object_size, | 4081 void MacroAssembler::Allocate(int object_size, |
4082 Register result, | 4082 Register result, |
4083 Register scratch1, | 4083 Register scratch1, |
4084 Register scratch2, | 4084 Register scratch2, |
4085 Label* gc_required, | 4085 Label* gc_required, |
4086 AllocationFlags flags) { | 4086 AllocationFlags flags) { |
4087 DCHECK(object_size <= Page::kMaxRegularHeapObjectSize); | 4087 DCHECK(object_size <= Page::kMaxRegularHeapObjectSize); |
4088 DCHECK((flags & ALLOCATION_FOLDED) == 0); | |
4088 if (!FLAG_inline_new) { | 4089 if (!FLAG_inline_new) { |
4089 if (emit_debug_code()) { | 4090 if (emit_debug_code()) { |
4090 // Trash the registers to simulate an allocation failure. | 4091 // Trash the registers to simulate an allocation failure. |
4091 li(result, 0x7091); | 4092 li(result, 0x7091); |
4092 li(scratch1, 0x7191); | 4093 li(scratch1, 0x7191); |
4093 li(scratch2, 0x7291); | 4094 li(scratch2, 0x7291); |
4094 } | 4095 } |
4095 jmp(gc_required); | 4096 jmp(gc_required); |
4096 return; | 4097 return; |
4097 } | 4098 } |
(...skipping 32 matching lines...) | |
4130 } else { | 4131 } else { |
4131 if (emit_debug_code()) { | 4132 if (emit_debug_code()) { |
4132 // Assert that result actually contains top on entry. | 4133 // Assert that result actually contains top on entry. |
4133 lw(alloc_limit, MemOperand(top_address)); | 4134 lw(alloc_limit, MemOperand(top_address)); |
4134 Check(eq, kUnexpectedAllocationTop, result, Operand(alloc_limit)); | 4135 Check(eq, kUnexpectedAllocationTop, result, Operand(alloc_limit)); |
4135 } | 4136 } |
4136 // Load allocation limit. Result already contains allocation top. | 4137 // Load allocation limit. Result already contains allocation top. |
4137 lw(alloc_limit, MemOperand(top_address, limit - top)); | 4138 lw(alloc_limit, MemOperand(top_address, limit - top)); |
4138 } | 4139 } |
4139 | 4140 |
4140 if ((flags & DOUBLE_ALIGNMENT) != 0) { | 4141 if (((flags & ALLOCATION_FOLDED) == 0) && ((flags & DOUBLE_ALIGNMENT) != 0)) { |
Michael Lippautz (2016/05/10 09:04:10): Same as w/ other platforms: Remove ALLOCATION_FOLD
Hannes Payer (out of office) (2016/05/10 09:10:59): Done.
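As I read the exchange above: the new DCHECK((flags & ALLOCATION_FOLDED) == 0) at the top of Allocate already guarantees the flag is clear, so the extra ALLOCATION_FOLDED test in this guard is redundant and the condition can stay as in the old column. A sketch of the simplified guard (an interpretation of the review comment, not the final patch):

```cpp
// With ALLOCATION_FOLDED excluded by the DCHECK at function entry, the
// alignment guard does not need to re-test it.
if ((flags & DOUBLE_ALIGNMENT) != 0) {
  // ... align the next allocation, as in the lines below ...
}
```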
4141 // Align the next allocation. Storing the filler map without checking top is | 4142 // Align the next allocation. Storing the filler map without checking top is |
4142 // safe in new-space because the limit of the heap is aligned there. | 4143 // safe in new-space because the limit of the heap is aligned there. |
4143 DCHECK(kPointerAlignment * 2 == kDoubleAlignment); | 4144 DCHECK(kPointerAlignment * 2 == kDoubleAlignment); |
4144 And(result_end, result, Operand(kDoubleAlignmentMask)); | 4145 And(result_end, result, Operand(kDoubleAlignmentMask)); |
4145 Label aligned; | 4146 Label aligned; |
4146 Branch(&aligned, eq, result_end, Operand(zero_reg)); | 4147 Branch(&aligned, eq, result_end, Operand(zero_reg)); |
4147 if ((flags & PRETENURE) != 0) { | 4148 if ((flags & PRETENURE) != 0) { |
4148 Branch(gc_required, Ugreater_equal, result, Operand(alloc_limit)); | 4149 Branch(gc_required, Ugreater_equal, result, Operand(alloc_limit)); |
4149 } | 4150 } |
4150 li(result_end, Operand(isolate()->factory()->one_pointer_filler_map())); | 4151 li(result_end, Operand(isolate()->factory()->one_pointer_filler_map())); |
4151 sw(result_end, MemOperand(result)); | 4152 sw(result_end, MemOperand(result)); |
4152 Addu(result, result, Operand(kDoubleSize / 2)); | 4153 Addu(result, result, Operand(kDoubleSize / 2)); |
4153 bind(&aligned); | 4154 bind(&aligned); |
4154 } | 4155 } |
4155 | 4156 |
4156 // Calculate new top and bail out if new space is exhausted. Use result | 4157 // Calculate new top and bail out if new space is exhausted. Use result |
4157 // to calculate the new top. | 4158 // to calculate the new top. |
4158 Addu(result_end, result, Operand(object_size)); | 4159 Addu(result_end, result, Operand(object_size)); |
4159 Branch(gc_required, Ugreater, result_end, Operand(alloc_limit)); | 4160 Branch(gc_required, Ugreater, result_end, Operand(alloc_limit)); |
4160 sw(result_end, MemOperand(top_address)); | 4161 |
4162 if ((flags & ALLOCATION_FOLDING_DOMINATOR) == 0) { | |
4163 // The top pointer is not updated for allocation folding dominators. | |
4164 sw(result_end, MemOperand(top_address)); | |
4165 } | |
4161 | 4166 |
4162 // Tag object. | 4167 // Tag object. |
4163 Addu(result, result, Operand(kHeapObjectTag)); | 4168 Addu(result, result, Operand(kHeapObjectTag)); |
4164 } | 4169 } |
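For readers not steeped in the MIPS macro assembler, here is a minimal C++ model of what the fixed-size Allocate sequence above emits, assuming MIPS32 constants (kPointerSize == 4, kDoubleSize == 8, kDoubleAlignmentMask == 7, kHeapObjectTag == 1) and illustrative names (Space, AllocateModel, kOnePointerFiller) that are not V8 API; the PRETENURE limit check on the alignment path is omitted for brevity:

```cpp
#include <cstddef>
#include <cstdint>

// Illustrative stand-in for the allocation top/limit pair that the real code
// reaches through ExternalReference and top_address.
struct Space {
  uintptr_t top;    // current allocation top
  uintptr_t limit;  // allocation limit
};

constexpr uint32_t kOnePointerFiller = 0xDEADBEEF;  // stands in for the filler map

// Returns a tagged pointer, or 0 where the real code branches to gc_required.
uintptr_t AllocateModel(Space* space, size_t object_size, bool double_align,
                        bool folding_dominator) {
  uintptr_t result = space->top;
  if (double_align && (result & 7) != 0) {
    // Store a one-word filler and advance by kDoubleSize / 2 == 4 bytes.
    *reinterpret_cast<uint32_t*>(result) = kOnePointerFiller;
    result += 4;
  }
  uintptr_t result_end = result + object_size;
  if (result_end > space->limit) return 0;  // Branch(gc_required, Ugreater, ...)
  if (!folding_dominator) {
    // The top pointer is not updated for allocation folding dominators.
    space->top = result_end;
  }
  return result + 1;  // Addu(result, result, Operand(kHeapObjectTag))
}
```

The register choreography in the assembler version (top_address, alloc_limit, result_end) exists only because these locals must live in machine registers; the control flow is the same.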
4165 | 4170 |
4166 | 4171 |
4167 void MacroAssembler::Allocate(Register object_size, Register result, | 4172 void MacroAssembler::Allocate(Register object_size, Register result, |
4168 Register result_end, Register scratch, | 4173 Register result_end, Register scratch, |
4169 Label* gc_required, AllocationFlags flags) { | 4174 Label* gc_required, AllocationFlags flags) { |
4175 DCHECK((flags & ALLOCATION_FOLDED) == 0); | |
4170 if (!FLAG_inline_new) { | 4176 if (!FLAG_inline_new) { |
4171 if (emit_debug_code()) { | 4177 if (emit_debug_code()) { |
4172 // Trash the registers to simulate an allocation failure. | 4178 // Trash the registers to simulate an allocation failure. |
4173 li(result, 0x7091); | 4179 li(result, 0x7091); |
4174 li(scratch, 0x7191); | 4180 li(scratch, 0x7191); |
4175 li(result_end, 0x7291); | 4181 li(result_end, 0x7291); |
4176 } | 4182 } |
4177 jmp(gc_required); | 4183 jmp(gc_required); |
4178 return; | 4184 return; |
4179 } | 4185 } |
(...skipping 52 matching lines...) | |
4232 } | 4238 } |
4233 | 4239 |
4234 // Calculate new top and bail out if new space is exhausted. Use result | 4240 // Calculate new top and bail out if new space is exhausted. Use result |
4235 // to calculate the new top. Object size may be in words so a shift is | 4241 // to calculate the new top. Object size may be in words so a shift is |
4236 // required to get the number of bytes. | 4242 // required to get the number of bytes. |
4237 if ((flags & SIZE_IN_WORDS) != 0) { | 4243 if ((flags & SIZE_IN_WORDS) != 0) { |
4238 Lsa(result_end, result, object_size, kPointerSizeLog2); | 4244 Lsa(result_end, result, object_size, kPointerSizeLog2); |
4239 } else { | 4245 } else { |
4240 Addu(result_end, result, Operand(object_size)); | 4246 Addu(result_end, result, Operand(object_size)); |
4241 } | 4247 } |
4248 | |
4242 Branch(gc_required, Ugreater, result_end, Operand(alloc_limit)); | 4249 Branch(gc_required, Ugreater, result_end, Operand(alloc_limit)); |
4243 | 4250 |
4244 // Update allocation top. result temporarily holds the new top. | 4251 // Update allocation top. result temporarily holds the new top. |
4245 if (emit_debug_code()) { | 4252 if (emit_debug_code()) { |
4246 And(alloc_limit, result_end, Operand(kObjectAlignmentMask)); | 4253 And(alloc_limit, result_end, Operand(kObjectAlignmentMask)); |
4247 Check(eq, kUnalignedAllocationInNewSpace, alloc_limit, Operand(zero_reg)); | 4254 Check(eq, kUnalignedAllocationInNewSpace, alloc_limit, Operand(zero_reg)); |
4248 } | 4255 } |
4249 sw(result_end, MemOperand(top_address)); | 4256 |
4257 if ((flags & ALLOCATION_FOLDING_DOMINATOR) == 0) { | |
4258 // The top pointer is not updated for allocation folding dominators. | |
4259 sw(result_end, MemOperand(top_address)); | |
4260 } | |
4250 | 4261 |
4251 // Tag object. | 4262 // Tag object. |
4252 Addu(result, result, Operand(kHeapObjectTag)); | 4263 Addu(result, result, Operand(kHeapObjectTag)); |
4253 } | 4264 } |
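One detail in the register-sized variant worth spelling out: when SIZE_IN_WORDS is set, the byte size is produced with Lsa (shift-and-add), i.e. result_end = result + (object_size << kPointerSizeLog2). A one-function model, assuming kPointerSizeLog2 == 2 on MIPS32 (the helper name is illustrative):

```cpp
#include <cstdint>

// Models Lsa(result_end, result, object_size, kPointerSizeLog2): converts a
// word count to a byte offset and adds it to the current allocation top.
inline uintptr_t EndFromWordCount(uintptr_t result, uint32_t size_in_words) {
  return result + (static_cast<uintptr_t>(size_in_words) << 2);
}
```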
4254 | 4265 |
4266 void MacroAssembler::FastAllocate(int object_size, Register result, | |
4267 Register scratch1, Register scratch2, | |
4268 AllocationFlags flags) { | |
4269 DCHECK(object_size <= Page::kMaxRegularHeapObjectSize); | |
4270 DCHECK(!AreAliased(result, scratch1, scratch2, t9, at)); | |
4271 | |
4272 // Make object size into bytes. | |
4273 if ((flags & SIZE_IN_WORDS) != 0) { | |
4274 object_size *= kPointerSize; | |
4275 } | |
4276 DCHECK_EQ(0, object_size & kObjectAlignmentMask); | |
4277 | |
4278 ExternalReference allocation_top = | |
4279 AllocationUtils::GetAllocationTopReference(isolate(), flags); | |
4280 | |
4281 // Set up allocation top address and allocation limit registers. | |
4282 Register top_address = scratch1; | |
4283 // This code stores a temporary value in t9. | |
4284 Register result_end = scratch2; | |
4285 li(top_address, Operand(allocation_top)); | |
4286 lw(result, MemOperand(top_address)); | |
4287 | |
4288 if ((flags & DOUBLE_ALIGNMENT) != 0) { | |
4289 // Align the next allocation. Storing the filler map without checking top is | |
4290 // safe in new-space because the limit of the heap is aligned there. | |
4291 DCHECK(kPointerAlignment * 2 == kDoubleAlignment); | |
4292 And(result_end, result, Operand(kDoubleAlignmentMask)); | |
4293 Label aligned; | |
4294 Branch(&aligned, eq, result_end, Operand(zero_reg)); | |
4295 li(result_end, Operand(isolate()->factory()->one_pointer_filler_map())); | |
4296 sw(result_end, MemOperand(result)); | |
4297 Addu(result, result, Operand(kDoubleSize / 2)); | |
4298 bind(&aligned); | |
4299 } | |
4300 | |
4301 Addu(result_end, result, Operand(object_size)); | |
4302 | |
4303 // The top pointer is not updated for allocation folding dominators. | |
4304 sw(result_end, MemOperand(top_address)); | |
4305 | |
4306 Addu(result, result, Operand(kHeapObjectTag)); | |
4307 } | |
4308 | |
4309 void MacroAssembler::FastAllocate(Register object_size, Register result, | |
4310 Register result_end, Register scratch, | |
4311 AllocationFlags flags) { | |
4312 // |object_size| and |result_end| may overlap if the DOUBLE_ALIGNMENT flag | |
4313 // is not specified. Other registers must not overlap. | |
4314 DCHECK(!AreAliased(object_size, result, scratch, t9, at)); | |
4315 DCHECK(!AreAliased(result_end, result, scratch, t9, at)); | |
4316 DCHECK((flags & DOUBLE_ALIGNMENT) == 0 || !object_size.is(result_end)); | |
4317 | |
4318 ExternalReference allocation_top = | |
4319 AllocationUtils::GetAllocationTopReference(isolate(), flags); | |
4320 | |
4321 // Set up allocation top address and allocation limit registers. | |
4322 Register top_address = scratch; | |
4323 // This code stores a temporary value in t9. | |
4324 li(top_address, Operand(allocation_top)); | |
4325 lw(result, MemOperand(top_address)); | |
4326 | |
4327 if ((flags & DOUBLE_ALIGNMENT) != 0) { | |
4328 // Align the next allocation. Storing the filler map without checking top is | |
4329 // safe in new-space because the limit of the heap is aligned there. | |
4330 DCHECK(kPointerAlignment * 2 == kDoubleAlignment); | |
4331 And(result_end, result, Operand(kDoubleAlignmentMask)); | |
4332 Label aligned; | |
4333 Branch(&aligned, eq, result_end, Operand(zero_reg)); | |
4334 li(result_end, Operand(isolate()->factory()->one_pointer_filler_map())); | |
4335 sw(result_end, MemOperand(result)); | |
4336 Addu(result, result, Operand(kDoubleSize / 2)); | |
4337 bind(&aligned); | |
4338 } | |
4339 | |
4340 // Calculate new top and bail out if new space is exhausted. Use result | |
4341 // to calculate the new top. Object size may be in words so a shift is | |
4342 // required to get the number of bytes. | |
4343 if ((flags & SIZE_IN_WORDS) != 0) { | |
4344 Lsa(result_end, result, object_size, kPointerSizeLog2); | |
4345 } else { | |
4346 Addu(result_end, result, Operand(object_size)); | |
4347 } | |
4348 | |
4349 // The top pointer is not updated for allocation folding dominators. | |
4350 sw(result_end, MemOperand(top_address)); | |
4351 | |
4352 Addu(result, result, Operand(kHeapObjectTag)); | |
4353 } | |
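The new FastAllocate variants mirror Allocate but drop the gc_required label and the limit comparison and always write the new top; as the flag names and the dominator path above suggest, they rely on a dominating Allocate call (with ALLOCATION_FOLDING_DOMINATOR) having already checked the limit without moving top. A minimal model under the same illustrative assumptions as the earlier sketch (Space and the constants are not V8 API):

```cpp
#include <cstddef>
#include <cstdint>

struct Space {      // same illustrative stand-in as in the earlier sketch
  uintptr_t top;
  uintptr_t limit;  // unused here: FastAllocate performs no limit check
};

// Returns a tagged pointer; there is no gc_required path to bail out to.
uintptr_t FastAllocateModel(Space* space, size_t object_size,
                            bool double_align) {
  uintptr_t result = space->top;
  if (double_align && (result & 7) != 0) {
    *reinterpret_cast<uint32_t*>(result) = 0xDEADBEEF;  // one-pointer filler stand-in
    result += 4;                                        // kDoubleSize / 2 on MIPS32
  }
  uintptr_t result_end = result + object_size;
  space->top = result_end;  // top is always updated, unlike the dominator case
  return result + 1;        // kHeapObjectTag
}
```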
4255 | 4354 |
4256 void MacroAssembler::AllocateTwoByteString(Register result, | 4355 void MacroAssembler::AllocateTwoByteString(Register result, |
4257 Register length, | 4356 Register length, |
4258 Register scratch1, | 4357 Register scratch1, |
4259 Register scratch2, | 4358 Register scratch2, |
4260 Register scratch3, | 4359 Register scratch3, |
4261 Label* gc_required) { | 4360 Label* gc_required) { |
4262 // Calculate the number of bytes needed for the characters in the string while | 4361 // Calculate the number of bytes needed for the characters in the string while |
4263 // observing object alignment. | 4362 // observing object alignment. |
4264 DCHECK((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0); | 4363 DCHECK((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0); |
(...skipping 2429 matching lines...) | |
6694 if (mag.shift > 0) sra(result, result, mag.shift); | 6793 if (mag.shift > 0) sra(result, result, mag.shift); |
6695 srl(at, dividend, 31); | 6794 srl(at, dividend, 31); |
6696 Addu(result, result, Operand(at)); | 6795 Addu(result, result, Operand(at)); |
6697 } | 6796 } |
6698 | 6797 |
6699 | 6798 |
6700 } // namespace internal | 6799 } // namespace internal |
6701 } // namespace v8 | 6800 } // namespace v8 |
6702 | 6801 |
6703 #endif // V8_TARGET_ARCH_MIPS | 6802 #endif // V8_TARGET_ARCH_MIPS |