OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 4063 matching lines...)
4074 | 4074 |
4075 | 4075 |
4076 void MacroAssembler::Allocate(int object_size, | 4076 void MacroAssembler::Allocate(int object_size, |
4077 Register result, | 4077 Register result, |
4078 Register result_end, | 4078 Register result_end, |
4079 Register scratch, | 4079 Register scratch, |
4080 Label* gc_required, | 4080 Label* gc_required, |
4081 AllocationFlags flags) { | 4081 AllocationFlags flags) { |
4082 ASSERT((flags & (RESULT_CONTAINS_TOP | SIZE_IN_WORDS)) == 0); | 4082 ASSERT((flags & (RESULT_CONTAINS_TOP | SIZE_IN_WORDS)) == 0); |
4083 ASSERT(object_size <= Page::kMaxNonCodeHeapObjectSize); | 4083 ASSERT(object_size <= Page::kMaxNonCodeHeapObjectSize); |
4084 if (!FLAG_inline_new) { | 4084 if (!FLAG_inline_new || |
| 4085 // TODO(mstarzinger): Implement more efficiently by keeping the |
| 4086 // bump-pointer allocation area empty instead of recompiling code. |
| 4087 isolate()->heap_profiler()->is_tracking_allocations()) { |
4085 if (emit_debug_code()) { | 4088 if (emit_debug_code()) { |
4086 // Trash the registers to simulate an allocation failure. | 4089 // Trash the registers to simulate an allocation failure. |
4087 movl(result, Immediate(0x7091)); | 4090 movl(result, Immediate(0x7091)); |
4088 if (result_end.is_valid()) { | 4091 if (result_end.is_valid()) { |
4089 movl(result_end, Immediate(0x7191)); | 4092 movl(result_end, Immediate(0x7191)); |
4090 } | 4093 } |
4091 if (scratch.is_valid()) { | 4094 if (scratch.is_valid()) { |
4092 movl(scratch, Immediate(0x7291)); | 4095 movl(scratch, Immediate(0x7291)); |
4093 } | 4096 } |
4094 } | 4097 } |
4095 jmp(gc_required); | 4098 jmp(gc_required); |
4096 return; | 4099 return; |
4097 } | 4100 } |
4098 ASSERT(!result.is(result_end)); | 4101 ASSERT(!result.is(result_end)); |
4099 | 4102 |
4100 // Load address of new object into result. | 4103 // Load address of new object into result. |
4101 LoadAllocationTopHelper(result, scratch, flags); | 4104 LoadAllocationTopHelper(result, scratch, flags); |
4102 | 4105 |
4103 if (isolate()->heap_profiler()->is_tracking_allocations()) { | |
4104 RecordObjectAllocation(isolate(), result, object_size); | |
4105 } | |
4106 | |
4107 // Align the next allocation. Storing the filler map without checking top is | 4106 // Align the next allocation. Storing the filler map without checking top is |
4108 // safe in new-space because the limit of the heap is aligned there. | 4107 // safe in new-space because the limit of the heap is aligned there. |
4109 if (((flags & DOUBLE_ALIGNMENT) != 0) && FLAG_debug_code) { | 4108 if (((flags & DOUBLE_ALIGNMENT) != 0) && FLAG_debug_code) { |
4110 testq(result, Immediate(kDoubleAlignmentMask)); | 4109 testq(result, Immediate(kDoubleAlignmentMask)); |
4111 Check(zero, kAllocationIsNotDoubleAligned); | 4110 Check(zero, kAllocationIsNotDoubleAligned); |
4112 } | 4111 } |
4113 | 4112 |
4114 // Calculate new top and bail out if new space is exhausted. | 4113 // Calculate new top and bail out if new space is exhausted. |
4115 ExternalReference allocation_limit = | 4114 ExternalReference allocation_limit = |
4116 AllocationUtils::GetAllocationLimitReference(isolate(), flags); | 4115 AllocationUtils::GetAllocationLimitReference(isolate(), flags); |
(...skipping 41 matching lines...)
4158 } | 4157 } |
4159 | 4158 |
4160 | 4159 |
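Both Allocate() variants emit the same bump-pointer fast path: load the current allocation top into result, add the object size, compare against the allocation limit, and jump to gc_required when new space is exhausted. When the heap profiler is tracking allocations the fast path is now skipped entirely (per the TODO above), since an inline bump would never be observed by the profiler. A minimal plain-C++ model of that scheme follows; the names (BumpAllocator, AllocateRaw) are illustrative, not V8's:

    #include <cstddef>
    #include <cstdint>

    // Plain-C++ model of the bump-pointer fast path the assembler emits.
    struct BumpAllocator {
      uintptr_t top;    // allocation top: address of the next free byte
      uintptr_t limit;  // allocation limit: end of the current area

      // Returns the new object's address, or 0 when the area is exhausted
      // (the assembler's jmp(gc_required) slow path).
      uintptr_t AllocateRaw(size_t object_size) {
        uintptr_t result = top;
        uintptr_t new_top = result + object_size;
        if (new_top > limit) return 0;  // new space exhausted: request a GC
        top = new_top;                  // bump the top pointer
        return result;
      }
    };

The DOUBLE_ALIGNMENT flag adds one wrinkle the sketch omits: before bumping, the generated code may first store a one-word filler to round the top up to a double-aligned address, which is safe in new space because the heap limit there is aligned as well.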
4161 void MacroAssembler::Allocate(Register object_size, | 4160 void MacroAssembler::Allocate(Register object_size, |
4162 Register result, | 4161 Register result, |
4163 Register result_end, | 4162 Register result_end, |
4164 Register scratch, | 4163 Register scratch, |
4165 Label* gc_required, | 4164 Label* gc_required, |
4166 AllocationFlags flags) { | 4165 AllocationFlags flags) { |
4167 ASSERT((flags & SIZE_IN_WORDS) == 0); | 4166 ASSERT((flags & SIZE_IN_WORDS) == 0); |
4168 if (!FLAG_inline_new) { | 4167 if (!FLAG_inline_new || |
| 4168 // TODO(mstarzinger): Implement more efficiently by keeping the |
| 4169 // bump-pointer allocation area empty instead of recompiling code. |
| 4170 isolate()->heap_profiler()->is_tracking_allocations()) { |
4169 if (emit_debug_code()) { | 4171 if (emit_debug_code()) { |
4170 // Trash the registers to simulate an allocation failure. | 4172 // Trash the registers to simulate an allocation failure. |
4171 movl(result, Immediate(0x7091)); | 4173 movl(result, Immediate(0x7091)); |
4172 movl(result_end, Immediate(0x7191)); | 4174 movl(result_end, Immediate(0x7191)); |
4173 if (scratch.is_valid()) { | 4175 if (scratch.is_valid()) { |
4174 movl(scratch, Immediate(0x7291)); | 4176 movl(scratch, Immediate(0x7291)); |
4175 } | 4177 } |
4176 // object_size is left unchanged by this function. | 4178 // object_size is left unchanged by this function. |
4177 } | 4179 } |
4178 jmp(gc_required); | 4180 jmp(gc_required); |
4179 return; | 4181 return; |
4180 } | 4182 } |
4181 ASSERT(!result.is(result_end)); | 4183 ASSERT(!result.is(result_end)); |
4182 | 4184 |
4183 // Load address of new object into result. | 4185 // Load address of new object into result. |
4184 LoadAllocationTopHelper(result, scratch, flags); | 4186 LoadAllocationTopHelper(result, scratch, flags); |
4185 | 4187 |
4186 if (isolate()->heap_profiler()->is_tracking_allocations()) { | |
4187 RecordObjectAllocation(isolate(), result, object_size); | |
4188 } | |
4189 | |
4190 // Align the next allocation. Storing the filler map without checking top is | 4188 // Align the next allocation. Storing the filler map without checking top is |
4191 // safe in new-space because the limit of the heap is aligned there. | 4189 // safe in new-space because the limit of the heap is aligned there. |
4192 if (((flags & DOUBLE_ALIGNMENT) != 0) && FLAG_debug_code) { | 4190 if (((flags & DOUBLE_ALIGNMENT) != 0) && FLAG_debug_code) { |
4193 testq(result, Immediate(kDoubleAlignmentMask)); | 4191 testq(result, Immediate(kDoubleAlignmentMask)); |
4194 Check(zero, kAllocationIsNotDoubleAligned); | 4192 Check(zero, kAllocationIsNotDoubleAligned); |
4195 } | 4193 } |
4196 | 4194 |
4197 // Calculate new top and bail out if new space is exhausted. | 4195 // Calculate new top and bail out if new space is exhausted. |
4198 ExternalReference allocation_limit = | 4196 ExternalReference allocation_limit = |
4199 AllocationUtils::GetAllocationLimitReference(isolate(), flags); | 4197 AllocationUtils::GetAllocationLimitReference(isolate(), flags); |
(...skipping 740 matching lines...)
4940 Move(kScratchRegister, new_space_start); | 4938 Move(kScratchRegister, new_space_start); |
4941 cmpq(scratch_reg, kScratchRegister); | 4939 cmpq(scratch_reg, kScratchRegister); |
4942 j(less, no_memento_found); | 4940 j(less, no_memento_found); |
4943 cmpq(scratch_reg, ExternalOperand(new_space_allocation_top)); | 4941 cmpq(scratch_reg, ExternalOperand(new_space_allocation_top)); |
4944 j(greater, no_memento_found); | 4942 j(greater, no_memento_found); |
4945 CompareRoot(MemOperand(scratch_reg, -AllocationMemento::kSize), | 4943 CompareRoot(MemOperand(scratch_reg, -AllocationMemento::kSize), |
4946 Heap::kAllocationMementoMapRootIndex); | 4944 Heap::kAllocationMementoMapRootIndex); |
4947 } | 4945 } |
4948 | 4946 |
4949 | 4947 |
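The fragment above is the tail of the allocation-memento probe: an AllocationMemento, when present, sits immediately behind an object in new space, so the code range-checks the candidate address against the new-space bounds and then compares the map word AllocationMemento::kSize bytes back against the memento map root. A hedged C++ rendering, with assumed parameter names and the heap-object tag adjustment of FieldOperand omitted:

    #include <cstddef>
    #include <cstdint>

    // Illustrative version of the probe; every name here is an assumption.
    bool HasAllocationMemento(uintptr_t object_end,      // scratch_reg above
                              uintptr_t new_space_start,
                              uintptr_t new_space_top,   // allocation top
                              uintptr_t memento_map,
                              size_t memento_size) {
      // Below new-space start means old space; above the allocation top
      // means unallocated memory. Neither can hold a live memento.
      if (object_end < new_space_start) return false;  // j(less, ...)
      if (object_end > new_space_top) return false;    // j(greater, ...)
      // The candidate memento's first word is its map; compare it with
      // the AllocationMemento map root.
      uintptr_t candidate_map =
          *reinterpret_cast<const uintptr_t*>(object_end - memento_size);
      return candidate_map == memento_map;
    }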
4950 void MacroAssembler::RecordObjectAllocation(Isolate* isolate, | |
4951 Register object, | |
4952 Register object_size) { | |
4953 FrameScope frame(this, StackFrame::EXIT); | |
4954 PushSafepointRegisters(); | |
4955 PrepareCallCFunction(3); | |
4956 // Stash object first: it may be rdx, which the argument moves clobber. | |
4957 movq(kScratchRegister, object); | |
4958 movq(arg_reg_3, object_size); | |
4959 movq(arg_reg_2, kScratchRegister); | |
4960 movq(arg_reg_1, isolate, RelocInfo::EXTERNAL_REFERENCE); | |
4961 CallCFunction( | |
4962 ExternalReference::record_object_allocation_function(isolate), 3); | |
4963 PopSafepointRegisters(); | |
4964 } | |
4965 | |
4966 | |
4967 void MacroAssembler::RecordObjectAllocation(Isolate* isolate, | |
4968 Register object, | |
4969 int object_size) { | |
4970 FrameScope frame(this, StackFrame::EXIT); | |
4971 PushSafepointRegisters(); | |
4972 PrepareCallCFunction(3); | |
4973 movq(arg_reg_2, object); | |
4974 movq(arg_reg_3, Immediate(object_size)); | |
4975 movq(arg_reg_1, isolate, RelocInfo::EXTERNAL_REFERENCE); | |
4976 CallCFunction( | |
4977 ExternalReference::record_object_allocation_function(isolate), 3); | |
4978 PopSafepointRegisters(); | |
4979 } | |
4980 | |
4981 | |
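Both deleted RecordObjectAllocation helpers follow the standard pattern for calling from generated code into C++: open an exit frame (FrameScope), preserve registers across the safepoint, marshal the arguments into arg_reg_1..arg_reg_3 (stashing through kScratchRegister where an argument register would be clobbered), and CallCFunction through an external reference. To make the three marshalled arguments concrete, here is a hypothetical C-side target with matching arity; the real callee is resolved through ExternalReference::record_object_allocation_function and its exact signature is not visible in this file:

    #include <cstddef>

    namespace v8 { namespace internal { class Isolate; } }

    // Hypothetical stand-in for the C function the helpers call; the name,
    // linkage, and parameter types are assumptions for illustration only.
    extern "C" void RecordObjectAllocationStub(v8::internal::Isolate* isolate,
                                               void* object,
                                               size_t object_size) {
      // A real callee would forward to the heap profiler; omitted here.
      (void)isolate; (void)object; (void)object_size;
    }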
4982 void MacroAssembler::JumpIfDictionaryInPrototypeChain( | 4948 void MacroAssembler::JumpIfDictionaryInPrototypeChain( |
4983 Register object, | 4949 Register object, |
4984 Register scratch0, | 4950 Register scratch0, |
4985 Register scratch1, | 4951 Register scratch1, |
4986 Label* found) { | 4952 Label* found) { |
4987 ASSERT(!(scratch0.is(kScratchRegister) && scratch1.is(kScratchRegister))); | 4953 ASSERT(!(scratch0.is(kScratchRegister) && scratch1.is(kScratchRegister))); |
4988 ASSERT(!scratch1.is(scratch0)); | 4954 ASSERT(!scratch1.is(scratch0)); |
4989 Register current = scratch0; | 4955 Register current = scratch0; |
4990 Label loop_again; | 4956 Label loop_again; |
4991 | 4957 |
4992 movq(current, object); | 4958 movq(current, object); |
4993 | 4959 |
4994 // Loop based on the map going up the prototype chain. | 4960 // Loop based on the map going up the prototype chain. |
4995 bind(&loop_again); | 4961 bind(&loop_again); |
4996 movq(current, FieldOperand(current, HeapObject::kMapOffset)); | 4962 movq(current, FieldOperand(current, HeapObject::kMapOffset)); |
4997 movq(scratch1, FieldOperand(current, Map::kBitField2Offset)); | 4963 movq(scratch1, FieldOperand(current, Map::kBitField2Offset)); |
4998 and_(scratch1, Immediate(Map::kElementsKindMask)); | 4964 and_(scratch1, Immediate(Map::kElementsKindMask)); |
4999 shr(scratch1, Immediate(Map::kElementsKindShift)); | 4965 shr(scratch1, Immediate(Map::kElementsKindShift)); |
5000 cmpq(scratch1, Immediate(DICTIONARY_ELEMENTS)); | 4966 cmpq(scratch1, Immediate(DICTIONARY_ELEMENTS)); |
5001 j(equal, found); | 4967 j(equal, found); |
5002 movq(current, FieldOperand(current, Map::kPrototypeOffset)); | 4968 movq(current, FieldOperand(current, Map::kPrototypeOffset)); |
5003 CompareRoot(current, Heap::kNullValueRootIndex); | 4969 CompareRoot(current, Heap::kNullValueRootIndex); |
5004 j(not_equal, &loop_again); | 4970 j(not_equal, &loop_again); |
5005 } | 4971 } |
5006 | 4972 |
5007 | 4973 |
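JumpIfDictionaryInPrototypeChain() walks maps rather than objects: each iteration loads the current map, extracts the elements kind from bit_field2 with a mask-and-shift, exits to found on DICTIONARY_ELEMENTS, and otherwise follows the map's prototype slot until it reaches the null sentinel. A conceptual C++ sketch; the struct layout and constant values below are illustrative stand-ins, not V8's actual encodings:

    #include <cstdint>

    struct Map;
    struct Object { Map* map; };          // every heap object has a map
    struct Map {
      uint8_t bit_field2;                 // packs the elements kind
      Object* prototype;                  // next link in the chain
    };

    constexpr int kElementsKindShift = 3;  // illustrative value
    constexpr int kElementsKindMask = 0x1f << kElementsKindShift;
    constexpr int kDictionaryElements = 6; // illustrative value

    bool DictionaryInPrototypeChain(Object* object, Object* null_value) {
      Object* current = object;
      do {                                                 // bind(&loop_again)
        Map* map = current->map;
        int kind =
            (map->bit_field2 & kElementsKindMask) >> kElementsKindShift;
        if (kind == kDictionaryElements) return true;      // j(equal, found)
        current = map->prototype;                          // go up one level
      } while (current != null_value);                     // null ends the chain
      return false;
    }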
5008 } } // namespace v8::internal | 4974 } } // namespace v8::internal |
5009 | 4975 |
5010 #endif // V8_TARGET_ARCH_X64 | 4976 #endif // V8_TARGET_ARCH_X64 |