| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 2878 matching lines...) | |
| 2889 set_external_map(external_map); | 2889 set_external_map(external_map); |
| 2890 | 2890 |
| 2891 ASSERT(!InNewSpace(empty_fixed_array())); | 2891 ASSERT(!InNewSpace(empty_fixed_array())); |
| 2892 return true; | 2892 return true; |
| 2893 } | 2893 } |
| 2894 | 2894 |
| 2895 | 2895 |
| 2896 MaybeObject* Heap::AllocateHeapNumber(double value, PretenureFlag pretenure) { | 2896 MaybeObject* Heap::AllocateHeapNumber(double value, PretenureFlag pretenure) { |
| 2897 // Statically ensure that it is safe to allocate heap numbers in paged | 2897 // Statically ensure that it is safe to allocate heap numbers in paged |
| 2898 // spaces. | 2898 // spaces. |
| | 2899 int size = HeapNumber::kSize; |
| 2899 STATIC_ASSERT(HeapNumber::kSize <= Page::kNonCodeObjectAreaSize); | 2900 STATIC_ASSERT(HeapNumber::kSize <= Page::kNonCodeObjectAreaSize); |
| 2900 AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE; | 2901 AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, pretenure); |
| 2901 | 2902 |
| 2902 Object* result; | 2903 Object* result; |
| 2903 { MaybeObject* maybe_result = | 2904 { MaybeObject* maybe_result = AllocateRaw(size, space, OLD_DATA_SPACE); |
| 2904 AllocateRaw(HeapNumber::kSize, space, OLD_DATA_SPACE); | |
| 2905 if (!maybe_result->ToObject(&result)) return maybe_result; | 2905 if (!maybe_result->ToObject(&result)) return maybe_result; |
| 2906 } | 2906 } |
| 2907 | 2907 |
| 2908 HeapObject::cast(result)->set_map_no_write_barrier(heap_number_map()); | 2908 HeapObject::cast(result)->set_map_no_write_barrier(heap_number_map()); |
| 2909 HeapNumber::cast(result)->set_value(value); | 2909 HeapNumber::cast(result)->set_value(value); |
| 2910 return result; | 2910 return result; |
| 2911 } | 2911 } |
| 2912 | 2912 |
| 2913 | 2913 |
| 2914 MaybeObject* Heap::AllocateHeapNumber(double value) { | |
| 2915 // Use general version, if we're forced to always allocate. | |
| 2916 if (always_allocate()) return AllocateHeapNumber(value, TENURED); | |
| 2917 | |
| 2918 // This version of AllocateHeapNumber is optimized for | |
| 2919 // allocation in new space. | |
| 2920 STATIC_ASSERT(HeapNumber::kSize <= Page::kMaxNonCodeHeapObjectSize); | |
| 2921 Object* result; | |
| 2922 { MaybeObject* maybe_result = new_space_.AllocateRaw(HeapNumber::kSize); | |
| 2923 if (!maybe_result->ToObject(&result)) return maybe_result; | |
| 2924 } | |
| 2925 HeapObject::cast(result)->set_map_no_write_barrier(heap_number_map()); | |
| 2926 HeapNumber::cast(result)->set_value(value); | |
| 2927 return result; | |
| 2928 } | |
| 2929 | |
| 2930 | |
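Note: every hunk in this patch swaps an open-coded space pick for `SelectSpace(size, <preferred old space>, pretenure)`. The helper's definition is outside the diff; a plausible reconstruction from its call sites (an assumption, not verbatim from the patch):

    static inline AllocationSpace SelectSpace(int object_size,
                                              AllocationSpace preferred_old_space,
                                              PretenureFlag pretenure) {
      ASSERT(preferred_old_space == OLD_POINTER_SPACE ||
             preferred_old_space == OLD_DATA_SPACE);
      // Objects too big for a regular page always go to large object
      // space, regardless of the pretenuring decision.
      if (object_size > Page::kMaxNonCodeHeapObjectSize) return LO_SPACE;
      return (pretenure == TENURED) ? preferred_old_space : NEW_SPACE;
    }

The third argument of the surrounding `AllocateRaw(size, space, retry_space)` calls stays the preferred old space: if the first attempt lands in new space and fails, the allocation is retried there.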
| 2931 MaybeObject* Heap::AllocateCell(Object* value) { | 2914 MaybeObject* Heap::AllocateCell(Object* value) { |
| 2932 Object* result; | 2915 Object* result; |
| 2933 { MaybeObject* maybe_result = AllocateRawCell(); | 2916 { MaybeObject* maybe_result = AllocateRawCell(); |
| 2934 if (!maybe_result->ToObject(&result)) return maybe_result; | 2917 if (!maybe_result->ToObject(&result)) return maybe_result; |
| 2935 } | 2918 } |
| 2936 HeapObject::cast(result)->set_map_no_write_barrier(cell_map()); | 2919 HeapObject::cast(result)->set_map_no_write_barrier(cell_map()); |
| 2937 Cell::cast(result)->set_value(value); | 2920 Cell::cast(result)->set_value(value); |
| 2938 return result; | 2921 return result; |
| 2939 } | 2922 } |
| 2940 | 2923 |
| (...skipping 19 matching lines...) | |
| 2960 MaybeObject* Heap::AllocateBox(Object* value, PretenureFlag pretenure) { | 2943 MaybeObject* Heap::AllocateBox(Object* value, PretenureFlag pretenure) { |
| 2961 Box* result; | 2944 Box* result; |
| 2962 MaybeObject* maybe_result = AllocateStruct(BOX_TYPE); | 2945 MaybeObject* maybe_result = AllocateStruct(BOX_TYPE); |
| 2963 if (!maybe_result->To(&result)) return maybe_result; | 2946 if (!maybe_result->To(&result)) return maybe_result; |
| 2964 result->set_value(value); | 2947 result->set_value(value); |
| 2965 return result; | 2948 return result; |
| 2966 } | 2949 } |
| 2967 | 2950 |
| 2968 | 2951 |
| 2969 MaybeObject* Heap::AllocateAllocationSite() { | 2952 MaybeObject* Heap::AllocateAllocationSite() { |
| 2970 Object* result; | 2953 AllocationSite* site; |
| 2971 MaybeObject* maybe_result = Allocate(allocation_site_map(), | 2954 MaybeObject* maybe_result = Allocate(allocation_site_map(), |
| 2972 OLD_POINTER_SPACE); | 2955 OLD_POINTER_SPACE); |
| 2973 if (!maybe_result->ToObject(&result)) return maybe_result; | 2956 if (!maybe_result->To(&site)) return maybe_result; |
| 2974 AllocationSite* site = AllocationSite::cast(result); | |
| 2975 site->Initialize(); | 2957 site->Initialize(); |
| 2976 | 2958 |
| 2977 // Link the site | 2959 // Link the site |
| 2978 site->set_weak_next(allocation_sites_list()); | 2960 site->set_weak_next(allocation_sites_list()); |
| 2979 set_allocation_sites_list(site); | 2961 set_allocation_sites_list(site); |
| 2980 return result; | 2962 return site; |
| 2981 } | 2963 } |
| 2982 | 2964 |
| 2983 | 2965 |
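Note: besides the typed `To(&site)` replacing the `ToObject` + explicit cast pair, AllocateAllocationSite links every new site onto a weak, singly linked list threaded through `weak_next`, with the head in `allocation_sites_list()`. A hypothetical traversal, only to illustrate the invariant the two linking lines maintain (loop shape assumed, not from this patch):

    // Visit all allocation sites; the list ends at the first value that
    // is not an AllocationSite (the initial sentinel), so the type check
    // doubles as the termination test.
    for (Object* cur = heap->allocation_sites_list();
         cur->IsAllocationSite();
         cur = AllocationSite::cast(cur)->weak_next()) {
      AllocationSite* site = AllocationSite::cast(cur);
      // ... process site ...
    }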
| 2984 MaybeObject* Heap::CreateOddball(const char* to_string, | 2966 MaybeObject* Heap::CreateOddball(const char* to_string, |
| 2985 Object* to_number, | 2967 Object* to_number, |
| 2986 byte kind) { | 2968 byte kind) { |
| 2987 Object* result; | 2969 Object* result; |
| 2988 { MaybeObject* maybe_result = Allocate(oddball_map(), OLD_POINTER_SPACE); | 2970 { MaybeObject* maybe_result = Allocate(oddball_map(), OLD_POINTER_SPACE); |
| 2989 if (!maybe_result->ToObject(&result)) return maybe_result; | 2971 if (!maybe_result->ToObject(&result)) return maybe_result; |
| 2990 } | 2972 } |
| (...skipping 1073 matching lines...) | |
| 4064 String* answer = String::cast(result); | 4046 String* answer = String::cast(result); |
| 4065 answer->Set(0, code); | 4047 answer->Set(0, code); |
| 4066 return answer; | 4048 return answer; |
| 4067 } | 4049 } |
| 4068 | 4050 |
| 4069 | 4051 |
| 4070 MaybeObject* Heap::AllocateByteArray(int length, PretenureFlag pretenure) { | 4052 MaybeObject* Heap::AllocateByteArray(int length, PretenureFlag pretenure) { |
| 4071 if (length < 0 || length > ByteArray::kMaxLength) { | 4053 if (length < 0 || length > ByteArray::kMaxLength) { |
| 4072 return Failure::OutOfMemoryException(0x7); | 4054 return Failure::OutOfMemoryException(0x7); |
| 4073 } | 4055 } |
| 4074 if (pretenure == NOT_TENURED) { | |
| 4075 return AllocateByteArray(length); | |
| 4076 } | |
| 4077 int size = ByteArray::SizeFor(length); | 4056 int size = ByteArray::SizeFor(length); |
| 4078 AllocationSpace space = | 4057 AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, pretenure); |
| 4079 (size > Page::kMaxNonCodeHeapObjectSize) ? LO_SPACE : OLD_DATA_SPACE; | |
| 4080 Object* result; | |
| 4081 { MaybeObject* maybe_result = AllocateRaw(size, space, space); | |
| 4082 if (!maybe_result->ToObject(&result)) return maybe_result; | |
| 4083 } | |
| 4084 | |
| 4085 reinterpret_cast<ByteArray*>(result)->set_map_no_write_barrier( | |
| 4086 byte_array_map()); | |
| 4087 reinterpret_cast<ByteArray*>(result)->set_length(length); | |
| 4088 return result; | |
| 4089 } | |
| 4090 | |
| 4091 | |
| 4092 MaybeObject* Heap::AllocateByteArray(int length) { | |
| 4093 if (length < 0 || length > ByteArray::kMaxLength) { | |
| 4094 return Failure::OutOfMemoryException(0x8); | |
| 4095 } | |
| 4096 int size = ByteArray::SizeFor(length); | |
| 4097 AllocationSpace space = | |
| 4098 (size > Page::kMaxNonCodeHeapObjectSize) ? LO_SPACE : NEW_SPACE; | |
| 4099 Object* result; | 4058 Object* result; |
| 4100 { MaybeObject* maybe_result = AllocateRaw(size, space, OLD_DATA_SPACE); | 4059 { MaybeObject* maybe_result = AllocateRaw(size, space, OLD_DATA_SPACE); |
| 4101 if (!maybe_result->ToObject(&result)) return maybe_result; | 4060 if (!maybe_result->ToObject(&result)) return maybe_result; |
| 4102 } | 4061 } |
| 4103 | 4062 |
| 4104 reinterpret_cast<ByteArray*>(result)->set_map_no_write_barrier( | 4063 reinterpret_cast<ByteArray*>(result)->set_map_no_write_barrier( |
| 4105 byte_array_map()); | 4064 byte_array_map()); |
| 4106 reinterpret_cast<ByteArray*>(result)->set_length(length); | 4065 reinterpret_cast<ByteArray*>(result)->set_length(length); |
| 4107 return result; | 4066 return result; |
| 4108 } | 4067 } |
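Note: with the `NOT_TENURED` early-out and the separate new-space-only overload deleted, both pretenure flavors funnel through the single body above. Illustrative (hypothetical) call sites:

    heap->AllocateByteArray(n, NOT_TENURED);  // SelectSpace -> NEW_SPACE, or LO_SPACE if oversized
    heap->AllocateByteArray(n, TENURED);      // SelectSpace -> OLD_DATA_SPACE, or LO_SPACE if oversized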
| (...skipping 10 matching lines...) | |
| 4119 filler->set_map_no_write_barrier(free_space_map()); | 4078 filler->set_map_no_write_barrier(free_space_map()); |
| 4120 FreeSpace::cast(filler)->set_size(size); | 4079 FreeSpace::cast(filler)->set_size(size); |
| 4121 } | 4080 } |
| 4122 } | 4081 } |
| 4123 | 4082 |
| 4124 | 4083 |
| 4125 MaybeObject* Heap::AllocateExternalArray(int length, | 4084 MaybeObject* Heap::AllocateExternalArray(int length, |
| 4126 ExternalArrayType array_type, | 4085 ExternalArrayType array_type, |
| 4127 void* external_pointer, | 4086 void* external_pointer, |
| 4128 PretenureFlag pretenure) { | 4087 PretenureFlag pretenure) { |
| 4129 AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE; | 4088 int size = ExternalArray::kAlignedSize; |
| | 4089 AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, pretenure); |
| 4130 Object* result; | 4090 Object* result; |
| 4131 { MaybeObject* maybe_result = AllocateRaw(ExternalArray::kAlignedSize, | 4091 { MaybeObject* maybe_result = AllocateRaw(size, space, OLD_DATA_SPACE); |
| 4132 space, | |
| 4133 OLD_DATA_SPACE); | |
| 4134 if (!maybe_result->ToObject(&result)) return maybe_result; | 4092 if (!maybe_result->ToObject(&result)) return maybe_result; |
| 4135 } | 4093 } |
| 4136 | 4094 |
| 4137 reinterpret_cast<ExternalArray*>(result)->set_map_no_write_barrier( | 4095 reinterpret_cast<ExternalArray*>(result)->set_map_no_write_barrier( |
| 4138 MapForExternalArrayType(array_type)); | 4096 MapForExternalArrayType(array_type)); |
| 4139 reinterpret_cast<ExternalArray*>(result)->set_length(length); | 4097 reinterpret_cast<ExternalArray*>(result)->set_length(length); |
| 4140 reinterpret_cast<ExternalArray*>(result)->set_external_pointer( | 4098 reinterpret_cast<ExternalArray*>(result)->set_external_pointer( |
| 4141 external_pointer); | 4099 external_pointer); |
| 4142 | 4100 |
| 4143 return result; | 4101 return result; |
| (...skipping 278 matching lines...) | |
| 4422 boilerplate = | 4380 boilerplate = |
| 4423 isolate()->context()->native_context()-> | 4381 isolate()->context()->native_context()-> |
| 4424 strict_mode_arguments_boilerplate(); | 4382 strict_mode_arguments_boilerplate(); |
| 4425 arguments_object_size = kArgumentsObjectSizeStrict; | 4383 arguments_object_size = kArgumentsObjectSizeStrict; |
| 4426 } else { | 4384 } else { |
| 4427 boilerplate = | 4385 boilerplate = |
| 4428 isolate()->context()->native_context()->arguments_boilerplate(); | 4386 isolate()->context()->native_context()->arguments_boilerplate(); |
| 4429 arguments_object_size = kArgumentsObjectSize; | 4387 arguments_object_size = kArgumentsObjectSize; |
| 4430 } | 4388 } |
| 4431 | 4389 |
| 4432 // This calls Copy directly rather than using Heap::AllocateRaw so we | |
| 4433 // duplicate the check here. | |
| 4434 ASSERT(AllowHeapAllocation::IsAllowed() && gc_state_ == NOT_IN_GC); | |
| 4435 | |
| 4436 // Check that the size of the boilerplate matches our | 4390 // Check that the size of the boilerplate matches our |
| 4437 // expectations. The ArgumentsAccessStub::GenerateNewObject relies | 4391 // expectations. The ArgumentsAccessStub::GenerateNewObject relies |
| 4438 // on the size being a known constant. | 4392 // on the size being a known constant. |
| 4439 ASSERT(arguments_object_size == boilerplate->map()->instance_size()); | 4393 ASSERT(arguments_object_size == boilerplate->map()->instance_size()); |
| 4440 | 4394 |
| 4441 // Do the allocation. | 4395 // Do the allocation. |
| 4442 Object* result; | 4396 Object* result; |
| 4443 { MaybeObject* maybe_result = | 4397 { MaybeObject* maybe_result = |
| 4444 AllocateRaw(arguments_object_size, NEW_SPACE, OLD_POINTER_SPACE); | 4398 AllocateRaw(arguments_object_size, NEW_SPACE, OLD_POINTER_SPACE); |
| 4445 if (!maybe_result->ToObject(&result)) return maybe_result; | 4399 if (!maybe_result->ToObject(&result)) return maybe_result; |
| (...skipping 115 matching lines...) | |
| 4561 int prop_size = map->InitialPropertiesLength(); | 4515 int prop_size = map->InitialPropertiesLength(); |
| 4562 ASSERT(prop_size >= 0); | 4516 ASSERT(prop_size >= 0); |
| 4563 { MaybeObject* maybe_properties = AllocateFixedArray(prop_size, pretenure); | 4517 { MaybeObject* maybe_properties = AllocateFixedArray(prop_size, pretenure); |
| 4564 if (!maybe_properties->To(&properties)) return maybe_properties; | 4518 if (!maybe_properties->To(&properties)) return maybe_properties; |
| 4565 } | 4519 } |
| 4566 } else { | 4520 } else { |
| 4567 properties = empty_fixed_array(); | 4521 properties = empty_fixed_array(); |
| 4568 } | 4522 } |
| 4569 | 4523 |
| 4570 // Allocate the JSObject. | 4524 // Allocate the JSObject. |
| 4571 AllocationSpace space = | 4525 int size = map->instance_size(); |
| 4572 (pretenure == TENURED) ? OLD_POINTER_SPACE : NEW_SPACE; | 4526 AllocationSpace space = SelectSpace(size, OLD_POINTER_SPACE, pretenure); |
| 4573 if (map->instance_size() > Page::kMaxNonCodeHeapObjectSize) space = LO_SPACE; | |
| 4574 Object* obj; | 4527 Object* obj; |
| 4575 MaybeObject* maybe_obj = Allocate(map, space); | 4528 MaybeObject* maybe_obj = Allocate(map, space); |
| 4576 if (!maybe_obj->To(&obj)) return maybe_obj; | 4529 if (!maybe_obj->To(&obj)) return maybe_obj; |
| 4577 | 4530 |
| 4578 // Initialize the JSObject. | 4531 // Initialize the JSObject. |
| 4579 InitializeJSObjectFromMap(JSObject::cast(obj), properties, map); | 4532 InitializeJSObjectFromMap(JSObject::cast(obj), properties, map); |
| 4580 ASSERT(JSObject::cast(obj)->HasFastElements() || | 4533 ASSERT(JSObject::cast(obj)->HasFastElements() || |
| 4581 JSObject::cast(obj)->HasExternalArrayElements()); | 4534 JSObject::cast(obj)->HasExternalArrayElements()); |
| 4582 return obj; | 4535 return obj; |
| 4583 } | 4536 } |
| (...skipping 12 matching lines...) | |
| 4596 | 4549 |
| 4597 // Allocate the backing storage for the properties. | 4550 // Allocate the backing storage for the properties. |
| 4598 int prop_size = map->InitialPropertiesLength(); | 4551 int prop_size = map->InitialPropertiesLength(); |
| 4599 ASSERT(prop_size >= 0); | 4552 ASSERT(prop_size >= 0); |
| 4600 FixedArray* properties; | 4553 FixedArray* properties; |
| 4601 { MaybeObject* maybe_properties = AllocateFixedArray(prop_size); | 4554 { MaybeObject* maybe_properties = AllocateFixedArray(prop_size); |
| 4602 if (!maybe_properties->To(&properties)) return maybe_properties; | 4555 if (!maybe_properties->To(&properties)) return maybe_properties; |
| 4603 } | 4556 } |
| 4604 | 4557 |
| 4605 // Allocate the JSObject. | 4558 // Allocate the JSObject. |
| 4606 AllocationSpace space = NEW_SPACE; | 4559 int size = map->instance_size(); |
| 4607 if (map->instance_size() > Page::kMaxNonCodeHeapObjectSize) space = LO_SPACE; | 4560 AllocationSpace space = SelectSpace(size, OLD_POINTER_SPACE, NOT_TENURED); |
| 4608 Object* obj; | 4561 Object* obj; |
| 4609 MaybeObject* maybe_obj = | 4562 MaybeObject* maybe_obj = |
| 4610 AllocateWithAllocationSite(map, space, allocation_site); | 4563 AllocateWithAllocationSite(map, space, allocation_site); |
| 4611 if (!maybe_obj->To(&obj)) return maybe_obj; | 4564 if (!maybe_obj->To(&obj)) return maybe_obj; |
| 4612 | 4565 |
| 4613 // Initialize the JSObject. | 4566 // Initialize the JSObject. |
| 4614 InitializeJSObjectFromMap(JSObject::cast(obj), properties, map); | 4567 InitializeJSObjectFromMap(JSObject::cast(obj), properties, map); |
| 4615 ASSERT(JSObject::cast(obj)->HasFastElements()); | 4568 ASSERT(JSObject::cast(obj)->HasFastElements()); |
| 4616 return obj; | 4569 return obj; |
| 4617 } | 4570 } |
| (...skipping 304 matching lines...) | |
| 4922 global->set_map(new_map); | 4875 global->set_map(new_map); |
| 4923 global->set_properties(dictionary); | 4876 global->set_properties(dictionary); |
| 4924 | 4877 |
| 4925 // Make sure result is a global object with properties in dictionary. | 4878 // Make sure result is a global object with properties in dictionary. |
| 4926 ASSERT(global->IsGlobalObject()); | 4879 ASSERT(global->IsGlobalObject()); |
| 4927 ASSERT(!global->HasFastProperties()); | 4880 ASSERT(!global->HasFastProperties()); |
| 4928 return global; | 4881 return global; |
| 4929 } | 4882 } |
| 4930 | 4883 |
| 4931 | 4884 |
| 4932 MaybeObject* Heap::CopyJSObject(JSObject* source) { | 4885 MaybeObject* Heap::CopyJSObject(JSObject* source, AllocationSite* site) { |
| 4933 // Never used to copy functions. If functions need to be copied we | 4886 // Never used to copy functions. If functions need to be copied we |
| 4934 // have to be careful to clear the literals array. | 4887 // have to be careful to clear the literals array. |
| 4935 SLOW_ASSERT(!source->IsJSFunction()); | 4888 SLOW_ASSERT(!source->IsJSFunction()); |
| 4936 | 4889 |
| 4937 // Make the clone. | 4890 // Make the clone. |
| 4938 Map* map = source->map(); | 4891 Map* map = source->map(); |
| 4939 int object_size = map->instance_size(); | 4892 int object_size = map->instance_size(); |
| 4940 Object* clone; | 4893 Object* clone; |
| 4941 | 4894 |
| | 4895 ASSERT(site == NULL || (AllocationSite::CanTrack(map->instance_type()) && |
| | 4896 map->instance_type() == JS_ARRAY_TYPE)); |
| | 4897 |
| 4942 WriteBarrierMode wb_mode = UPDATE_WRITE_BARRIER; | 4898 WriteBarrierMode wb_mode = UPDATE_WRITE_BARRIER; |
| 4943 | 4899 |
| 4944 // If we're forced to always allocate, we use the general allocation | 4900 // If we're forced to always allocate, we use the general allocation |
| 4945 // functions which may leave us with an object in old space. | 4901 // functions which may leave us with an object in old space. |
| 4946 if (always_allocate()) { | 4902 if (always_allocate()) { |
| 4947 { MaybeObject* maybe_clone = | 4903 { MaybeObject* maybe_clone = |
| 4948 AllocateRaw(object_size, NEW_SPACE, OLD_POINTER_SPACE); | 4904 AllocateRaw(object_size, NEW_SPACE, OLD_POINTER_SPACE); |
| 4949 if (!maybe_clone->ToObject(&clone)) return maybe_clone; | 4905 if (!maybe_clone->ToObject(&clone)) return maybe_clone; |
| 4950 } | 4906 } |
| 4951 Address clone_address = HeapObject::cast(clone)->address(); | 4907 Address clone_address = HeapObject::cast(clone)->address(); |
| 4952 CopyBlock(clone_address, | 4908 CopyBlock(clone_address, |
| 4953 source->address(), | 4909 source->address(), |
| 4954 object_size); | 4910 object_size); |
| 4955 // Update write barrier for all fields that lie beyond the header. | 4911 // Update write barrier for all fields that lie beyond the header. |
| 4956 RecordWrites(clone_address, | 4912 RecordWrites(clone_address, |
| 4957 JSObject::kHeaderSize, | 4913 JSObject::kHeaderSize, |
| 4958 (object_size - JSObject::kHeaderSize) / kPointerSize); | 4914 (object_size - JSObject::kHeaderSize) / kPointerSize); |
| 4959 } else { | 4915 } else { |
| 4960 wb_mode = SKIP_WRITE_BARRIER; | 4916 wb_mode = SKIP_WRITE_BARRIER; |
| 4961 | 4917 |
| 4962 { MaybeObject* maybe_clone = new_space_.AllocateRaw(object_size); | 4918 { int adjusted_object_size = site != NULL |
| | 4919 ? object_size + AllocationMemento::kSize |
| | 4920 : object_size; |
| | 4921 MaybeObject* maybe_clone = new_space_.AllocateRaw(adjusted_object_size); |
| 4963 if (!maybe_clone->ToObject(&clone)) return maybe_clone; | 4922 if (!maybe_clone->ToObject(&clone)) return maybe_clone; |
| 4964 } | 4923 } |
| 4965 SLOW_ASSERT(InNewSpace(clone)); | 4924 SLOW_ASSERT(InNewSpace(clone)); |
| 4966 // Since we know the clone is allocated in new space, we can copy | 4925 // Since we know the clone is allocated in new space, we can copy |
| 4967 // the contents without worrying about updating the write barrier. | 4926 // the contents without worrying about updating the write barrier. |
| 4968 CopyBlock(HeapObject::cast(clone)->address(), | 4927 CopyBlock(HeapObject::cast(clone)->address(), |
| 4969 source->address(), | 4928 source->address(), |
| 4970 object_size); | 4929 object_size); |
| | 4930 |
| | 4931 if (site != NULL) { |
| | 4932 AllocationMemento* alloc_memento = reinterpret_cast<AllocationMemento*>( |
| | 4933 reinterpret_cast<Address>(clone) + object_size); |
| | 4934 alloc_memento->set_map_no_write_barrier(allocation_memento_map()); |
| | 4935 ASSERT(site->map() == allocation_site_map()); |
| | 4936 alloc_memento->set_allocation_site(site, SKIP_WRITE_BARRIER); |
| | 4937 } |
| 4971 } | 4938 } |
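Note: the memento is now carved out of the same new-space allocation as the clone, replacing the out-of-line `AllocateStruct(ALLOCATION_MEMENTO_TYPE)` fallback deleted below. Resulting layout when `site != NULL`, derived from the address arithmetic above:

    // clone address                [ object_size bytes: bit-copy of source ]
    // clone address + object_size  [ AllocationMemento                     ]
    //                                map             = allocation_memento_map()
    //                                allocation_site = site
    //
    // One contiguous allocation guarantees the memento sits immediately
    // behind the object, which is where the GC looks for it when tracking
    // JSArray allocations back to their AllocationSite.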
| 4972 | 4939 |
| 4973 SLOW_ASSERT( | 4940 SLOW_ASSERT( |
| 4974 JSObject::cast(clone)->GetElementsKind() == source->GetElementsKind()); | |
| 4975 FixedArrayBase* elements = FixedArrayBase::cast(source->elements()); | |
| 4976 FixedArray* properties = FixedArray::cast(source->properties()); | |
| 4977 // Update elements if necessary. | |
| 4978 if (elements->length() > 0) { | |
| 4979 Object* elem; | |
| 4980 { MaybeObject* maybe_elem; | |
| 4981 if (elements->map() == fixed_cow_array_map()) { | |
| 4982 maybe_elem = FixedArray::cast(elements); | |
| 4983 } else if (source->HasFastDoubleElements()) { | |
| 4984 maybe_elem = CopyFixedDoubleArray(FixedDoubleArray::cast(elements)); | |
| 4985 } else { | |
| 4986 maybe_elem = CopyFixedArray(FixedArray::cast(elements)); | |
| 4987 } | |
| 4988 if (!maybe_elem->ToObject(&elem)) return maybe_elem; | |
| 4989 } | |
| 4990 JSObject::cast(clone)->set_elements(FixedArrayBase::cast(elem), wb_mode); | |
| 4991 } | |
| 4992 // Update properties if necessary. | |
| 4993 if (properties->length() > 0) { | |
| 4994 Object* prop; | |
| 4995 { MaybeObject* maybe_prop = CopyFixedArray(properties); | |
| 4996 if (!maybe_prop->ToObject(&prop)) return maybe_prop; | |
| 4997 } | |
| 4998 JSObject::cast(clone)->set_properties(FixedArray::cast(prop), wb_mode); | |
| 4999 } | |
| 5000 // Return the new clone. | |
| 5001 return clone; | |
| 5002 } | |
| 5003 | |
| 5004 | |
| 5005 MaybeObject* Heap::CopyJSObjectWithAllocationSite( | |
| 5006 JSObject* source, | |
| 5007 AllocationSite* site) { | |
| 5008 // Never used to copy functions. If functions need to be copied we | |
| 5009 // have to be careful to clear the literals array. | |
| 5010 SLOW_ASSERT(!source->IsJSFunction()); | |
| 5011 | |
| 5012 // Make the clone. | |
| 5013 Map* map = source->map(); | |
| 5014 int object_size = map->instance_size(); | |
| 5015 Object* clone; | |
| 5016 | |
| 5017 ASSERT(AllocationSite::CanTrack(map->instance_type())); | |
| 5018 ASSERT(map->instance_type() == JS_ARRAY_TYPE); | |
| 5019 WriteBarrierMode wb_mode = UPDATE_WRITE_BARRIER; | |
| 5020 | |
| 5021 // If we're forced to always allocate, we use the general allocation | |
| 5022 // functions which may leave us with an object in old space. | |
| 5023 int adjusted_object_size = object_size; | |
| 5024 if (always_allocate()) { | |
| 5025 // We'll only track origin if we are certain to allocate in new space | |
| 5026 const int kMinFreeNewSpaceAfterGC = InitialSemiSpaceSize() * 3/4; | |
| 5027 if ((object_size + AllocationMemento::kSize) < kMinFreeNewSpaceAfterGC) { | |
| 5028 adjusted_object_size += AllocationMemento::kSize; | |
| 5029 } | |
| 5030 | |
| 5031 { MaybeObject* maybe_clone = | |
| 5032 AllocateRaw(adjusted_object_size, NEW_SPACE, OLD_POINTER_SPACE); | |
| 5033 if (!maybe_clone->ToObject(&clone)) return maybe_clone; | |
| 5034 } | |
| 5035 Address clone_address = HeapObject::cast(clone)->address(); | |
| 5036 CopyBlock(clone_address, | |
| 5037 source->address(), | |
| 5038 object_size); | |
| 5039 // Update write barrier for all fields that lie beyond the header. | |
| 5040 int write_barrier_offset = adjusted_object_size > object_size | |
| 5041 ? JSArray::kSize + AllocationMemento::kSize | |
| 5042 : JSObject::kHeaderSize; | |
| 5043 if (((object_size - write_barrier_offset) / kPointerSize) > 0) { | |
| 5044 RecordWrites(clone_address, | |
| 5045 write_barrier_offset, | |
| 5046 (object_size - write_barrier_offset) / kPointerSize); | |
| 5047 } | |
| 5048 | |
| 5049 // Track allocation site information, if we failed to allocate it inline. | |
| 5050 if (InNewSpace(clone) && | |
| 5051 adjusted_object_size == object_size) { | |
| 5052 MaybeObject* maybe_alloc_memento = | |
| 5053 AllocateStruct(ALLOCATION_MEMENTO_TYPE); | |
| 5054 AllocationMemento* alloc_memento; | |
| 5055 if (maybe_alloc_memento->To(&alloc_memento)) { | |
| 5056 alloc_memento->set_map_no_write_barrier(allocation_memento_map()); | |
| 5057 ASSERT(site->map() == allocation_site_map()); | |
| 5058 alloc_memento->set_allocation_site(site, SKIP_WRITE_BARRIER); | |
| 5059 } | |
| 5060 } | |
| 5061 } else { | |
| 5062 wb_mode = SKIP_WRITE_BARRIER; | |
| 5063 adjusted_object_size += AllocationMemento::kSize; | |
| 5064 | |
| 5065 { MaybeObject* maybe_clone = new_space_.AllocateRaw(adjusted_object_size); | |
| 5066 if (!maybe_clone->ToObject(&clone)) return maybe_clone; | |
| 5067 } | |
| 5068 SLOW_ASSERT(InNewSpace(clone)); | |
| 5069 // Since we know the clone is allocated in new space, we can copy | |
| 5070 // the contents without worrying about updating the write barrier. | |
| 5071 CopyBlock(HeapObject::cast(clone)->address(), | |
| 5072 source->address(), | |
| 5073 object_size); | |
| 5074 } | |
| 5075 | |
| 5076 if (adjusted_object_size > object_size) { | |
| 5077 AllocationMemento* alloc_memento = reinterpret_cast<AllocationMemento*>( | |
| 5078 reinterpret_cast<Address>(clone) + object_size); | |
| 5079 alloc_memento->set_map_no_write_barrier(allocation_memento_map()); | |
| 5080 ASSERT(site->map() == allocation_site_map()); | |
| 5081 alloc_memento->set_allocation_site(site, SKIP_WRITE_BARRIER); | |
| 5082 } | |
| 5083 | |
| 5084 SLOW_ASSERT( | |
| 5085 JSObject::cast(clone)->GetElementsKind() == source->GetElementsKind()); | 4941 JSObject::cast(clone)->GetElementsKind() == source->GetElementsKind()); |
| 5086 FixedArrayBase* elements = FixedArrayBase::cast(source->elements()); | 4942 FixedArrayBase* elements = FixedArrayBase::cast(source->elements()); |
| 5087 FixedArray* properties = FixedArray::cast(source->properties()); | 4943 FixedArray* properties = FixedArray::cast(source->properties()); |
| 5088 // Update elements if necessary. | 4944 // Update elements if necessary. |
| 5089 if (elements->length() > 0) { | 4945 if (elements->length() > 0) { |
| 5090 Object* elem; | 4946 Object* elem; |
| 5091 { MaybeObject* maybe_elem; | 4947 { MaybeObject* maybe_elem; |
| 5092 if (elements->map() == fixed_cow_array_map()) { | 4948 if (elements->map() == fixed_cow_array_map()) { |
| 5093 maybe_elem = FixedArray::cast(elements); | 4949 maybe_elem = FixedArray::cast(elements); |
| 5094 } else if (source->HasFastDoubleElements()) { | 4950 } else if (source->HasFastDoubleElements()) { |
| (...skipping 267 matching lines...) | |
| 5362 } | 5218 } |
| 5363 map = ascii_internalized_string_map(); | 5219 map = ascii_internalized_string_map(); |
| 5364 size = SeqOneByteString::SizeFor(chars); | 5220 size = SeqOneByteString::SizeFor(chars); |
| 5365 } else { | 5221 } else { |
| 5366 if (chars > SeqTwoByteString::kMaxLength) { | 5222 if (chars > SeqTwoByteString::kMaxLength) { |
| 5367 return Failure::OutOfMemoryException(0xa); | 5223 return Failure::OutOfMemoryException(0xa); |
| 5368 } | 5224 } |
| 5369 map = internalized_string_map(); | 5225 map = internalized_string_map(); |
| 5370 size = SeqTwoByteString::SizeFor(chars); | 5226 size = SeqTwoByteString::SizeFor(chars); |
| 5371 } | 5227 } |
| | 5228 AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, TENURED); |
| 5372 | 5229 |
| 5373 // Allocate string. | 5230 // Allocate string. |
| 5374 Object* result; | 5231 Object* result; |
| 5375 { MaybeObject* maybe_result = (size > Page::kMaxNonCodeHeapObjectSize) | 5232 { MaybeObject* maybe_result = AllocateRaw(size, space, OLD_DATA_SPACE); |
| 5376 ? lo_space_->AllocateRaw(size, NOT_EXECUTABLE) | |
| 5377 : old_data_space_->AllocateRaw(size); | |
| 5378 if (!maybe_result->ToObject(&result)) return maybe_result; | 5233 if (!maybe_result->ToObject(&result)) return maybe_result; |
| 5379 } | 5234 } |
| 5380 | 5235 |
| 5381 reinterpret_cast<HeapObject*>(result)->set_map_no_write_barrier(map); | 5236 reinterpret_cast<HeapObject*>(result)->set_map_no_write_barrier(map); |
| 5382 // Set length and hash fields of the allocated string. | 5237 // Set length and hash fields of the allocated string. |
| 5383 String* answer = String::cast(result); | 5238 String* answer = String::cast(result); |
| 5384 answer->set_length(chars); | 5239 answer->set_length(chars); |
| 5385 answer->set_hash_field(hash_field); | 5240 answer->set_hash_field(hash_field); |
| 5386 | 5241 |
| 5387 ASSERT_EQ(size, answer->Size()); | 5242 ASSERT_EQ(size, answer->Size()); |
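Note: internalized strings are always tenured, so passing a literal `TENURED` makes `SelectSpace` (under the reconstruction given earlier) reduce to exactly the branch it replaces:

    // SelectSpace(size, OLD_DATA_SPACE, TENURED)
    //   == (size > Page::kMaxNonCodeHeapObjectSize) ? LO_SPACE : OLD_DATA_SPACE

The explicit `lo_space_->AllocateRaw` / `old_data_space_->AllocateRaw` pair collapses into one `AllocateRaw(size, space, OLD_DATA_SPACE)` call.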
| (...skipping 18 matching lines...) | |
| 5406 Vector<const char>, int, uint32_t); | 5261 Vector<const char>, int, uint32_t); |
| 5407 | 5262 |
| 5408 | 5263 |
| 5409 MaybeObject* Heap::AllocateRawOneByteString(int length, | 5264 MaybeObject* Heap::AllocateRawOneByteString(int length, |
| 5410 PretenureFlag pretenure) { | 5265 PretenureFlag pretenure) { |
| 5411 if (length < 0 || length > SeqOneByteString::kMaxLength) { | 5266 if (length < 0 || length > SeqOneByteString::kMaxLength) { |
| 5412 return Failure::OutOfMemoryException(0xb); | 5267 return Failure::OutOfMemoryException(0xb); |
| 5413 } | 5268 } |
| 5414 int size = SeqOneByteString::SizeFor(length); | 5269 int size = SeqOneByteString::SizeFor(length); |
| 5415 ASSERT(size <= SeqOneByteString::kMaxSize); | 5270 ASSERT(size <= SeqOneByteString::kMaxSize); |
| 5416 AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE; | 5271 AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, pretenure); |
| 5417 AllocationSpace retry_space = OLD_DATA_SPACE; | |
| 5418 | |
| 5419 if (size > Page::kMaxNonCodeHeapObjectSize) { | |
| 5420 // Allocate in large object space, retry space will be ignored. | |
| 5421 space = LO_SPACE; | |
| 5422 } | |
| 5423 | 5272 |
| 5424 Object* result; | 5273 Object* result; |
| 5425 { MaybeObject* maybe_result = AllocateRaw(size, space, retry_space); | 5274 { MaybeObject* maybe_result = AllocateRaw(size, space, OLD_DATA_SPACE); |
| 5426 if (!maybe_result->ToObject(&result)) return maybe_result; | 5275 if (!maybe_result->ToObject(&result)) return maybe_result; |
| 5427 } | 5276 } |
| 5428 | 5277 |
| 5429 // Partially initialize the object. | 5278 // Partially initialize the object. |
| 5430 HeapObject::cast(result)->set_map_no_write_barrier(ascii_string_map()); | 5279 HeapObject::cast(result)->set_map_no_write_barrier(ascii_string_map()); |
| 5431 String::cast(result)->set_length(length); | 5280 String::cast(result)->set_length(length); |
| 5432 String::cast(result)->set_hash_field(String::kEmptyHashField); | 5281 String::cast(result)->set_hash_field(String::kEmptyHashField); |
| 5433 ASSERT_EQ(size, HeapObject::cast(result)->Size()); | 5282 ASSERT_EQ(size, HeapObject::cast(result)->Size()); |
| 5434 | 5283 |
| 5435 return result; | 5284 return result; |
| 5436 } | 5285 } |
| 5437 | 5286 |
| 5438 | 5287 |
| 5439 MaybeObject* Heap::AllocateRawTwoByteString(int length, | 5288 MaybeObject* Heap::AllocateRawTwoByteString(int length, |
| 5440 PretenureFlag pretenure) { | 5289 PretenureFlag pretenure) { |
| 5441 if (length < 0 || length > SeqTwoByteString::kMaxLength) { | 5290 if (length < 0 || length > SeqTwoByteString::kMaxLength) { |
| 5442 return Failure::OutOfMemoryException(0xc); | 5291 return Failure::OutOfMemoryException(0xc); |
| 5443 } | 5292 } |
| 5444 int size = SeqTwoByteString::SizeFor(length); | 5293 int size = SeqTwoByteString::SizeFor(length); |
| 5445 ASSERT(size <= SeqTwoByteString::kMaxSize); | 5294 ASSERT(size <= SeqTwoByteString::kMaxSize); |
| 5446 AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE; | 5295 AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, pretenure); |
| 5447 AllocationSpace retry_space = OLD_DATA_SPACE; | |
| 5448 | |
| 5449 if (size > Page::kMaxNonCodeHeapObjectSize) { | |
| 5450 // Allocate in large object space, retry space will be ignored. | |
| 5451 space = LO_SPACE; | |
| 5452 } | |
| 5453 | 5296 |
| 5454 Object* result; | 5297 Object* result; |
| 5455 { MaybeObject* maybe_result = AllocateRaw(size, space, retry_space); | 5298 { MaybeObject* maybe_result = AllocateRaw(size, space, OLD_DATA_SPACE); |
| 5456 if (!maybe_result->ToObject(&result)) return maybe_result; | 5299 if (!maybe_result->ToObject(&result)) return maybe_result; |
| 5457 } | 5300 } |
| 5458 | 5301 |
| 5459 // Partially initialize the object. | 5302 // Partially initialize the object. |
| 5460 HeapObject::cast(result)->set_map_no_write_barrier(string_map()); | 5303 HeapObject::cast(result)->set_map_no_write_barrier(string_map()); |
| 5461 String::cast(result)->set_length(length); | 5304 String::cast(result)->set_length(length); |
| 5462 String::cast(result)->set_hash_field(String::kEmptyHashField); | 5305 String::cast(result)->set_hash_field(String::kEmptyHashField); |
| 5463 ASSERT_EQ(size, HeapObject::cast(result)->Size()); | 5306 ASSERT_EQ(size, HeapObject::cast(result)->Size()); |
| 5464 return result; | 5307 return result; |
| 5465 } | 5308 } |
| (...skipping 82 matching lines...) | |
| 5548 HeapObject* dst = HeapObject::cast(obj); | 5391 HeapObject* dst = HeapObject::cast(obj); |
| 5549 dst->set_map_no_write_barrier(map); | 5392 dst->set_map_no_write_barrier(map); |
| 5550 CopyBlock( | 5393 CopyBlock( |
| 5551 dst->address() + FixedDoubleArray::kLengthOffset, | 5394 dst->address() + FixedDoubleArray::kLengthOffset, |
| 5552 src->address() + FixedDoubleArray::kLengthOffset, | 5395 src->address() + FixedDoubleArray::kLengthOffset, |
| 5553 FixedDoubleArray::SizeFor(len) - FixedDoubleArray::kLengthOffset); | 5396 FixedDoubleArray::SizeFor(len) - FixedDoubleArray::kLengthOffset); |
| 5554 return obj; | 5397 return obj; |
| 5555 } | 5398 } |
| 5556 | 5399 |
| 5557 | 5400 |
| 5558 MaybeObject* Heap::AllocateFixedArray(int length) { | |
| 5559 ASSERT(length >= 0); | |
| 5560 if (length == 0) return empty_fixed_array(); | |
| 5561 Object* result; | |
| 5562 { MaybeObject* maybe_result = AllocateRawFixedArray(length); | |
| 5563 if (!maybe_result->ToObject(&result)) return maybe_result; | |
| 5564 } | |
| 5565 // Initialize header. | |
| 5566 FixedArray* array = reinterpret_cast<FixedArray*>(result); | |
| 5567 array->set_map_no_write_barrier(fixed_array_map()); | |
| 5568 array->set_length(length); | |
| 5569 // Initialize body. | |
| 5570 ASSERT(!InNewSpace(undefined_value())); | |
| 5571 MemsetPointer(array->data_start(), undefined_value(), length); | |
| 5572 return result; | |
| 5573 } | |
| 5574 | |
| 5575 | |
| 5576 MaybeObject* Heap::AllocateRawFixedArray(int length, PretenureFlag pretenure) { | 5401 MaybeObject* Heap::AllocateRawFixedArray(int length, PretenureFlag pretenure) { |
| 5577 if (length < 0 || length > FixedArray::kMaxLength) { | 5402 if (length < 0 || length > FixedArray::kMaxLength) { |
| 5578 return Failure::OutOfMemoryException(0xe); | 5403 return Failure::OutOfMemoryException(0xe); |
| 5579 } | 5404 } |
| 5580 int size = FixedArray::SizeFor(length); | 5405 int size = FixedArray::SizeFor(length); |
| 5581 AllocationSpace space = | 5406 AllocationSpace space = SelectSpace(size, OLD_POINTER_SPACE, pretenure); |
| 5582 (pretenure == TENURED) ? OLD_POINTER_SPACE : NEW_SPACE; | |
| 5583 AllocationSpace retry_space = OLD_POINTER_SPACE; | |
| 5584 | 5407 |
| 5585 if (size > Page::kMaxNonCodeHeapObjectSize) { | 5408 return AllocateRaw(size, space, OLD_POINTER_SPACE); |
| 5586 // Allocate in large object space, retry space will be ignored. | |
| 5587 space = LO_SPACE; | |
| 5588 } | |
| 5589 | |
| 5590 return AllocateRaw(size, space, retry_space); | |
| 5591 } | 5409 } |
| 5592 | 5410 |
| 5593 | 5411 |
| 5594 MUST_USE_RESULT static MaybeObject* AllocateFixedArrayWithFiller( | 5412 MUST_USE_RESULT static MaybeObject* AllocateFixedArrayWithFiller( |
| 5595 Heap* heap, | 5413 Heap* heap, |
| 5596 int length, | 5414 int length, |
| 5597 PretenureFlag pretenure, | 5415 PretenureFlag pretenure, |
| 5598 Object* filler) { | 5416 Object* filler) { |
| 5599 ASSERT(length >= 0); | 5417 ASSERT(length >= 0); |
| 5600 ASSERT(heap->empty_fixed_array()->IsFixedArray()); | 5418 ASSERT(heap->empty_fixed_array()->IsFixedArray()); |
| (...skipping 97 matching lines...) | |
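Note: the deleted new-space-only `AllocateFixedArray(int)` above initialized its body with `MemsetPointer(..., undefined_value(), ...)`; that duty now falls to `AllocateFixedArrayWithFiller`, whose body the diff skips here. A sketch mirroring the deleted overload (a reconstruction, not verbatim):

    Object* result;
    { MaybeObject* maybe_result = heap->AllocateRawFixedArray(length, pretenure);
      if (!maybe_result->ToObject(&result)) return maybe_result;
    }
    // Initialize header, then fill the body with the caller-supplied filler.
    FixedArray* array = reinterpret_cast<FixedArray*>(result);
    array->set_map_no_write_barrier(heap->fixed_array_map());
    array->set_length(length);
    ASSERT(!heap->InNewSpace(filler));
    MemsetPointer(array->data_start(), filler, length);
    return array;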
| 5698 return elements; | 5516 return elements; |
| 5699 } | 5517 } |
| 5700 | 5518 |
| 5701 | 5519 |
| 5702 MaybeObject* Heap::AllocateRawFixedDoubleArray(int length, | 5520 MaybeObject* Heap::AllocateRawFixedDoubleArray(int length, |
| 5703 PretenureFlag pretenure) { | 5521 PretenureFlag pretenure) { |
| 5704 if (length < 0 || length > FixedDoubleArray::kMaxLength) { | 5522 if (length < 0 || length > FixedDoubleArray::kMaxLength) { |
| 5705 return Failure::OutOfMemoryException(0xf); | 5523 return Failure::OutOfMemoryException(0xf); |
| 5706 } | 5524 } |
| 5707 int size = FixedDoubleArray::SizeFor(length); | 5525 int size = FixedDoubleArray::SizeFor(length); |
| 5708 AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE; | |
| 5709 AllocationSpace retry_space = OLD_DATA_SPACE; | |
| 5710 | |
| 5711 #ifndef V8_HOST_ARCH_64_BIT | 5526 #ifndef V8_HOST_ARCH_64_BIT |
| 5712 size += kPointerSize; | 5527 size += kPointerSize; |
| 5713 #endif | 5528 #endif |
| 5714 | 5529 AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, pretenure); |
| 5715 if (size > Page::kMaxNonCodeHeapObjectSize) { | |
| 5716 // Allocate in large object space, retry space will be ignored. | |
| 5717 space = LO_SPACE; | |
| 5718 } | |
| 5719 | 5530 |
| 5720 HeapObject* object; | 5531 HeapObject* object; |
| 5721 { MaybeObject* maybe_object = AllocateRaw(size, space, retry_space); | 5532 { MaybeObject* maybe_object = AllocateRaw(size, space, OLD_DATA_SPACE); |
| 5722 if (!maybe_object->To<HeapObject>(&object)) return maybe_object; | 5533 if (!maybe_object->To<HeapObject>(&object)) return maybe_object; |
| 5723 } | 5534 } |
| 5724 | 5535 |
| 5725 return EnsureDoubleAligned(this, object, size); | 5536 return EnsureDoubleAligned(this, object, size); |
| 5726 } | 5537 } |
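Note: on 32-bit hosts the extra `kPointerSize` pairs with `EnsureDoubleAligned`: one spare word lets the array start be realigned to 8 bytes after allocation. The helper is defined outside this hunk; roughly (reconstruction, an assumption):

    static HeapObject* EnsureDoubleAligned(Heap* heap, HeapObject* object,
                                           int size) {
      if ((OffsetFrom(object->address()) & kDoubleAlignmentMask) != 0) {
        // Misaligned: put a one-word filler first and shift the object up.
        heap->CreateFillerObjectAt(object->address(), kPointerSize);
        return HeapObject::FromAddress(object->address() + kPointerSize);
      } else {
        // Already aligned: the spare word becomes a filler at the end.
        heap->CreateFillerObjectAt(
            object->address() + size - kPointerSize, kPointerSize);
        return object;
      }
    }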
| 5727 | 5538 |
| 5728 | 5539 |
| 5729 MaybeObject* Heap::AllocateHashTable(int length, PretenureFlag pretenure) { | 5540 MaybeObject* Heap::AllocateHashTable(int length, PretenureFlag pretenure) { |
| 5730 Object* result; | 5541 Object* result; |
| 5731 { MaybeObject* maybe_result = AllocateFixedArray(length, pretenure); | 5542 { MaybeObject* maybe_result = AllocateFixedArray(length, pretenure); |
| (...skipping 183 matching lines...) | |
| 5915 switch (type) { | 5726 switch (type) { |
| 5916 #define MAKE_CASE(NAME, Name, name) \ | 5727 #define MAKE_CASE(NAME, Name, name) \ |
| 5917 case NAME##_TYPE: map = name##_map(); break; | 5728 case NAME##_TYPE: map = name##_map(); break; |
| 5918 STRUCT_LIST(MAKE_CASE) | 5729 STRUCT_LIST(MAKE_CASE) |
| 5919 #undef MAKE_CASE | 5730 #undef MAKE_CASE |
| 5920 default: | 5731 default: |
| 5921 UNREACHABLE(); | 5732 UNREACHABLE(); |
| 5922 return Failure::InternalError(); | 5733 return Failure::InternalError(); |
| 5923 } | 5734 } |
| 5924 int size = map->instance_size(); | 5735 int size = map->instance_size(); |
| 5925 AllocationSpace space = | 5736 AllocationSpace space = SelectSpace(size, OLD_POINTER_SPACE, TENURED); |
| 5926 (size > Page::kMaxNonCodeHeapObjectSize) ? LO_SPACE : OLD_POINTER_SPACE; | |
| 5927 Object* result; | 5737 Object* result; |
| 5928 { MaybeObject* maybe_result = Allocate(map, space); | 5738 { MaybeObject* maybe_result = Allocate(map, space); |
| 5929 if (!maybe_result->ToObject(&result)) return maybe_result; | 5739 if (!maybe_result->ToObject(&result)) return maybe_result; |
| 5930 } | 5740 } |
| 5931 Struct::cast(result)->InitializeBody(size); | 5741 Struct::cast(result)->InitializeBody(size); |
| 5932 return result; | 5742 return result; |
| 5933 } | 5743 } |
| 5934 | 5744 |
| 5935 | 5745 |
| 5936 bool Heap::IsHeapIterable() { | 5746 bool Heap::IsHeapIterable() { |
| (...skipping 2153 matching lines...) | |
| 8090 if (FLAG_concurrent_recompilation) { | 7900 if (FLAG_concurrent_recompilation) { |
| 8091 heap_->relocation_mutex_->Lock(); | 7901 heap_->relocation_mutex_->Lock(); |
| 8092 #ifdef DEBUG | 7902 #ifdef DEBUG |
| 8093 heap_->relocation_mutex_locked_by_optimizer_thread_ = | 7903 heap_->relocation_mutex_locked_by_optimizer_thread_ = |
| 8094 heap_->isolate()->optimizing_compiler_thread()->IsOptimizerThread(); | 7904 heap_->isolate()->optimizing_compiler_thread()->IsOptimizerThread(); |
| 8095 #endif // DEBUG | 7905 #endif // DEBUG |
| 8096 } | 7906 } |
| 8097 } | 7907 } |
| 8098 | 7908 |
| 8099 } } // namespace v8::internal | 7909 } } // namespace v8::internal |