OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 3139 matching lines...)
3150 return result; | 3150 return result; |
3151 } | 3151 } |
3152 | 3152 |
3153 | 3153 |
3154 MaybeObject* Heap::CreateCode(const CodeDesc& desc, | 3154 MaybeObject* Heap::CreateCode(const CodeDesc& desc, |
3155 Code::Flags flags, | 3155 Code::Flags flags, |
3156 Handle<Object> self_reference, | 3156 Handle<Object> self_reference, |
3157 bool immovable) { | 3157 bool immovable) { |
3158 // Allocate ByteArray before the Code object, so that we do not risk | 3158 // Allocate ByteArray before the Code object, so that we do not risk |
3159 // leaving uninitialized Code object (and breaking the heap). | 3159 // leaving uninitialized Code object (and breaking the heap). |
3160 ByteArray* reloc_info; | 3160 Object* reloc_info; |
3161 MaybeObject* maybe_reloc_info = AllocateByteArray(desc.reloc_size, TENURED); | 3161 { MaybeObject* maybe_reloc_info = AllocateByteArray(desc.reloc_size, TENURED); |
3162 if (!maybe_reloc_info->To(&reloc_info)) return maybe_reloc_info; | 3162 if (!maybe_reloc_info->ToObject(&reloc_info)) return maybe_reloc_info; |
| 3163 } |
3163 | 3164 |
3164 // Compute size. | 3165 // Compute size. |
3165 int body_size = RoundUp(desc.instr_size, kObjectAlignment); | 3166 int body_size = RoundUp(desc.instr_size, kObjectAlignment); |
3166 int obj_size = Code::SizeFor(body_size); | 3167 int obj_size = Code::SizeFor(body_size); |
3167 ASSERT(IsAligned(static_cast<intptr_t>(obj_size), kCodeAlignment)); | 3168 ASSERT(IsAligned(static_cast<intptr_t>(obj_size), kCodeAlignment)); |
3168 MaybeObject* maybe_result; | 3169 MaybeObject* maybe_result; |
3169 // Large code objects and code objects which should stay at a fixed address | 3170 // Large code objects and code objects which should stay at a fixed address |
3170 // are allocated in large object space. | 3171 // are allocated in large object space. |
3171 if (obj_size > MaxObjectSizeInPagedSpace() || immovable) { | 3172 if (obj_size > MaxObjectSizeInPagedSpace() || immovable) { |
3172 maybe_result = lo_space_->AllocateRaw(obj_size, EXECUTABLE); | 3173 maybe_result = lo_space_->AllocateRaw(obj_size, EXECUTABLE); |
3173 } else { | 3174 } else { |
3174 maybe_result = code_space_->AllocateRaw(obj_size); | 3175 maybe_result = code_space_->AllocateRaw(obj_size); |
3175 } | 3176 } |
3176 | 3177 |
3177 Object* result; | 3178 Object* result; |
3178 if (!maybe_result->ToObject(&result)) return maybe_result; | 3179 if (!maybe_result->ToObject(&result)) return maybe_result; |
3179 | 3180 |
3180 // Initialize the object | 3181 // Initialize the object |
3181 HeapObject::cast(result)->set_map(code_map()); | 3182 HeapObject::cast(result)->set_map(code_map()); |
3182 Code* code = Code::cast(result); | 3183 Code* code = Code::cast(result); |
3183 ASSERT(!isolate_->code_range()->exists() || | 3184 ASSERT(!isolate_->code_range()->exists() || |
3184 isolate_->code_range()->contains(code->address())); | 3185 isolate_->code_range()->contains(code->address())); |
3185 code->set_instruction_size(desc.instr_size); | 3186 code->set_instruction_size(desc.instr_size); |
3186 code->set_relocation_info(reloc_info); | 3187 code->set_relocation_info(ByteArray::cast(reloc_info)); |
3187 code->set_flags(flags); | 3188 code->set_flags(flags); |
3188 if (code->is_call_stub() || code->is_keyed_call_stub()) { | 3189 if (code->is_call_stub() || code->is_keyed_call_stub()) { |
3189 code->set_check_type(RECEIVER_MAP_CHECK); | 3190 code->set_check_type(RECEIVER_MAP_CHECK); |
3190 } | 3191 } |
3191 code->set_deoptimization_data(empty_fixed_array()); | 3192 code->set_deoptimization_data(empty_fixed_array()); |
3192 code->set_handler_table(empty_fixed_array()); | |
3193 code->set_next_code_flushing_candidate(undefined_value()); | 3193 code->set_next_code_flushing_candidate(undefined_value()); |
3194 // Allow self references to created code object by patching the handle to | 3194 // Allow self references to created code object by patching the handle to |
3195 // point to the newly allocated Code object. | 3195 // point to the newly allocated Code object. |
3196 if (!self_reference.is_null()) { | 3196 if (!self_reference.is_null()) { |
3197 *(self_reference.location()) = code; | 3197 *(self_reference.location()) = code; |
3198 } | 3198 } |
3199 // Migrate generated code. | 3199 // Migrate generated code. |
3200 // The generated code can contain Object** values (typically from handles) | 3200 // The generated code can contain Object** values (typically from handles) |
3201 // that are dereferenced during the copy to point directly to the actual heap | 3201 // that are dereferenced during the copy to point directly to the actual heap |
3202 // objects. These pointers can include references to the code object itself, | 3202 // objects. These pointers can include references to the code object itself, |
(...skipping 3253 matching lines...)
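To make the control flow of the hunk above easier to follow in isolation, here is a minimal, self-contained C++ sketch of the allocate-then-check idiom the new side of the diff switches to: an untyped ToObject() out-parameter plus an explicit cast at the point of use, in place of the typed To(&reloc_info) call on the old side. Everything below (the MaybeObject struct with a value/is_failure pair, AllocateByteArray, CreateThing) is an invented stand-in for illustration only, not V8's real heap API.

#include <cstdio>

// Hypothetical, simplified stand-in types -- just enough to show the
// error-propagation shape used in Heap::CreateCode above.
struct Object {};
struct ByteArray : Object { int length; };

// A MaybeObject either carries a successfully allocated Object or marks an
// allocation failure; ToObject() reports which case we are in.
struct MaybeObject {
  Object* value;            // non-null on success
  bool is_failure;          // true when the allocation did not succeed
  bool ToObject(Object** out) {
    if (is_failure) return false;
    *out = value;
    return true;
  }
};

// Sketch of an allocator that can fail (e.g. when a GC would be required).
MaybeObject* AllocateByteArray(int length) {
  static ByteArray backing;
  static MaybeObject ok = {&backing, false};
  static MaybeObject failure = {nullptr, true};
  backing.length = length;
  return length >= 0 ? &ok : &failure;
}

// Mirrors the shape of the hunk: allocate the dependent object first, bail
// out early on failure, and only cast to the concrete type at the use site.
MaybeObject* CreateThing(int reloc_size) {
  Object* reloc_info;
  { MaybeObject* maybe_reloc_info = AllocateByteArray(reloc_size);
    if (!maybe_reloc_info->ToObject(&reloc_info)) return maybe_reloc_info;
  }
  ByteArray* reloc = static_cast<ByteArray*>(reloc_info);
  std::printf("allocated reloc info of length %d\n", reloc->length);
  // ... allocate and initialize the main object here ...
  return AllocateByteArray(0);  // placeholder for the real result
}

int main() {
  CreateThing(16);   // success path
  CreateThing(-1);   // failure path: the MaybeObject propagates to the caller
  return 0;
}

The scoped block around the allocation mirrors the diff: if the ByteArray allocation fails, that failure is returned before anything else is touched, which is the point of the comment at the top of CreateCode about never leaving an uninitialized Code object on the heap.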
6456 isolate_->heap()->store_buffer()->Compact(); | 6456 isolate_->heap()->store_buffer()->Compact(); |
6457 isolate_->heap()->store_buffer()->Filter(MemoryChunk::ABOUT_TO_BE_FREED); | 6457 isolate_->heap()->store_buffer()->Filter(MemoryChunk::ABOUT_TO_BE_FREED); |
6458 for (chunk = chunks_queued_for_free_; chunk != NULL; chunk = next) { | 6458 for (chunk = chunks_queued_for_free_; chunk != NULL; chunk = next) { |
6459 next = chunk->next_chunk(); | 6459 next = chunk->next_chunk(); |
6460 isolate_->memory_allocator()->Free(chunk); | 6460 isolate_->memory_allocator()->Free(chunk); |
6461 } | 6461 } |
6462 chunks_queued_for_free_ = NULL; | 6462 chunks_queued_for_free_ = NULL; |
6463 } | 6463 } |
6464 | 6464 |
6465 } } // namespace v8::internal | 6465 } } // namespace v8::internal |
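The second hunk appears to be the tail of the routine that drains memory chunks queued for freeing: the store buffer is compacted and filtered first, then the queued chunks are walked and released, with next_chunk() read before each Free() call. As a self-contained illustration of why the loop is written that way, here is a small sketch with invented names (Chunk, ChunkQueue, Enqueue, FreeAll are not V8's MemoryChunk or MemoryAllocator API):

#include <cstdio>

// Invented stand-in types for illustration only.
struct Chunk {
  Chunk* next;
};

struct ChunkQueue {
  Chunk* head = nullptr;

  void Enqueue(Chunk* chunk) {
    chunk->next = head;
    head = chunk;
  }

  // Mirrors the loop shape in the diff:
  //   for (chunk = chunks_queued_for_free_; chunk != NULL; chunk = next)
  // The successor pointer must be read before the chunk is released, because
  // the link itself lives inside the memory being freed.
  void FreeAll() {
    Chunk* next = nullptr;
    for (Chunk* chunk = head; chunk != nullptr; chunk = next) {
      next = chunk->next;   // save the link first
      delete chunk;         // then release the chunk
    }
    head = nullptr;         // analogous to chunks_queued_for_free_ = NULL
  }
};

int main() {
  ChunkQueue queue;
  for (int i = 0; i < 3; ++i) queue.Enqueue(new Chunk);
  queue.FreeAll();
  std::printf("queue drained\n");
  return 0;
}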