OLD | NEW |
1 // Copyright 2015 the V8 project authors. All rights reserved. | 1 // Copyright 2015 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/compiler/code-stub-assembler.h" | 5 #include "src/compiler/code-stub-assembler.h" |
6 | 6 |
7 #include <ostream> | 7 #include <ostream> |
8 | 8 |
9 #include "src/code-factory.h" | 9 #include "src/code-factory.h" |
10 #include "src/compiler/graph.h" | 10 #include "src/compiler/graph.h" |
(...skipping 227 matching lines...)
238 compiler::Node* roots_array_start = | 238 compiler::Node* roots_array_start = |
239 ExternalConstant(ExternalReference::roots_array_start(isolate())); | 239 ExternalConstant(ExternalReference::roots_array_start(isolate())); |
240 USE(roots_array_start); | 240 USE(roots_array_start); |
241 | 241 |
242 // TODO(danno): Implement the root-access case where the root is not constant | 242 // TODO(danno): Implement the root-access case where the root is not constant |
243 // and must be loaded from the root array. | 243 // and must be loaded from the root array. |
244 UNIMPLEMENTED(); | 244 UNIMPLEMENTED(); |
245 return nullptr; | 245 return nullptr; |
246 } | 246 } |
247 | 247 |
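| // Fast-path bump-pointer allocation: reserve size_in_bytes by advancing |
| // the space's allocation top, falling back to the runtime when top would |
| // cross the limit. Returns a tagged HeapObject pointer. |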
| 248 Node* CodeStubAssembler::AllocateRawUnaligned(Node* size_in_bytes, |
| 249 AllocationFlags flags, |
| 250 Node* top_address, |
| 251 Node* limit_address) { |
| 252 Node* top = Load(MachineType::Pointer(), top_address); |
| 253 Node* limit = Load(MachineType::Pointer(), limit_address); |
| 254 |
| 255 // If there's not enough space, call the runtime. |
| 256 RawMachineLabel runtime_call, no_runtime_call, merge_runtime; |
| 257 raw_assembler_->Branch( |
| 258 raw_assembler_->IntPtrLessThan(IntPtrSub(limit, top), size_in_bytes), |
| 259 &runtime_call, &no_runtime_call); |
| 260 |
| 261 raw_assembler_->Bind(&runtime_call); |
| 262 // AllocateInTargetSpace does not use the context. |
| 263 Node* context = IntPtrConstant(0); |
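| // Encode the flags the way Runtime_AllocateInTargetSpace expects them: no |
| // double alignment here, and the target space derived from kPretenured. |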
| 264 Node* runtime_flags = SmiTag(Int32Constant( |
| 265 AllocateDoubleAlignFlag::encode(false) | |
| 266 AllocateTargetSpace::encode(flags & kPretenured |
| 267 ? AllocationSpace::OLD_SPACE |
| 268 : AllocationSpace::NEW_SPACE))); |
| 269 Node* runtime_result = CallRuntime(Runtime::kAllocateInTargetSpace, context, |
| 270 SmiTag(size_in_bytes), runtime_flags); |
| 271 raw_assembler_->Goto(&merge_runtime); |
| 272 |
| 273 // When there is enough space, return `top' and bump it up. |
| 274 raw_assembler_->Bind(&no_runtime_call); |
| 275 Node* no_runtime_result = top; |
| 276 StoreNoWriteBarrier(MachineType::PointerRepresentation(), top_address, |
| 277 IntPtrAdd(top, size_in_bytes)); |
| 278 no_runtime_result = |
| 279 IntPtrAdd(no_runtime_result, IntPtrConstant(kHeapObjectTag)); |
| 280 raw_assembler_->Goto(&merge_runtime); |
| 281 |
| 282 raw_assembler_->Bind(&merge_runtime); |
| 283 return raw_assembler_->Phi(MachineType::PointerRepresentation(), |
| 284 runtime_result, no_runtime_result); |
| 285 } |
| 286 |
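| // As AllocateRawUnaligned, but supports kDoubleAlignment: when top is |
| // misaligned, one extra word is reserved and plugged with a one-pointer |
| // filler so that the object itself starts on a double-aligned address. |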
| 287 Node* CodeStubAssembler::AllocateRawAligned(Node* size_in_bytes, |
| 288 AllocationFlags flags, |
| 289 Node* top_address, |
| 290 Node* limit_address) { |
| 291 Node* top = Load(MachineType::Pointer(), top_address); |
| 292 Node* limit = Load(MachineType::Pointer(), limit_address); |
| 293 Node* adjusted_size = size_in_bytes; |
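| // If top is not double-aligned, one extra word is needed for the filler. |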
| 294 if (flags & kDoubleAlignment) { |
| 295 // TODO(epertoso): Simd128 alignment. |
| 296 RawMachineLabel aligned, not_aligned, merge; |
| 297 raw_assembler_->Branch(WordAnd(top, IntPtrConstant(kDoubleAlignmentMask)), |
| 298 &not_aligned, &aligned); |
| 299 |
| 300 raw_assembler_->Bind(&not_aligned); |
| 301 Node* not_aligned_size = |
| 302 IntPtrAdd(size_in_bytes, IntPtrConstant(kPointerSize)); |
| 303 raw_assembler_->Goto(&merge); |
| 304 |
| 305 raw_assembler_->Bind(&aligned); |
| 306 raw_assembler_->Goto(&merge); |
| 307 |
| 308 raw_assembler_->Bind(&merge); |
| 309 adjusted_size = raw_assembler_->Phi(MachineType::PointerRepresentation(), |
| 310 not_aligned_size, adjusted_size); |
| 311 } |
| 312 |
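| // adjusted_size already accounts for alignment, so the nested allocation |
| // is performed unaligned (kNone). |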
| 313 Node* address = AllocateRawUnaligned(adjusted_size, kNone, top_address, limit_address); |
| 314 |
| 315 RawMachineLabel needs_filler, doesnt_need_filler, merge_address; |
| 316 raw_assembler_->Branch( |
| 317 raw_assembler_->IntPtrEqual(adjusted_size, size_in_bytes), |
| 318 &doesnt_need_filler, &needs_filler); |
| 319 |
| 320 raw_assembler_->Bind(&needs_filler); |
| 321 // Store a filler and increase the address by kPointerSize. |
| 322 // TODO(epertoso): this code assumes that we only align to kDoubleSize. Change |
| 323 // it when Simd128 alignment is supported. |
| 324 StoreNoWriteBarrier(MachineType::PointerRepresentation(), top, |
| 325 LoadRoot(Heap::kOnePointerFillerMapRootIndex)); |
| 326 Node* address_with_filler = IntPtrAdd(address, IntPtrConstant(kPointerSize)); |
| 327 raw_assembler_->Goto(&merge_address); |
| 328 |
| 329 raw_assembler_->Bind(&doesnt_need_filler); |
| 330 Node* address_without_filler = address; |
| 331 raw_assembler_->Goto(&merge_address); |
| 332 |
| 333 raw_assembler_->Bind(&merge_address); |
| 334 address = raw_assembler_->Phi(MachineType::PointerRepresentation(), |
| 335 address_with_filler, address_without_filler); |
| 336 // Update the top. |
| 337 StoreNoWriteBarrier(MachineType::PointerRepresentation(), top_address, |
| 338 IntPtrAdd(top, adjusted_size)); |
| 339 return address; |
| 340 } |
| 341 |
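| // Picks the new- or old-space top and limit addresses depending on |
| // kPretenured; kDoubleAlignment only matters on 32-bit hosts, where |
| // kPointerSize is smaller than kDoubleSize. A hypothetical use from a |
| // stub body: |
| //   Node* object = Allocate(2 * kPointerSize, CodeStubAssembler::kNone); |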
| 342 Node* CodeStubAssembler::Allocate(int size_in_bytes, AllocationFlags flags) { |
| 343 bool const new_space = !(flags & kPretenured); |
| 344 Node* top_address = ExternalConstant( |
| 345 new_space |
| 346 ? ExternalReference::new_space_allocation_top_address(isolate()) |
| 347 : ExternalReference::old_space_allocation_top_address(isolate())); |
| 348 Node* limit_address = ExternalConstant( |
| 349 new_space |
| 350 ? ExternalReference::new_space_allocation_limit_address(isolate()) |
| 351 : ExternalReference::old_space_allocation_limit_address(isolate())); |
| 352 |
| 353 #ifdef V8_HOST_ARCH_32_BIT |
| 354 if (flags & kDoubleAlignment) { |
| 355 return AllocateRawAligned(IntPtrConstant(size_in_bytes), flags, top_address, |
| 356 limit_address); |
| 357 } |
| 358 #endif |
| 359 |
| 360 return AllocateRawUnaligned(IntPtrConstant(size_in_bytes), flags, top_address, |
| 361 limit_address); |
| 362 } |
| 363 |
248 Node* CodeStubAssembler::Load(MachineType rep, Node* base) { | 364 Node* CodeStubAssembler::Load(MachineType rep, Node* base) { |
249 return raw_assembler_->Load(rep, base); | 365 return raw_assembler_->Load(rep, base); |
250 } | 366 } |
251 | 367 |
252 Node* CodeStubAssembler::Load(MachineType rep, Node* base, Node* index) { | 368 Node* CodeStubAssembler::Load(MachineType rep, Node* base, Node* index) { |
253 return raw_assembler_->Load(rep, base, index); | 369 return raw_assembler_->Load(rep, base, index); |
254 } | 370 } |
255 | 371 |
256 Node* CodeStubAssembler::Store(MachineRepresentation rep, Node* base, | 372 Node* CodeStubAssembler::Store(MachineRepresentation rep, Node* base, |
257 Node* value) { | 373 Node* value) { |
(...skipping 415 matching lines...)
673 } | 789 } |
674 } | 790 } |
675 } | 791 } |
676 | 792 |
677 bound_ = true; | 793 bound_ = true; |
678 } | 794 } |
679 | 795 |
680 } // namespace compiler | 796 } // namespace compiler |
681 } // namespace internal | 797 } // namespace internal |
682 } // namespace v8 | 798 } // namespace v8 |