| OLD | NEW |
| 1 //===- subzero/src/IceGlobalContext.h - Global context defs -----*- C++ -*-===// | 1 //===- subzero/src/IceGlobalContext.h - Global context defs -----*- C++ -*-===// |
| 2 // | 2 // |
| 3 // The Subzero Code Generator | 3 // The Subzero Code Generator |
| 4 // | 4 // |
| 5 // This file is distributed under the University of Illinois Open Source | 5 // This file is distributed under the University of Illinois Open Source |
| 6 // License. See LICENSE.TXT for details. | 6 // License. See LICENSE.TXT for details. |
| 7 // | 7 // |
| 8 //===----------------------------------------------------------------------===// | 8 //===----------------------------------------------------------------------===// |
| 9 /// | 9 /// |
| 10 /// \file | 10 /// \file |
| (...skipping 219 matching lines...) |
| 230 } | 230 } |
| 231 | 231 |
| 232 template <typename T> | 232 template <typename T> |
| 233 typename std::enable_if<!std::is_trivially_destructible<T>::value, T>::type * | 233 typename std::enable_if<!std::is_trivially_destructible<T>::value, T>::type * |
| 234 allocate() { | 234 allocate() { |
| 235 T *Ret = getAllocator()->Allocate<T>(); | 235 T *Ret = getAllocator()->Allocate<T>(); |
| 236 getDestructors()->emplace_back([Ret]() { Ret->~T(); }); | 236 getDestructors()->emplace_back([Ret]() { Ret->~T(); }); |
| 237 return Ret; | 237 return Ret; |
| 238 } | 238 } |
| 239 | 239 |
| 240 template <typename T> T *allocate_initializer(SizeT Count = 1) { |
| 241 static_assert( |
| 242 std::is_trivially_destructible<T>::value, |
| 243 "allocate_initializer can only allocate trivially destructible types."); |
| 244 return getInitializerAllocator()->allocate_initializer<T>(Count); |
| 245 } |
| 246 |
| 247 template <typename T> T *allocate_variable_declaration() { |
| 248 static_assert(!std::is_trivially_destructible<T>::value, |
| 249 "allocate_variable_declaration expects non-trivially " |
| 250 "destructible types."); |
| 251 return getInitializerAllocator()->allocate_variable_declaration<T>(); |
| 252 } |
| 253 |
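The split between these overloads comes down to who runs destructors: the arena reclaims raw storage in bulk and never destroys individual objects, so non-trivially-destructible types must have a teardown callback registered, as `allocate()` above does. A minimal standalone sketch of that pattern (a hypothetical `Arena`/`Context` pair, not Subzero's `ArenaAllocator`), assuming default-constructible `T`:

```cpp
#include <cstdlib>
#include <functional>
#include <new>
#include <vector>

// Hypothetical bump-style arena: hands out raw storage and frees it in bulk,
// but never invokes destructors on the objects placed in it.
struct Arena {
  std::vector<void *> Blocks;
  void *allocateBytes(std::size_t Size) {
    void *Mem = std::malloc(Size);
    Blocks.push_back(Mem);
    return Mem;
  }
  ~Arena() {
    for (void *B : Blocks)
      std::free(B);
  }
};

struct Context {
  Arena A;
  std::vector<std::function<void()>> Destructors;

  // Mirrors the intent of GlobalContext::allocate() for non-trivially
  // destructible T: construct in arena storage and remember how to destroy
  // the object, since the arena alone would leak its resources.
  template <typename T> T *allocate() {
    T *Ret = new (A.allocateBytes(sizeof(T))) T();
    Destructors.emplace_back([Ret] { Ret->~T(); });
    return Ret;
  }

  // Run the recorded destructors before the arena releases the raw storage
  // (members are destroyed after this body, in reverse declaration order).
  ~Context() {
    for (auto &D : Destructors)
      D();
  }
};
```

Trivially destructible types skip the destructor bookkeeping entirely, which is what the `static_assert` in `allocate_initializer()` enforces.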
| 240 const Intrinsics &getIntrinsicsInfo() const { return IntrinsicsInfo; } | 254 const Intrinsics &getIntrinsicsInfo() const { return IntrinsicsInfo; } |
| 241 | 255 |
| 242 ELFObjectWriter *getObjectWriter() const { return ObjectWriter.get(); } | 256 ELFObjectWriter *getObjectWriter() const { return ObjectWriter.get(); } |
| 243 | 257 |
| 244 /// Reset stats at the beginning of a function. | 258 /// Reset stats at the beginning of a function. |
| 245 void resetStats() { | 259 void resetStats() { |
| 246 if (BuildDefs::dump()) | 260 if (BuildDefs::dump()) |
| 247 ICE_TLS_GET_FIELD(TLS)->StatsFunction.reset(); | 261 ICE_TLS_GET_FIELD(TLS)->StatsFunction.reset(); |
| 248 } | 262 } |
| 249 void dumpStats(const IceString &Name, bool Final = false); | 263 void dumpStats(const IceString &Name, bool Final = false); |
| (...skipping 178 matching lines...) |
| 428 /// verbose level for a particular function. An empty Match argument means | 442 /// verbose level for a particular function. An empty Match argument means |
| 429 /// match everything. Returns true if there is a match. | 443 /// match everything. Returns true if there is a match. |
| 430 static bool matchSymbolName(const IceString &SymbolName, | 444 static bool matchSymbolName(const IceString &SymbolName, |
| 431 const IceString &Match) { | 445 const IceString &Match) { |
| 432 return Match.empty() || Match == SymbolName; | 446 return Match.empty() || Match == SymbolName; |
| 433 } | 447 } |
| 434 | 448 |
| 435 static ClFlags Flags; | 449 static ClFlags Flags; |
| 436 static ClFlagsExtra ExtraFlags; | 450 static ClFlagsExtra ExtraFlags; |
| 437 | 451 |
| 452 /// DisposeGlobalVariablesAfterLowering controls whether the memory used by |
| 453 /// global variables can be reclaimed right after they have been lowered. |
| 454 /// @{ |
| 455 bool getDisposeGlobalVariablesAfterLowering() const { |
| 456 return DisposeGlobalVariablesAfterLowering; |
| 457 } |
| 458 |
| 459 void setDisposeGlobalVariablesAfterLowering(bool Value) { |
| 460 DisposeGlobalVariablesAfterLowering = Value; |
| 461 } |
| 462 /// @} |
| 463 |
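A short hedged sketch of how the flag is meant to be consulted (the `emitInitializer`/`discardInitializer` helpers here are placeholders, not Subzero APIs): once a global's initializer has been emitted, its in-memory representation can be dropped, but only when the context says that is safe.

```cpp
// Placeholder lowering step illustrating the flag's intent.
void lowerGlobal(Ice::GlobalContext *Ctx, Ice::VariableDeclaration *Var) {
  emitInitializer(Var); // hypothetical: write the initializer to the object file
  if (Ctx->getDisposeGlobalVariablesAfterLowering())
    discardInitializer(Var); // hypothetical: release the initializer's memory
}
```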
| 438 private: | 464 private: |
| 439 // Try to ensure mutexes are allocated on separate cache lines. | 465 // Try to ensure mutexes are allocated on separate cache lines. |
| 440 | 466 |
| 441 // Destructors collaborate with Allocator | 467 // Destructors collaborate with Allocator |
| 442 ICE_CACHELINE_BOUNDARY; | 468 ICE_CACHELINE_BOUNDARY; |
| 443 // Managed by getAllocator() | 469 // Managed by getAllocator() |
| 444 GlobalLockType AllocLock; | 470 GlobalLockType AllocLock; |
| 445 ArenaAllocator Allocator; | 471 ArenaAllocator Allocator; |
| 446 | 472 |
| 447 ICE_CACHELINE_BOUNDARY; | 473 ICE_CACHELINE_BOUNDARY; |
| 474 // Managed by getInitializerAllocator() |
| 475 GlobalLockType InitAllocLock; |
| 476 VariableDeclarationList Globals; |
| 477 |
| 478 ICE_CACHELINE_BOUNDARY; |
| 448 // Managed by getDestructors() | 479 // Managed by getDestructors() |
| 449 using DestructorArray = std::vector<std::function<void()>>; | 480 using DestructorArray = std::vector<std::function<void()>>; |
| 450 GlobalLockType DestructorsLock; | 481 GlobalLockType DestructorsLock; |
| 451 DestructorArray Destructors; | 482 DestructorArray Destructors; |
| 452 | 483 |
| 453 ICE_CACHELINE_BOUNDARY; | 484 ICE_CACHELINE_BOUNDARY; |
| 454 // Managed by getConstantPool() | 485 // Managed by getConstantPool() |
| 455 GlobalLockType ConstPoolLock; | 486 GlobalLockType ConstPoolLock; |
| 456 std::unique_ptr<ConstantPool> ConstPool; | 487 std::unique_ptr<ConstantPool> ConstPool; |
| 457 | 488 |
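The `ICE_CACHELINE_BOUNDARY` markers interleaved with these members keep each lock/pool pair on its own cache line so that threads contending on different locks do not false-share. The general technique, sketched with plain C++ `alignas` (a 64-byte line size is assumed here; the real macro may compute it differently):

```cpp
#include <cstddef>
#include <mutex>

// Assume a 64-byte cache line; C++17's
// std::hardware_destructive_interference_size is the portable way to ask.
constexpr std::size_t kCacheLineSize = 64;

// Aligning (and thereby padding) the wrapper to the line size guarantees two
// adjacent instances never occupy the same cache line.
struct alignas(kCacheLineSize) PaddedLock {
  std::mutex M;
};

struct Pools {
  PaddedLock AllocLock;     // starts on its own cache line
  PaddedLock ConstPoolLock; // cannot false-share with AllocLock
};
```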
| (...skipping 34 matching lines...) |
| 492 BoundedProducerConsumerQueue<EmitterWorkItem> EmitQ; | 523 BoundedProducerConsumerQueue<EmitterWorkItem> EmitQ; |
| 493 // DataLowering is only ever used by a single thread at a time (either in | 524 // DataLowering is only ever used by a single thread at a time (either in |
| 494 // emitItems(), or in IceCompiler::run before the compilation is over.) | 525 // emitItems(), or in IceCompiler::run before the compilation is over.) |
| 495 // TODO(jpp): move to EmitterContext. | 526 // TODO(jpp): move to EmitterContext. |
| 496 std::unique_ptr<TargetDataLowering> DataLowering; | 527 std::unique_ptr<TargetDataLowering> DataLowering; |
| 497 /// If !HasEmittedCode, SubZero will accumulate all Globals (which are "true" | 528 /// If !HasEmittedCode, SubZero will accumulate all Globals (which are "true" |
| 498 /// program global variables) until the first code WorkItem is seen. | 529 /// program global variables) until the first code WorkItem is seen. |
| 499 // TODO(jpp): move to EmitterContext. | 530 // TODO(jpp): move to EmitterContext. |
| 500 bool HasSeenCode = false; | 531 bool HasSeenCode = false; |
| 501 // TODO(jpp): move to EmitterContext. | 532 // TODO(jpp): move to EmitterContext. |
| 502 VariableDeclarationList Globals; | 533 VariableDeclaration *ProfileBlockInfoVarDecl = nullptr; |
| 503 // TODO(jpp): move to EmitterContext. | 534 std::vector<VariableDeclaration *> ProfileBlockInfos; |
| 504 VariableDeclaration *ProfileBlockInfoVarDecl; | 535 /// Indicates if global variable declarations can be disposed of right after |
| 536 /// lowering. |
| 537 bool DisposeGlobalVariablesAfterLowering = true; |
| 505 | 538 |
| 506 LockedPtr<ArenaAllocator> getAllocator() { | 539 LockedPtr<ArenaAllocator> getAllocator() { |
| 507 return LockedPtr<ArenaAllocator>(&Allocator, &AllocLock); | 540 return LockedPtr<ArenaAllocator>(&Allocator, &AllocLock); |
| 508 } | 541 } |
| 542 LockedPtr<VariableDeclarationList> getInitializerAllocator() { |
| 543 return LockedPtr<VariableDeclarationList>(&Globals, &InitAllocLock); |
| 544 } |
| 509 LockedPtr<ConstantPool> getConstPool() { | 545 LockedPtr<ConstantPool> getConstPool() { |
| 510 return LockedPtr<ConstantPool>(ConstPool.get(), &ConstPoolLock); | 546 return LockedPtr<ConstantPool>(ConstPool.get(), &ConstPoolLock); |
| 511 } | 547 } |
| 512 LockedPtr<JumpTableDataList> getJumpTableList() { | 548 LockedPtr<JumpTableDataList> getJumpTableList() { |
| 513 return LockedPtr<JumpTableDataList>(&JumpTableList, &JumpTablesLock); | 549 return LockedPtr<JumpTableDataList>(&JumpTableList, &JumpTablesLock); |
| 514 } | 550 } |
| 515 LockedPtr<CodeStats> getStatsCumulative() { | 551 LockedPtr<CodeStats> getStatsCumulative() { |
| 516 return LockedPtr<CodeStats>(&StatsCumulative, &StatsLock); | 552 return LockedPtr<CodeStats>(&StatsCumulative, &StatsLock); |
| 517 } | 553 } |
| 518 LockedPtr<TimerList> getTimers() { | 554 LockedPtr<TimerList> getTimers() { |
| 519 return LockedPtr<TimerList>(&Timers, &TimerLock); | 555 return LockedPtr<TimerList>(&Timers, &TimerLock); |
| 520 } | 556 } |
| 521 LockedPtr<DestructorArray> getDestructors() { | 557 LockedPtr<DestructorArray> getDestructors() { |
| 522 return LockedPtr<DestructorArray>(&Destructors, &DestructorsLock); | 558 return LockedPtr<DestructorArray>(&Destructors, &DestructorsLock); |
| 523 } | 559 } |
| 524 | 560 |
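All of these accessors return a `LockedPtr<T>`, which (judging by the `(&Object, &Lock)` construction) pairs a pointer with a held lock. A minimal sketch of such a wrapper under that assumption, named `LockedRef` here to make clear it is not Subzero's actual implementation:

```cpp
#include <mutex>

// RAII pointer-plus-lock: the lock is taken on construction and released when
// the wrapper is destroyed, so the caller has exclusive access to the guarded
// object for exactly the lifetime of the wrapper.
template <typename T, typename LockT = std::mutex> class LockedRef {
  T *Ptr;
  std::unique_lock<LockT> Guard;

public:
  LockedRef(T *Ptr, LockT *Lock) : Ptr(Ptr), Guard(*Lock) {}
  T *operator->() const { return Ptr; }
  T &operator*() const { return *Ptr; }
};
```

Because the wrapper is usually a temporary, an expression such as `getAllocator()->Allocate<T>()` holds `AllocLock` only until the end of the full expression; binding the result to a named local extends the critical section to the enclosing scope, which is what `accumulateGlobals()` below relies on.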
| 525 void accumulateGlobals(std::unique_ptr<VariableDeclarationList> Globls) { | 561 void accumulateGlobals(std::unique_ptr<VariableDeclarationList> Globls) { |
| 526 if (Globls != nullptr) | 562 LockedPtr<VariableDeclarationList> _(&Globals, &InitAllocLock); |
| 527 Globals.insert(Globals.end(), Globls->begin(), Globls->end()); | 563 if (Globls != nullptr) { |
| 564 Globals.merge(Globls.get()); |
| 565 } |
| 528 } | 566 } |
| 529 | 567 |
| 530 void lowerGlobalsIfNoCodeHasBeenSeen() { | 568 void lowerGlobalsIfNoCodeHasBeenSeen() { |
| 531 if (HasSeenCode) | 569 if (HasSeenCode) |
| 532 return; | 570 return; |
| 533 constexpr char NoSuffix[] = ""; | 571 constexpr char NoSuffix[] = ""; |
| 534 lowerGlobals(NoSuffix); | 572 lowerGlobals(NoSuffix); |
| 535 HasSeenCode = true; | 573 HasSeenCode = true; |
| 536 } | 574 } |
| 537 | 575 |
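A hedged sketch of how these two helpers are intended to interact in the emitter loop (the surrounding function and its parameters are hypothetical; only the two calls are real):

```cpp
// Hypothetical fragment of GlobalContext's emitter loop.
void handleEmitterItem(bool ItemCarriesCode,
                       std::unique_ptr<VariableDeclarationList> ItemGlobals) {
  if (!ItemCarriesCode) {
    // Data-only work items: keep collecting global variable declarations.
    accumulateGlobals(std::move(ItemGlobals));
    return;
  }
  // The first code item flushes everything accumulated so far; HasSeenCode
  // ensures the flush happens at most once.
  lowerGlobalsIfNoCodeHasBeenSeen();
  // ... emit the code for this item ...
}
```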
| 538 void addBlockInfoPtrs(VariableDeclaration *ProfileBlockInfo); | 576 void saveBlockInfoPtrs(); |
| 539 | 577 |
| 540 llvm::SmallVector<ThreadContext *, 128> AllThreadContexts; | 578 llvm::SmallVector<ThreadContext *, 128> AllThreadContexts; |
| 541 llvm::SmallVector<std::thread, 128> TranslationThreads; | 579 llvm::SmallVector<std::thread, 128> TranslationThreads; |
| 542 llvm::SmallVector<std::thread, 128> EmitterThreads; | 580 llvm::SmallVector<std::thread, 128> EmitterThreads; |
| 543 // Each thread has its own TLS pointer which is also held in | 581 // Each thread has its own TLS pointer which is also held in |
| 544 // AllThreadContexts. | 582 // AllThreadContexts. |
| 545 ICE_TLS_DECLARE_FIELD(ThreadContext *, TLS); | 583 ICE_TLS_DECLARE_FIELD(ThreadContext *, TLS); |
| 546 | 584 |
| 547 public: | 585 public: |
| 548 static void TlsInit() { ICE_TLS_INIT_FIELD(TLS); } | 586 static void TlsInit() { ICE_TLS_INIT_FIELD(TLS); } |
| (...skipping 47 matching lines...) |
| 596 explicit OstreamLocker(GlobalContext *Ctx) : Ctx(Ctx) { Ctx->lockStr(); } | 634 explicit OstreamLocker(GlobalContext *Ctx) : Ctx(Ctx) { Ctx->lockStr(); } |
| 597 ~OstreamLocker() { Ctx->unlockStr(); } | 635 ~OstreamLocker() { Ctx->unlockStr(); } |
| 598 | 636 |
| 599 private: | 637 private: |
| 600 GlobalContext *const Ctx; | 638 GlobalContext *const Ctx; |
| 601 }; | 639 }; |
| 602 | 640 |
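Typical usage of the RAII locker above, assuming `getStrDump()` is the context's dump-stream accessor: the stream stays locked for the whole scope, so multi-line output from one thread is not interleaved with output from others.

```cpp
void dumpFunctionName(Ice::GlobalContext *Ctx, const Ice::IceString &Name) {
  Ice::OstreamLocker L(Ctx);
  Ctx->getStrDump() << "function: " << Name << "\n";
  Ctx->getStrDump() << "  (further lines emitted under the same lock)\n";
}
```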
| 603 } // end of namespace Ice | 641 } // end of namespace Ice |
| 604 | 642 |
| 605 #endif // SUBZERO_SRC_ICEGLOBALCONTEXT_H | 643 #endif // SUBZERO_SRC_ICEGLOBALCONTEXT_H |