OLD | NEW |
1 // Copyright 2010 the V8 project authors. All rights reserved. | 1 // Copyright 2010 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 50 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
61 | 61 |
62 template<typename Record> | 62 template<typename Record> |
63 void UnboundQueue<Record>::DeleteFirst() { | 63 void UnboundQueue<Record>::DeleteFirst() { |
64 Node* tmp = first_; | 64 Node* tmp = first_; |
65 first_ = tmp->next; | 65 first_ = tmp->next; |
66 delete tmp; | 66 delete tmp; |
67 } | 67 } |
68 | 68 |
69 | 69 |
70 template<typename Record> | 70 template<typename Record> |
71 void UnboundQueue<Record>::Dequeue(Record* rec) { | 71 bool UnboundQueue<Record>::Dequeue(Record* rec) { |
72 ASSERT(divider_ != last_); | 72 if (divider_ == Acquire_Load(&last_)) return false; |
73 Node* next = reinterpret_cast<Node*>(divider_)->next; | 73 Node* next = reinterpret_cast<Node*>(divider_)->next; |
74 *rec = next->value; | 74 *rec = next->value; |
75 Release_Store(&divider_, reinterpret_cast<AtomicWord>(next)); | 75 Release_Store(&divider_, reinterpret_cast<AtomicWord>(next)); |
| 76 return true; |
76 } | 77 } |
77 | 78 |
78 | 79 |
79 template<typename Record> | 80 template<typename Record> |
80 void UnboundQueue<Record>::Enqueue(const Record& rec) { | 81 void UnboundQueue<Record>::Enqueue(const Record& rec) { |
81 Node*& next = reinterpret_cast<Node*>(last_)->next; | 82 Node*& next = reinterpret_cast<Node*>(last_)->next; |
82 next = new Node(rec); | 83 next = new Node(rec); |
83 Release_Store(&last_, reinterpret_cast<AtomicWord>(next)); | 84 Release_Store(&last_, reinterpret_cast<AtomicWord>(next)); |
84 while (first_ != reinterpret_cast<Node*>(divider_)) DeleteFirst(); | 85 |
| 86 while (first_ != reinterpret_cast<Node*>(Acquire_Load(&divider_))) {
| 87 DeleteFirst(); |
| 88 } |
85 } | 89 } |
86 | 90 |
87 | 91 |
88 template<typename Record> | 92 template<typename Record> |
89 Record* UnboundQueue<Record>::Peek() { | 93 bool UnboundQueue<Record>::IsEmpty() const { |
90 ASSERT(divider_ != last_); | 94 return NoBarrier_Load(&divider_) == NoBarrier_Load(&last_); |
| 95 } |
| 96 |
| 97 |
| 98 template<typename Record> |
| 99 Record* UnboundQueue<Record>::Peek() const { |
| 100 if (divider_ == Acquire_Load(&last_)) return NULL; |
91 Node* next = reinterpret_cast<Node*>(divider_)->next; | 101 Node* next = reinterpret_cast<Node*>(divider_)->next; |
92 return &next->value; | 102 return &next->value; |
93 } | 103 } |
94 | 104 |
95 } } // namespace v8::internal | 105 } } // namespace v8::internal |
96 | 106 |
97 #endif // V8_UNBOUND_QUEUE_INL_H_ | 107 #endif // V8_UNBOUND_QUEUE_INL_H_ |
OLD | NEW |