| OLD | NEW |
| 1 // Copyright 2015 the V8 project authors. All rights reserved. | 1 // Copyright 2015 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/compiler/state-values-utils.h" | 5 #include "src/compiler/state-values-utils.h" |
| 6 | 6 |
| 7 #include "src/bit-vector.h" |
| 8 |
| 7 namespace v8 { | 9 namespace v8 { |
| 8 namespace internal { | 10 namespace internal { |
| 9 namespace compiler { | 11 namespace compiler { |
| 10 | 12 |
| 11 StateValuesCache::StateValuesCache(JSGraph* js_graph) | 13 StateValuesCache::StateValuesCache(JSGraph* js_graph) |
| 12 : js_graph_(js_graph), | 14 : js_graph_(js_graph), |
| 13 hash_map_(AreKeysEqual, ZoneHashMap::kDefaultHashMapCapacity, | 15 hash_map_(AreKeysEqual, ZoneHashMap::kDefaultHashMapCapacity, |
| 14 ZoneAllocationPolicy(zone())), | 16 ZoneAllocationPolicy(zone())), |
| 15 working_space_(zone()), | 17 working_space_(zone()), |
| 16 empty_state_values_(nullptr) {} | 18 empty_state_values_(nullptr) {} |
| (...skipping 23 matching lines...) |
| 40 } | 42 } |
| 41 UNREACHABLE(); | 43 UNREACHABLE(); |
| 42 } | 44 } |
| 43 | 45 |
| 44 | 46 |
| 45 // static | 47 // static |
| 46 bool StateValuesCache::IsKeysEqualToNode(StateValuesKey* key, Node* node) { | 48 bool StateValuesCache::IsKeysEqualToNode(StateValuesKey* key, Node* node) { |
| 47 if (key->count != static_cast<size_t>(node->InputCount())) { | 49 if (key->count != static_cast<size_t>(node->InputCount())) { |
| 48 return false; | 50 return false; |
| 49 } | 51 } |
| 52 |
| 53 DCHECK(node->opcode() == IrOpcode::kStateValues); |
| 54 SparseInputMask node_mask = SparseInputMaskOf(node->op()); |
| 55 |
| 56 if (node_mask != key->mask) { |
| 57 return false; |
| 58 } |
| 59 |
| 60 // Comparing real inputs rather than sparse inputs, since we already know the |
| 61 // sparse input masks are the same. |
| 50 for (size_t i = 0; i < key->count; i++) { | 62 for (size_t i = 0; i < key->count; i++) { |
| 51 if (key->values[i] != node->InputAt(static_cast<int>(i))) { | 63 if (key->values[i] != node->InputAt(static_cast<int>(i))) { |
| 52 return false; | 64 return false; |
| 53 } | 65 } |
| 54 } | 66 } |
| 55 return true; | 67 return true; |
| 56 } | 68 } |
| 57 | 69 |
| 58 | 70 |
| 59 // static | 71 // static |
| 60 bool StateValuesCache::AreValueKeysEqual(StateValuesKey* key1, | 72 bool StateValuesCache::AreValueKeysEqual(StateValuesKey* key1, |
| 61 StateValuesKey* key2) { | 73 StateValuesKey* key2) { |
| 62 if (key1->count != key2->count) { | 74 if (key1->count != key2->count) { |
| 63 return false; | 75 return false; |
| 64 } | 76 } |
| 77 if (key1->mask != key2->mask) { |
| 78 return false; |
| 79 } |
| 65 for (size_t i = 0; i < key1->count; i++) { | 80 for (size_t i = 0; i < key1->count; i++) { |
| 66 if (key1->values[i] != key2->values[i]) { | 81 if (key1->values[i] != key2->values[i]) { |
| 67 return false; | 82 return false; |
| 68 } | 83 } |
| 69 } | 84 } |
| 70 return true; | 85 return true; |
| 71 } | 86 } |
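Note on the two equality predicates above: both now reject on a sparse-mask mismatch before touching the real inputs, so two keys with identical live values but different dead-slot layouts no longer compare equal. A minimal standalone sketch of that two-stage comparison (`KeySketch`, the `int` stand-ins for `Node*`, and the concrete mask values are illustrative, not V8's):

```cpp
#include <cstddef>
#include <cstdint>
#include <iostream>

// Illustrative stand-in for StateValuesKey: the live-input count, a sparse
// liveness mask (end marker included), and the live inputs themselves.
struct KeySketch {
  size_t count;
  uint32_t mask;
  const int* values;  // ints stand in for Node* pointers
};

bool KeysEqual(const KeySketch& a, const KeySketch& b) {
  if (a.count != b.count) return false;
  if (a.mask != b.mask) return false;  // different dead-slot layouts
  for (size_t i = 0; i < a.count; i++) {  // only the real inputs remain
    if (a.values[i] != b.values[i]) return false;
  }
  return true;
}

int main() {
  int v[] = {1, 2};
  KeySketch a{2, 0b1011u, v};  // live, live, dead, end marker
  KeySketch b{2, 0b1101u, v};  // live, dead, live, end marker
  KeySketch c{2, 0b1011u, v};
  std::cout << std::boolalpha << KeysEqual(a, b) << "\n";  // false
  std::cout << KeysEqual(a, c) << "\n";                    // true
}
```

Checking the mask first is also the cheap path: it is a single integer compare, while the value loop is O(count).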
| 72 | 87 |
| 73 | 88 |
| 74 Node* StateValuesCache::GetEmptyStateValues() { | 89 Node* StateValuesCache::GetEmptyStateValues() { |
| 75 if (empty_state_values_ == nullptr) { | 90 if (empty_state_values_ == nullptr) { |
| 76 empty_state_values_ = graph()->NewNode(common()->StateValues(0)); | 91 empty_state_values_ = |
| 92 graph()->NewNode(common()->StateValues(0, SparseInputMask::Dense())); |
| 77 } | 93 } |
| 78 return empty_state_values_; | 94 return empty_state_values_; |
| 79 } | 95 } |
| 80 | 96 |
| 81 | 97 StateValuesCache::WorkingBuffer* StateValuesCache::GetWorkingSpace( |
| 82 NodeVector* StateValuesCache::GetWorkingSpace(size_t level) { | 98 size_t level) { |
| 83 while (working_space_.size() <= level) { | 99 if (working_space_.size() <= level) { |
| 84 void* space = zone()->New(sizeof(NodeVector)); | 100 working_space_.resize(level + 1); |
| 85 working_space_.push_back(new (space) | |
| 86 NodeVector(kMaxInputCount, nullptr, zone())); | |
| 87 } | 101 } |
| 88 return working_space_[level]; | 102 return &working_space_[level]; |
| 89 } | 103 } |
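GetWorkingSpace is simpler now because the per-level buffers appear to be plain value types: judging by the `resize()` call, `WorkingBuffer` is assumed (per the header change, not shown in this diff) to be a fixed-size `std::array<Node*, kMaxInputCount>`, so growing the space no longer needs placement-new of zone-allocated NodeVectors. A sketch under that assumption:

```cpp
#include <array>
#include <cstddef>
#include <vector>

struct Node;  // opaque stand-in for compiler::Node

constexpr size_t kMaxInputCount = 8;  // assumed fan-out of the value tree

using WorkingBuffer = std::array<Node*, kMaxInputCount>;  // assumed typedef

// Grow-on-demand lookup: one fixed-size buffer per tree level.
WorkingBuffer* GetWorkingSpace(std::vector<WorkingBuffer>* space,
                               size_t level) {
  if (space->size() <= level) space->resize(level + 1);
  return &(*space)[level];
}

int main() {
  std::vector<WorkingBuffer> space;
  WorkingBuffer* buf = GetWorkingSpace(&space, 2);  // allocates levels 0..2
  (*buf)[0] = nullptr;
  return space.size() == 3 ? 0 : 1;
}
```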
| 90 | 104 |
| 91 namespace { | 105 namespace { |
| 92 | 106 |
| 93 int StateValuesHashKey(Node** nodes, size_t count) { | 107 int StateValuesHashKey(Node** nodes, size_t count) { |
| 94 size_t hash = count; | 108 size_t hash = count; |
| 95 for (size_t i = 0; i < count; i++) { | 109 for (size_t i = 0; i < count; i++) { |
| 96 hash = hash * 23 + nodes[i]->id(); | 110 hash = hash * 23 + (nodes[i] == nullptr ? 0 : nodes[i]->id()); |
| 97 } | 111 } |
| 98 return static_cast<int>(hash & 0x7fffffff); | 112 return static_cast<int>(hash & 0x7fffffff); |
| 99 } | 113 } |
| 100 | 114 |
| 101 } // namespace | 115 } // namespace |
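The only change to the hash is tolerating nullptr entries, which now contribute id 0 instead of being dereferenced. A tiny standalone illustration (`FakeNode` is a stand-in for `Node`, which exposes `id()`):

```cpp
#include <cstddef>
#include <iostream>

struct FakeNode {
  int id_;
  int id() const { return id_; }
};

// Same polynomial hash as above, but null-safe for dead slots.
int StateValuesHashKey(FakeNode** nodes, size_t count) {
  size_t hash = count;
  for (size_t i = 0; i < count; i++) {
    hash = hash * 23 + (nodes[i] == nullptr ? 0 : nodes[i]->id());
  }
  return static_cast<int>(hash & 0x7fffffff);
}

int main() {
  FakeNode a{1}, b{2};
  FakeNode* with_hole[] = {&a, nullptr, &b};  // a dead slot in the middle
  std::cout << StateValuesHashKey(with_hole, 3) << "\n";  // no null deref
}
```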
| 102 | 116 |
| 103 | 117 Node* StateValuesCache::GetValuesNodeFromCache(Node** nodes, size_t count, |
| 104 Node* StateValuesCache::GetValuesNodeFromCache(Node** nodes, size_t count) { | 118 SparseInputMask mask) { |
| 105 StateValuesKey key(count, nodes); | 119 StateValuesKey key(count, mask, nodes); |
| 106 int hash = StateValuesHashKey(nodes, count); | 120 int hash = StateValuesHashKey(nodes, count); |
| 107 ZoneHashMap::Entry* lookup = | 121 ZoneHashMap::Entry* lookup = |
| 108 hash_map_.LookupOrInsert(&key, hash, ZoneAllocationPolicy(zone())); | 122 hash_map_.LookupOrInsert(&key, hash, ZoneAllocationPolicy(zone())); |
| 109 DCHECK_NOT_NULL(lookup); | 123 DCHECK_NOT_NULL(lookup); |
| 110 Node* node; | 124 Node* node; |
| 111 if (lookup->value == nullptr) { | 125 if (lookup->value == nullptr) { |
| 112 int input_count = static_cast<int>(count); | 126 int node_count = static_cast<int>(count); |
| 113 node = graph()->NewNode(common()->StateValues(input_count), input_count, | 127 node = graph()->NewNode(common()->StateValues(node_count, mask), node_count, |
| 114 nodes); | 128 nodes); |
| 115 NodeKey* new_key = new (zone()->New(sizeof(NodeKey))) NodeKey(node); | 129 NodeKey* new_key = new (zone()->New(sizeof(NodeKey))) NodeKey(node); |
| 116 lookup->key = new_key; | 130 lookup->key = new_key; |
| 117 lookup->value = node; | 131 lookup->value = node; |
| 118 } else { | 132 } else { |
| 119 node = reinterpret_cast<Node*>(lookup->value); | 133 node = reinterpret_cast<Node*>(lookup->value); |
| 120 } | 134 } |
| 121 return node; | 135 return node; |
| 122 } | 136 } |
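GetValuesNodeFromCache is a hash-consing lookup: the key now carries the sparse mask, so identical live values under different liveness layouts yield distinct cached nodes. A hedged model of the lookup-or-build pattern, with `std::map` standing in for ZoneHashMap and a string key standing in for StateValuesKey's (count, mask, values) triple:

```cpp
#include <iostream>
#include <map>
#include <string>

static int next_id = 0;  // models node creation order

int GetOrCreate(std::map<std::string, int>* cache, const std::string& key) {
  auto [it, inserted] = cache->try_emplace(key, -1);
  if (inserted) it->second = next_id++;  // miss: build and remember the node
  return it->second;
}

int main() {
  std::map<std::string, int> cache;
  std::cout << GetOrCreate(&cache, "v1,v2|0b1011") << "\n";  // 0 (created)
  std::cout << GetOrCreate(&cache, "v1,v2|0b1011") << "\n";  // 0 (cached)
  std::cout << GetOrCreate(&cache, "v1,v2|0b1101") << "\n";  // 1 (new mask)
}
```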
| 123 | 137 |
| 138 SparseInputMask::BitMaskType StateValuesCache::FillBufferWithValues( |
| 139 WorkingBuffer* node_buffer, size_t* node_count, size_t* values_idx, |
| 140 Node** values, size_t count, const BitVector* liveness) { |
| 141 SparseInputMask::BitMaskType input_mask = 0; |
| 124 | 142 |
| 125 class StateValuesCache::ValueArrayIterator { | 143 // Virtual nodes are the live nodes plus the implicit optimized out nodes, |
| 126 public: | 144 // which are implied by the liveness mask. |
| 127 ValueArrayIterator(Node** values, size_t count) | 145 size_t virtual_node_count = *node_count; |
| 128 : values_(values), count_(count), current_(0) {} | |
| 129 | 146 |
| 130 void Advance() { | 147 while (*values_idx < count && *node_count < kMaxInputCount && |
| 131 if (!done()) { | 148 virtual_node_count < SparseInputMask::kMaxSparseInputs) { |
| 132 current_++; | 149 DCHECK_LE(*values_idx, static_cast<size_t>(INT_MAX)); |
| 150 |
| 151 if (liveness == nullptr || |
| 152 liveness->Contains(static_cast<int>(*values_idx))) { |
| 153 input_mask |= 1 << (virtual_node_count); |
| 154 (*node_buffer)[(*node_count)++] = values[*values_idx]; |
| 155 } |
| 156 virtual_node_count++; |
| 157 |
| 158 (*values_idx)++; |
| 159 } |
| 160 |
| 161 DCHECK(*node_count <= StateValuesCache::kMaxInputCount); |
| 162 DCHECK(virtual_node_count <= SparseInputMask::kMaxSparseInputs); |
| 163 |
| 164 // Add the end marker at the end of the mask. |
| 165 input_mask |= SparseInputMask::kEndMarker << virtual_node_count; |
| 166 |
| 167 return input_mask; |
| 168 } |
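The mask returned above follows the SparseInputMask convention used throughout this change: bit i is 1 when virtual input i is backed by a real node, 0 when it was optimized out, and a single 1 bit immediately past the last virtual input is the end marker. A self-contained sketch of that encoding and its decoding (`EncodeMask`/`DecodeMask` are illustrative names; `kEndMarker` mirrors the constant used above):

```cpp
#include <cassert>
#include <cstddef>
#include <cstdint>
#include <iostream>
#include <vector>

using BitMaskType = uint32_t;
constexpr BitMaskType kEndMarker = 1;  // mirrors SparseInputMask::kEndMarker

// Build a mask for live.size() virtual inputs: set bit i for each live
// slot, then place the end marker one past the last virtual input.
BitMaskType EncodeMask(const std::vector<bool>& live) {
  BitMaskType mask = 0;
  for (size_t i = 0; i < live.size(); ++i) {
    if (live[i]) mask |= BitMaskType{1} << i;
  }
  mask |= kEndMarker << live.size();
  return mask;
}

// Walk the mask: live bits map, in order, to the packed real inputs.
void DecodeMask(BitMaskType mask) {
  size_t real_index = 0;
  for (size_t i = 0; (mask >> i) != kEndMarker; ++i) {
    if ((mask >> i) & 1) {
      std::cout << "virtual " << i << " -> real input " << real_index++ << "\n";
    } else {
      std::cout << "virtual " << i << " -> optimized out\n";
    }
  }
}

int main() {
  // Three virtual inputs with the middle one dead: 0b1101
  // (live bits 0 and 2, end marker at bit 3).
  BitMaskType mask = EncodeMask({true, false, true});
  assert(mask == 0b1101u);
  DecodeMask(mask);
}
```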
| 169 |
| 170 Node* StateValuesCache::BuildTree(size_t* values_idx, Node** values, |
| 171 size_t count, const BitVector* liveness, |
| 172 size_t level) { |
| 173 WorkingBuffer* node_buffer = GetWorkingSpace(level); |
| 174 size_t node_count = 0; |
| 175 SparseInputMask::BitMaskType input_mask = SparseInputMask::kDenseBitMask; |
| 176 |
| 177 if (level == 0) { |
| 178 input_mask = FillBufferWithValues(node_buffer, &node_count, values_idx, |
| 179 values, count, liveness); |
| 180 // Make sure we returned a sparse input mask. |
| 181 DCHECK_NE(input_mask, SparseInputMask::kDenseBitMask); |
| 182 } else { |
| 183 while (*values_idx < count && node_count < kMaxInputCount) { |
| 184 if (count - *values_idx < kMaxInputCount - node_count) { |
| 185 // If we have fewer values remaining than inputs remaining, dump the |
| 186 // remaining values into this node. |
| 187 // TODO(leszeks): We could optimise this further by only counting |
| 188 // remaining live nodes. |
| 189 |
| 190 size_t previous_input_count = node_count; |
| 191 input_mask = FillBufferWithValues(node_buffer, &node_count, values_idx, |
| 192 values, count, liveness); |
| 193 // Make sure we have exhausted our values. |
| 194 DCHECK_EQ(*values_idx, count); |
| 195 // Make sure we returned a sparse input mask. |
| 196 DCHECK_NE(input_mask, SparseInputMask::kDenseBitMask); |
| 197 |
| 198 // Make sure we haven't touched inputs below previous_input_count in the |
| 199 // mask. |
| 200 DCHECK_EQ(input_mask & ((1 << previous_input_count) - 1), 0u); |
| 201 // Mark all previous inputs as live. |
| 202 input_mask |= ((1 << previous_input_count) - 1); |
| 203 |
| 204 break; |
| 205 |
| 206 } else { |
| 207 // Otherwise, add the values to a subtree and add that as an input. |
| 208 Node* subtree = |
| 209 BuildTree(values_idx, values, count, liveness, level - 1); |
| 210 (*node_buffer)[node_count++] = subtree; |
| 211 // Don't touch the bitmask, so that it stays dense. |
| 212 } |
| 133 } | 213 } |
| 134 } | 214 } |
| 135 | 215 |
| 136 bool done() { return current_ >= count_; } | 216 if (node_count == 1 && input_mask == SparseInputMask::kDenseBitMask) { |
| 137 | 217 // Elide the StateValue node if there is only one, dense input. This will |
| 138 Node* node() { | 218 // only happen if we built a single subtree (as nodes with values are always |
| 139 DCHECK(!done()); | 219 // sparse), and so we can replace ourselves with it. |
| 140 return values_[current_]; | 220 DCHECK_EQ((*node_buffer)[0]->opcode(), IrOpcode::kStateValues); |
| 141 } | 221 return (*node_buffer)[0]; |
| 142 | |
| 143 private: | |
| 144 Node** values_; | |
| 145 size_t count_; | |
| 146 size_t current_; | |
| 147 }; | |
| 148 | |
| 149 | |
| 150 Node* StateValuesCache::BuildTree(ValueArrayIterator* it, size_t max_height) { | |
| 151 if (max_height == 0) { | |
| 152 Node* node = it->node(); | |
| 153 it->Advance(); | |
| 154 return node; | |
| 155 } | |
| 156 DCHECK(!it->done()); | |
| 157 | |
| 158 NodeVector* buffer = GetWorkingSpace(max_height); | |
| 159 size_t count = 0; | |
| 160 for (; count < kMaxInputCount; count++) { | |
| 161 if (it->done()) break; | |
| 162 (*buffer)[count] = BuildTree(it, max_height - 1); | |
| 163 } | |
| 164 if (count == 1) { | |
| 165 return (*buffer)[0]; | |
| 166 } else { | 222 } else { |
| 167 return GetValuesNodeFromCache(&(buffer->front()), count); | 223 return GetValuesNodeFromCache(node_buffer->data(), node_count, |
| 224 SparseInputMask(input_mask)); |
| 168 } | 225 } |
| 169 } | 226 } |
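The mask arithmetic in the "dump the remaining values" branch is worth spelling out: FillBufferWithValues starts numbering virtual inputs at the current node_count, so the returned mask has no bits set below previous_input_count, and the final `|=` then forces those low bits to 1, since each already-buffered subtree is exactly one live input of this node. A small runnable check of that merge (the concrete mask value is an illustrative example):

```cpp
#include <cassert>
#include <cstddef>
#include <cstdint>
#include <iostream>

int main() {
  // Two subtrees already sit in buffer slots 0 and 1, so the sparse tail
  // built by FillBufferWithValues starts at virtual index 2. Suppose that
  // tail is: live, dead, live (end marker at bit 5).
  uint32_t input_mask = 0b110100u;
  size_t previous_input_count = 2;

  // The fresh mask must not have touched the subtree slots...
  assert((input_mask & ((1u << previous_input_count) - 1)) == 0u);

  // ...and those slots are then marked live, one per buffered subtree.
  input_mask |= (1u << previous_input_count) - 1;

  std::cout << std::hex << "0x" << input_mask << "\n";  // 0x37 == 0b110111
}
```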
| 170 | 227 |
| 228 #if DEBUG |
| 229 namespace { |
| 171 | 230 |
| 172 Node* StateValuesCache::GetNodeForValues(Node** values, size_t count) { | 231 void CheckTreeContainsValues(Node* tree, Node** values, size_t count, |
| 232 const BitVector* liveness) { |
| 233 CHECK_EQ(count, StateValuesAccess(tree).size()); |
| 234 |
| 235 int i; |
| 236 auto access = StateValuesAccess(tree); |
| 237 auto it = access.begin(); |
| 238 auto itend = access.end(); |
| 239 for (i = 0; it != itend; ++it, ++i) { |
| 240 if (liveness == nullptr || liveness->Contains(i)) { |
| 241 CHECK((*it).node == values[i]); |
| 242 } else { |
| 243 CHECK((*it).node == nullptr); |
| 244 } |
| 245 } |
| 246 CHECK_EQ(static_cast<size_t>(i), count); |
| 247 } |
| 248 |
| 249 } // namespace |
| 250 #endif |
| 251 |
| 252 Node* StateValuesCache::GetNodeForValues(Node** values, size_t count, |
| 253 const BitVector* liveness) { |
| 173 #if DEBUG | 254 #if DEBUG |
| 255 // Check that the values represent actual values, and not a tree of values. |
| 174 for (size_t i = 0; i < count; i++) { | 256 for (size_t i = 0; i < count; i++) { |
| 175 DCHECK_NE(values[i]->opcode(), IrOpcode::kStateValues); | 257 if (values[i] != nullptr) { |
| 176 DCHECK_NE(values[i]->opcode(), IrOpcode::kTypedStateValues); | 258 DCHECK_NE(values[i]->opcode(), IrOpcode::kStateValues); |
| 259 DCHECK_NE(values[i]->opcode(), IrOpcode::kTypedStateValues); |
| 260 } |
| 261 } |
| 262 if (liveness != nullptr) { |
| 263 // Liveness can have extra bits for the stack or accumulator, which we |
| 264 // ignore here. |
| 265 DCHECK_LE(count, static_cast<size_t>(liveness->length())); |
| 266 |
| 267 for (size_t i = 0; i < count; i++) { |
| 268 if (liveness->Contains(static_cast<int>(i))) { |
| 269 DCHECK_NOT_NULL(values[i]); |
| 270 } |
| 271 } |
| 177 } | 272 } |
| 178 #endif | 273 #endif |
| 274 |
| 179 if (count == 0) { | 275 if (count == 0) { |
| 180 return GetEmptyStateValues(); | 276 return GetEmptyStateValues(); |
| 181 } | 277 } |
| 278 |
| 279 // This is a worst-case tree height estimate, assuming that all values are |
| 280 // live. We could get a better estimate by counting zeroes in the liveness |
| 281 // vector, but there's no point -- any excess height in the tree will be |
| 282 // collapsed by the single-input elision at the end of BuildTree. |
| 182 size_t height = 0; | 283 size_t height = 0; |
| 183 size_t max_nodes = 1; | 284 size_t max_inputs = kMaxInputCount; |
| 184 while (count > max_nodes) { | 285 while (count > max_inputs) { |
| 185 height++; | 286 height++; |
| 186 max_nodes *= kMaxInputCount; | 287 max_inputs *= kMaxInputCount; |
| 187 } | 288 } |
| 188 | 289 |
| 189 ValueArrayIterator it(values, count); | 290 size_t values_idx = 0; |
| 291 Node* tree = BuildTree(&values_idx, values, count, liveness, height); |
| 292 // The values should be exhausted by the end of BuildTree. |
| 293 DCHECK_EQ(values_idx, count); |
| 190 | 294 |
| 191 Node* tree = BuildTree(&it, height); | 295 // The 'tree' must be rooted with a state value node. |
| 296 DCHECK_EQ(tree->opcode(), IrOpcode::kStateValues); |
| 192 | 297 |
| 193 // If the 'tree' is a single node, equip it with a StateValues wrapper. | 298 #if DEBUG |
| 194 if (tree->opcode() != IrOpcode::kStateValues && | 299 CheckTreeContainsValues(tree, values, count, liveness); |
| 195 tree->opcode() != IrOpcode::kTypedStateValues) { | 300 #endif |
| 196 tree = GetValuesNodeFromCache(&tree, 1); | |
| 197 } | |
| 198 | 301 |
| 199 return tree; | 302 return tree; |
| 200 } | 303 } |
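The height loop above picks the smallest tree height whose capacity, kMaxInputCount raised to (height + 1), covers count. A runnable extraction of that estimate, assuming a fan-out of 8 for kMaxInputCount (the loop itself works for any fan-out):

```cpp
#include <cstddef>
#include <iostream>

constexpr size_t kMaxInputCount = 8;  // assumed fan-out

// Smallest height such that kMaxInputCount^(height + 1) >= count.
size_t TreeHeightFor(size_t count) {
  size_t height = 0;
  size_t max_inputs = kMaxInputCount;
  while (count > max_inputs) {
    height++;
    max_inputs *= kMaxInputCount;
  }
  return height;
}

int main() {
  std::cout << TreeHeightFor(8) << "\n";   // 0: fits in a single node
  std::cout << TreeHeightFor(9) << "\n";   // 1: needs one subtree level
  std::cout << TreeHeightFor(64) << "\n";  // 1: 8 subtrees of 8 values
  std::cout << TreeHeightFor(65) << "\n";  // 2
}
```

As the comment in the diff notes, overestimating the height is harmless: any excess level produces a single dense input, which BuildTree's single-input elision collapses away.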
| 201 | 304 |
| 202 | |
| 203 StateValuesAccess::iterator::iterator(Node* node) : current_depth_(0) { | 305 StateValuesAccess::iterator::iterator(Node* node) : current_depth_(0) { |
| 204 // A hacky way initialize - just set the index before the node we want | 306 stack_[current_depth_] = |
| 205 // to process and then advance to it. | 307 SparseInputMaskOf(node->op()).IterateOverInputs(node); |
| 206 stack_[current_depth_].node = node; | 308 EnsureValid(); |
| 207 stack_[current_depth_].index = -1; | |
| 208 Advance(); | |
| 209 } | 309 } |
| 210 | 310 |
| 211 | 311 SparseInputMask::InputIterator* StateValuesAccess::iterator::Top() { |
| 212 StateValuesAccess::iterator::StatePos* StateValuesAccess::iterator::Top() { | |
| 213 DCHECK(current_depth_ >= 0); | 312 DCHECK(current_depth_ >= 0); |
| 214 DCHECK(current_depth_ < kMaxInlineDepth); | 313 DCHECK(current_depth_ < kMaxInlineDepth); |
| 215 return &(stack_[current_depth_]); | 314 return &(stack_[current_depth_]); |
| 216 } | 315 } |
| 217 | 316 |
| 218 | |
| 219 void StateValuesAccess::iterator::Push(Node* node) { | 317 void StateValuesAccess::iterator::Push(Node* node) { |
| 220 current_depth_++; | 318 current_depth_++; |
| 221 CHECK(current_depth_ < kMaxInlineDepth); | 319 CHECK(current_depth_ < kMaxInlineDepth); |
| 222 stack_[current_depth_].node = node; | 320 stack_[current_depth_] = |
| 223 stack_[current_depth_].index = 0; | 321 SparseInputMaskOf(node->op()).IterateOverInputs(node); |
| 224 } | 322 } |
| 225 | 323 |
| 226 | 324 |
| 227 void StateValuesAccess::iterator::Pop() { | 325 void StateValuesAccess::iterator::Pop() { |
| 228 DCHECK(current_depth_ >= 0); | 326 DCHECK(current_depth_ >= 0); |
| 229 current_depth_--; | 327 current_depth_--; |
| 230 } | 328 } |
| 231 | 329 |
| 232 | 330 |
| 233 bool StateValuesAccess::iterator::done() { return current_depth_ < 0; } | 331 bool StateValuesAccess::iterator::done() { return current_depth_ < 0; } |
| 234 | 332 |
| 235 | 333 |
| 236 void StateValuesAccess::iterator::Advance() { | 334 void StateValuesAccess::iterator::Advance() { |
| 237 // Advance the current index. | 335 Top()->Advance(); |
| 238 Top()->index++; | 336 EnsureValid(); |
| 337 } |
| 239 | 338 |
| 240 // Fix up the position to point to a valid node. | 339 void StateValuesAccess::iterator::EnsureValid() { |
| 241 while (true) { | 340 while (true) { |
| 242 // TODO(jarin): Factor to a separate method. | 341 SparseInputMask::InputIterator* top = Top(); |
| 243 Node* node = Top()->node; | |
| 244 int index = Top()->index; | |
| 245 | 342 |
| 246 if (index >= node->InputCount()) { | 343 if (top->IsEmpty()) { |
| 247 // Pop stack and move to the next sibling. | 344 // We are on a valid (albeit optimized out) node. |
| 345 return; |
| 346 } |
| 347 |
| 348 if (top->IsEnd()) { |
| 349 // We have hit the end of this iterator. Pop the stack and move to the |
| 350 // next sibling iterator. |
| 248 Pop(); | 351 Pop(); |
| 249 if (done()) { | 352 if (done()) { |
| 250 // Stack is exhausted, we have reached the end. | 353 // Stack is exhausted, we have reached the end. |
| 251 return; | 354 return; |
| 252 } | 355 } |
| 253 Top()->index++; | 356 Top()->Advance(); |
| 254 } else if (node->InputAt(index)->opcode() == IrOpcode::kStateValues || | 357 continue; |
| 255 node->InputAt(index)->opcode() == IrOpcode::kTypedStateValues) { | 358 } |
| 359 |
| 360 // At this point the value is known to be live and within our input nodes. |
| 361 Node* value_node = top->GetReal(); |
| 362 |
| 363 if (value_node->opcode() == IrOpcode::kStateValues || |
| 364 value_node->opcode() == IrOpcode::kTypedStateValues) { |
| 256 // Nested state, we need to push to the stack. | 365 // Nested state, we need to push to the stack. |
| 257 Push(node->InputAt(index)); | 366 Push(value_node); |
| 367 continue; |
| 368 } |
| 369 |
| 370 // We are on a valid node, we can stop the iteration. |
| 371 return; |
| 372 } |
| 373 } |
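EnsureValid leans on three states of SparseInputMask::InputIterator: IsEnd (only the end marker remains), IsEmpty (the current bit is 0, an optimized-out slot), and otherwise a live slot whose node GetReal returns. A simplified standalone model of that walk over one flat node (the struct and its method semantics are our reading of the new API, not its definition):

```cpp
#include <cstdint>
#include <iostream>

struct InputIterator {
  uint32_t mask;    // remaining mask, end marker included
  const int* real;  // next unconsumed real input (ints stand in for Node*)

  bool IsEnd() const { return mask == 1u; }         // only end marker left
  bool IsEmpty() const { return (mask & 1u) == 0; } // optimized-out slot
  int GetReal() const { return *real; }
  void Advance() {
    if (!IsEmpty()) real++;  // live slots consume a real input; holes don't
    mask >>= 1;
  }
};

int main() {
  int inputs[] = {10, 20};
  // 0b1101 from the encoding sketch earlier: live, dead, live, end marker.
  for (InputIterator it{0b1101u, inputs}; !it.IsEnd(); it.Advance()) {
    if (it.IsEmpty()) std::cout << "optimized out\n";
    else std::cout << "value " << it.GetReal() << "\n";
  }
}
```

The real iterator additionally recurses through nested (Typed)StateValues nodes, which is what the Push/Pop stack in EnsureValid handles.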
| 374 |
| 375 Node* StateValuesAccess::iterator::node() { return Top()->Get(nullptr); } |
| 376 |
| 377 MachineType StateValuesAccess::iterator::type() { |
| 378 Node* parent = Top()->parent(); |
| 379 if (parent->opcode() == IrOpcode::kStateValues) { |
| 380 return MachineType::AnyTagged(); |
| 381 } else { |
| 382 DCHECK_EQ(IrOpcode::kTypedStateValues, parent->opcode()); |
| 383 |
| 384 if (Top()->IsEmpty()) { |
| 385 return MachineType::None(); |
| 258 } else { | 386 } else { |
| 259 // We are on a valid node, we can stop the iteration. | 387 ZoneVector<MachineType> const* types = MachineTypesOf(parent->op()); |
| 260 return; | 388 return (*types)[Top()->real_index()]; |
| 261 } | 389 } |
| 262 } | 390 } |
| 263 } | 391 } |
| 264 | 392 |
| 265 | |
| 266 Node* StateValuesAccess::iterator::node() { | |
| 267 return Top()->node->InputAt(Top()->index); | |
| 268 } | |
| 269 | |
| 270 | |
| 271 MachineType StateValuesAccess::iterator::type() { | |
| 272 Node* state = Top()->node; | |
| 273 if (state->opcode() == IrOpcode::kStateValues) { | |
| 274 return MachineType::AnyTagged(); | |
| 275 } else { | |
| 276 DCHECK_EQ(IrOpcode::kTypedStateValues, state->opcode()); | |
| 277 ZoneVector<MachineType> const* types = MachineTypesOf(state->op()); | |
| 278 return (*types)[Top()->index]; | |
| 279 } | |
| 280 } | |
| 281 | |
| 282 | 393 |
| 283 bool StateValuesAccess::iterator::operator!=(iterator& other) { | 394 bool StateValuesAccess::iterator::operator!=(iterator& other) { |
| 284 // We only allow comparison with end(). | 395 // We only allow comparison with end(). |
| 285 CHECK(other.done()); | 396 CHECK(other.done()); |
| 286 return !done(); | 397 return !done(); |
| 287 } | 398 } |
| 288 | 399 |
| 289 | 400 |
| 290 StateValuesAccess::iterator& StateValuesAccess::iterator::operator++() { | 401 StateValuesAccess::iterator& StateValuesAccess::iterator::operator++() { |
| 291 Advance(); | 402 Advance(); |
| 292 return *this; | 403 return *this; |
| 293 } | 404 } |
| 294 | 405 |
| 295 | 406 |
| 296 StateValuesAccess::TypedNode StateValuesAccess::iterator::operator*() { | 407 StateValuesAccess::TypedNode StateValuesAccess::iterator::operator*() { |
| 297 return TypedNode(node(), type()); | 408 return TypedNode(node(), type()); |
| 298 } | 409 } |
| 299 | 410 |
| 300 | 411 |
| 301 size_t StateValuesAccess::size() { | 412 size_t StateValuesAccess::size() { |
| 302 size_t count = 0; | 413 size_t count = 0; |
| 303 for (int i = 0; i < node_->InputCount(); i++) { | 414 SparseInputMask mask = SparseInputMaskOf(node_->op()); |
| 304 if (node_->InputAt(i)->opcode() == IrOpcode::kStateValues || | 415 |
| 305 node_->InputAt(i)->opcode() == IrOpcode::kTypedStateValues) { | 416 SparseInputMask::InputIterator iterator = mask.IterateOverInputs(node_); |
| 306 count += StateValuesAccess(node_->InputAt(i)).size(); | 417 |
| 418 for (; !iterator.IsEnd(); iterator.Advance()) { |
| 419 if (iterator.IsEmpty()) { |
| 420 count++; |
| 307 } else { | 421 } else { |
| 308 count++; | 422 Node* value = iterator.GetReal(); |
| 423 if (value->opcode() == IrOpcode::kStateValues || |
| 424 value->opcode() == IrOpcode::kTypedStateValues) { |
| 425 count += StateValuesAccess(value).size(); |
| 426 } else { |
| 427 count++; |
| 428 } |
| 309 } | 429 } |
| 310 } | 430 } |
| 431 |
| 311 return count; | 432 return count; |
| 312 } | 433 } |
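size() counts virtual inputs: an empty (optimized-out) slot contributes one, a plain value contributes one, and a nested (Typed)StateValues input is flattened recursively. A toy model of that counting rule (`Tree`/`Slot` are illustrative stand-ins for the node structure):

```cpp
#include <cstddef>
#include <iostream>
#include <memory>
#include <variant>
#include <vector>

struct Tree;
using Slot = std::variant<std::monostate,          // optimized-out hole
                          int,                     // leaf value
                          std::unique_ptr<Tree>>;  // nested state values
struct Tree { std::vector<Slot> slots; };

size_t Size(const Tree& t) {
  size_t count = 0;
  for (const Slot& s : t.slots) {
    if (auto* sub = std::get_if<std::unique_ptr<Tree>>(&s)) {
      count += Size(**sub);  // flatten nested trees, as size() does
    } else {
      count++;  // live leaves and holes both occupy one virtual slot
    }
  }
  return count;
}

int main() {
  auto inner = std::make_unique<Tree>();
  inner->slots.emplace_back(1);                 // live value
  inner->slots.emplace_back(std::monostate{});  // hole: still counted
  Tree outer;
  outer.slots.emplace_back(std::move(inner));
  outer.slots.emplace_back(2);
  std::cout << Size(outer) << "\n";  // 3
}
```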
| 313 | 434 |
| 314 } // namespace compiler | 435 } // namespace compiler |
| 315 } // namespace internal | 436 } // namespace internal |
| 316 } // namespace v8 | 437 } // namespace v8 |