OLD | NEW |
1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/bootstrapper.h" | 5 #include "src/bootstrapper.h" |
6 #include "src/compiler/graph-inl.h" | 6 #include "src/compiler/graph-inl.h" |
7 #include "src/compiler/graph-reducer.h" | |
8 #include "src/compiler/js-operator.h" | 7 #include "src/compiler/js-operator.h" |
9 #include "src/compiler/node.h" | 8 #include "src/compiler/node.h" |
10 #include "src/compiler/node-properties-inl.h" | 9 #include "src/compiler/node-properties-inl.h" |
11 #include "src/compiler/node-properties.h" | 10 #include "src/compiler/node-properties.h" |
12 #include "src/compiler/simplified-operator.h" | 11 #include "src/compiler/simplified-operator.h" |
13 #include "src/compiler/typer.h" | 12 #include "src/compiler/typer.h" |
14 | 13 |
15 namespace v8 { | 14 namespace v8 { |
16 namespace internal { | 15 namespace internal { |
17 namespace compiler { | 16 namespace compiler { |
(...skipping 193 matching lines...) |
211 decorator_ = new (zone) Decorator(this); | 210 decorator_ = new (zone) Decorator(this); |
212 graph_->AddDecorator(decorator_); | 211 graph_->AddDecorator(decorator_); |
213 } | 212 } |
214 | 213 |
215 | 214 |
216 Typer::~Typer() { | 215 Typer::~Typer() { |
217 graph_->RemoveDecorator(decorator_); | 216 graph_->RemoveDecorator(decorator_); |
218 } | 217 } |
219 | 218 |
220 | 219 |
221 class Typer::Visitor : public Reducer { | 220 class Typer::Visitor : public NullNodeVisitor { |
222 public: | 221 public: |
223 explicit Visitor(Typer* typer) : typer_(typer) {} | 222 explicit Visitor(Typer* typer) : typer_(typer) {} |
224 | 223 |
225 virtual Reduction Reduce(Node* node) OVERRIDE { | |
226 if (node->op()->ValueOutputCount() == 0) return NoChange(); | |
227 switch (node->opcode()) { | |
228 #define DECLARE_CASE(x) \ | |
229 case IrOpcode::k##x: \ | |
230 return UpdateBounds(node, TypeBinaryOp(node, x##Typer)); | |
231 JS_SIMPLE_BINOP_LIST(DECLARE_CASE) | |
232 #undef DECLARE_CASE | |
233 | |
234 #define DECLARE_CASE(x) \ | |
235 case IrOpcode::k##x: \ | |
236 return UpdateBounds(node, Type##x(node)); | |
237 DECLARE_CASE(Start) | |
238 // VALUE_OP_LIST without JS_SIMPLE_BINOP_LIST: | |
239 COMMON_OP_LIST(DECLARE_CASE) | |
240 SIMPLIFIED_OP_LIST(DECLARE_CASE) | |
241 MACHINE_OP_LIST(DECLARE_CASE) | |
242 JS_SIMPLE_UNOP_LIST(DECLARE_CASE) | |
243 JS_OBJECT_OP_LIST(DECLARE_CASE) | |
244 JS_CONTEXT_OP_LIST(DECLARE_CASE) | |
245 JS_OTHER_OP_LIST(DECLARE_CASE) | |
246 #undef DECLARE_CASE | |
247 | |
248 #define DECLARE_CASE(x) case IrOpcode::k##x: | |
249 DECLARE_CASE(End) | |
250 INNER_CONTROL_OP_LIST(DECLARE_CASE) | |
251 #undef DECLARE_CASE | |
252 break; | |
253 } | |
254 return NoChange(); | |
255 } | |
256 | |
257 Bounds TypeNode(Node* node) { | 224 Bounds TypeNode(Node* node) { |
258 switch (node->opcode()) { | 225 switch (node->opcode()) { |
259 #define DECLARE_CASE(x) \ | 226 #define DECLARE_CASE(x) \ |
260 case IrOpcode::k##x: return TypeBinaryOp(node, x##Typer); | 227 case IrOpcode::k##x: return TypeBinaryOp(node, x##Typer); |
261 JS_SIMPLE_BINOP_LIST(DECLARE_CASE) | 228 JS_SIMPLE_BINOP_LIST(DECLARE_CASE) |
262 #undef DECLARE_CASE | 229 #undef DECLARE_CASE |
263 | 230 |
264 #define DECLARE_CASE(x) case IrOpcode::k##x: return Type##x(node); | 231 #define DECLARE_CASE(x) case IrOpcode::k##x: return Type##x(node); |
265 DECLARE_CASE(Start) | 232 DECLARE_CASE(Start) |
266 // VALUE_OP_LIST without JS_SIMPLE_BINOP_LIST: | 233 // VALUE_OP_LIST without JS_SIMPLE_BINOP_LIST: |
(...skipping 11 matching lines...) |
278 INNER_CONTROL_OP_LIST(DECLARE_CASE) | 245 INNER_CONTROL_OP_LIST(DECLARE_CASE) |
279 #undef DECLARE_CASE | 246 #undef DECLARE_CASE |
280 break; | 247 break; |
281 } | 248 } |
282 UNREACHABLE(); | 249 UNREACHABLE(); |
283 return Bounds(); | 250 return Bounds(); |
284 } | 251 } |
285 | 252 |
286 Type* TypeConstant(Handle<Object> value); | 253 Type* TypeConstant(Handle<Object> value); |
287 | 254 |
288 private: | 255 protected: |
289 Typer* typer_; | |
290 MaybeHandle<Context> context_; | |
291 | |
292 #define DECLARE_METHOD(x) inline Bounds Type##x(Node* node); | 256 #define DECLARE_METHOD(x) inline Bounds Type##x(Node* node); |
293 DECLARE_METHOD(Start) | 257 DECLARE_METHOD(Start) |
294 VALUE_OP_LIST(DECLARE_METHOD) | 258 VALUE_OP_LIST(DECLARE_METHOD) |
295 #undef DECLARE_METHOD | 259 #undef DECLARE_METHOD |
296 | 260 |
297 Bounds BoundsOrNone(Node* node) { | 261 Bounds BoundsOrNone(Node* node) { |
298 return NodeProperties::IsTyped(node) ? NodeProperties::GetBounds(node) | 262 return NodeProperties::IsTyped(node) ? NodeProperties::GetBounds(node) |
299 : Bounds(Type::None()); | 263 : Bounds(Type::None()); |
300 } | 264 } |
301 | 265 |
(...skipping 10 matching lines...) |
312 return result; | 276 return result; |
313 } | 277 } |
314 | 278 |
315 Type* Weaken(Type* current_type, Type* previous_type); | 279 Type* Weaken(Type* current_type, Type* previous_type); |
316 | 280 |
317 Zone* zone() { return typer_->zone(); } | 281 Zone* zone() { return typer_->zone(); } |
318 Isolate* isolate() { return typer_->isolate(); } | 282 Isolate* isolate() { return typer_->isolate(); } |
319 Graph* graph() { return typer_->graph(); } | 283 Graph* graph() { return typer_->graph(); } |
320 MaybeHandle<Context> context() { return typer_->context(); } | 284 MaybeHandle<Context> context() { return typer_->context(); } |
321 | 285 |
| 286 private: |
| 287 Typer* typer_; |
| 288 MaybeHandle<Context> context_; |
| 289 |
322 typedef Type* (*UnaryTyperFun)(Type*, Typer* t); | 290 typedef Type* (*UnaryTyperFun)(Type*, Typer* t); |
323 typedef Type* (*BinaryTyperFun)(Type*, Type*, Typer* t); | 291 typedef Type* (*BinaryTyperFun)(Type*, Type*, Typer* t); |
324 | 292 |
325 Bounds TypeUnaryOp(Node* node, UnaryTyperFun); | 293 Bounds TypeUnaryOp(Node* node, UnaryTyperFun); |
326 Bounds TypeBinaryOp(Node* node, BinaryTyperFun); | 294 Bounds TypeBinaryOp(Node* node, BinaryTyperFun); |
327 | 295 |
328 static Type* Invert(Type*, Typer*); | 296 static Type* Invert(Type*, Typer*); |
329 static Type* FalsifyUndefined(Type*, Typer*); | 297 static Type* FalsifyUndefined(Type*, Typer*); |
330 static Type* Rangify(Type*, Typer*); | 298 static Type* Rangify(Type*, Typer*); |
331 | 299 |
(...skipping 12 matching lines...) |
344 | 312 |
345 static Type* JSCompareTyper(Type*, Type*, Typer*); | 313 static Type* JSCompareTyper(Type*, Type*, Typer*); |
346 | 314 |
347 #define DECLARE_METHOD(x) static Type* x##Typer(Type*, Type*, Typer*); | 315 #define DECLARE_METHOD(x) static Type* x##Typer(Type*, Type*, Typer*); |
348 JS_SIMPLE_BINOP_LIST(DECLARE_METHOD) | 316 JS_SIMPLE_BINOP_LIST(DECLARE_METHOD) |
349 #undef DECLARE_METHOD | 317 #undef DECLARE_METHOD |
350 | 318 |
351 static Type* JSUnaryNotTyper(Type*, Typer*); | 319 static Type* JSUnaryNotTyper(Type*, Typer*); |
352 static Type* JSLoadPropertyTyper(Type*, Type*, Typer*); | 320 static Type* JSLoadPropertyTyper(Type*, Type*, Typer*); |
353 static Type* JSCallFunctionTyper(Type*, Typer*); | 321 static Type* JSCallFunctionTyper(Type*, Typer*); |
| 322 }; |
354 | 323 |
355 Reduction UpdateBounds(Node* node, Bounds current) { | |
356 if (NodeProperties::IsTyped(node)) { | |
357 // Widen the bounds of a previously typed node. | |
358 Bounds previous = NodeProperties::GetBounds(node); | |
359 // Speed up termination in the presence of range types: | |
360 current.upper = Weaken(current.upper, previous.upper); | |
361 current.lower = Weaken(current.lower, previous.lower); | |
362 | 324 |
363 // Types should not get less precise. | 325 class Typer::RunVisitor : public Typer::Visitor { |
364 DCHECK(previous.lower->Is(current.lower)); | 326 public: |
365 DCHECK(previous.upper->Is(current.upper)); | 327 explicit RunVisitor(Typer* typer) |
| 328 : Visitor(typer), |
| 329 redo(NodeSet::key_compare(), NodeSet::allocator_type(typer->zone())) {} |
366 | 330 |
367 NodeProperties::SetBounds(node, current); | 331 void Post(Node* node) { |
368 if (!(previous.Narrows(current) && current.Narrows(previous))) { | 332 if (node->op()->ValueOutputCount() > 0) { |
369 // If something changed, revisit all uses. | 333 Bounds bounds = TypeNode(node); |
370 return Changed(node); | 334 NodeProperties::SetBounds(node, bounds); |
371 } | 335 // Remember incompletely typed nodes for least fixpoint iteration. |
372 return NoChange(); | 336 if (!NodeProperties::AllValueInputsAreTyped(node)) redo.insert(node); |
373 } else { | |
374 // No previous type, simply update the bounds. | |
375 NodeProperties::SetBounds(node, current); | |
376 return Changed(node); | |
377 } | 337 } |
378 } | 338 } |
| 339 |
| 340 NodeSet redo; |
| 341 }; |
| 342 |
| 343 |
| 344 class Typer::WidenVisitor : public Typer::Visitor { |
| 345 public: |
| 346 explicit WidenVisitor(Typer* typer) |
| 347 : Visitor(typer), |
| 348 local_zone_(zone()->isolate()), |
| 349 enabled_(graph()->NodeCount(), true, &local_zone_), |
| 350 queue_(&local_zone_) {} |
| 351 |
| 352 void Run(NodeSet* nodes) { |
| 353 // Queue all the roots. |
| 354 for (Node* node : *nodes) { |
| 355 Queue(node); |
| 356 } |
| 357 |
| 358 while (!queue_.empty()) { |
| 359 Node* node = queue_.front(); |
| 360 queue_.pop(); |
| 361 |
| 362 if (node->op()->ValueOutputCount() > 0) { |
| 363 // Enable future queuing (and thus re-typing) of this node. |
| 364 enabled_[node->id()] = true; |
| 365 |
| 366 // Compute the new type. |
| 367 Bounds previous = BoundsOrNone(node); |
| 368 Bounds current = TypeNode(node); |
| 369 |
| 370 // Speed up termination in the presence of range types: |
| 371 current.upper = Weaken(current.upper, previous.upper); |
| 372 current.lower = Weaken(current.lower, previous.lower); |
| 373 |
| 374 // Types should not get less precise. |
| 375 DCHECK(previous.lower->Is(current.lower)); |
| 376 DCHECK(previous.upper->Is(current.upper)); |
| 377 |
| 378 NodeProperties::SetBounds(node, current); |
| 379 // If something changed, push all uses into the queue. |
| 380 if (!(previous.Narrows(current) && current.Narrows(previous))) { |
| 381 for (Node* use : node->uses()) { |
| 382 Queue(use); |
| 383 } |
| 384 } |
| 385 } |
| 386 // If there is no value output, we deliberately leave the node disabled |
| 387 // for queuing - there is no need to type it. |
| 388 } |
| 389 } |
| 390 |
| 391 void Queue(Node* node) { |
| 392 // If the node is enabled for queuing, push it to the queue and disable it |
| 393 // (to avoid queuing it multiple times). |
| 394 if (enabled_[node->id()]) { |
| 395 queue_.push(node); |
| 396 enabled_[node->id()] = false; |
| 397 } |
| 398 } |
| 399 |
| 400 private: |
| 401 Zone local_zone_; |
| 402 BoolVector enabled_; |
| 403 ZoneQueue<Node*> queue_; |
379 }; | 404 }; |
380 | 405 |
381 | 406 |
382 void Typer::Run() { | 407 void Typer::Run() { |
383 { | 408 RunVisitor typing(this); |
384 // TODO(titzer): this is a hack. Reset types for interior nodes first. | 409 graph_->VisitNodeInputsFromEnd(&typing); |
385 NodeDeque deque(zone()); | 410 // Find least fixpoint. |
386 NodeMarker<bool> marked(graph(), 2); | 411 WidenVisitor widen(this); |
387 deque.push_front(graph()->end()); | 412 widen.Run(&typing.redo); |
388 marked.Set(graph()->end(), true); | |
389 while (!deque.empty()) { | |
390 Node* node = deque.front(); | |
391 deque.pop_front(); | |
392 // TODO(titzer): there shouldn't be a need to retype constants. | |
393 if (node->op()->ValueOutputCount() > 0) | |
394 NodeProperties::RemoveBounds(node); | |
395 for (Node* input : node->inputs()) { | |
396 if (!marked.Get(input)) { | |
397 marked.Set(input, true); | |
398 deque.push_back(input); | |
399 } | |
400 } | |
401 } | |
402 } | |
403 | |
404 Visitor visitor(this); | |
405 GraphReducer graph_reducer(graph(), zone()); | |
406 graph_reducer.AddReducer(&visitor); | |
407 graph_reducer.ReduceGraph(); | |
408 } | 413 } |
409 | 414 |
410 | 415 |
411 void Typer::Decorator::Decorate(Node* node) { | 416 void Typer::Decorator::Decorate(Node* node) { |
412 if (node->op()->ValueOutputCount() > 0) { | 417 if (node->op()->ValueOutputCount() > 0) { |
413 // Only eagerly type-decorate nodes with known input types. | 418 // Only eagerly type-decorate nodes with known input types. |
414 // Other cases will generally require a proper fixpoint iteration with Run. | 419 // Other cases will generally require a proper fixpoint iteration with Run. |
415 bool is_typed = NodeProperties::IsTyped(node); | 420 bool is_typed = NodeProperties::IsTyped(node); |
416 if (is_typed || NodeProperties::AllValueInputsAreTyped(node)) { | 421 if (is_typed || NodeProperties::AllValueInputsAreTyped(node)) { |
417 Visitor typing(typer_); | 422 Visitor typing(typer_); |
(...skipping 872 matching lines...) |
1290 Bounds Typer::Visitor::TypeJSInstanceOf(Node* node) { | 1295 Bounds Typer::Visitor::TypeJSInstanceOf(Node* node) { |
1291 return Bounds(Type::None(zone()), Type::Boolean(zone())); | 1296 return Bounds(Type::None(zone()), Type::Boolean(zone())); |
1292 } | 1297 } |
1293 | 1298 |
1294 | 1299 |
1295 // JS context operators. | 1300 // JS context operators. |
1296 | 1301 |
1297 | 1302 |
1298 Bounds Typer::Visitor::TypeJSLoadContext(Node* node) { | 1303 Bounds Typer::Visitor::TypeJSLoadContext(Node* node) { |
1299 Bounds outer = Operand(node, 0); | 1304 Bounds outer = Operand(node, 0); |
1300 Type* context_type = outer.upper; | 1305 DCHECK(outer.upper->Maybe(Type::Internal())); |
1301 if (context_type->Is(Type::None())) { | |
1302 // Upper bound of context is not yet known. | |
1303 return Bounds(Type::None(), Type::Any()); | |
1304 } | |
1305 | |
1306 DCHECK(context_type->Maybe(Type::Internal())); | |
1307 // TODO(rossberg): More precisely, instead of the above assertion, we should | 1306 // TODO(rossberg): More precisely, instead of the above assertion, we should |
1308 // back-propagate the constraint that it has to be a subtype of Internal. | 1307 // back-propagate the constraint that it has to be a subtype of Internal. |
1309 | 1308 |
1310 ContextAccess access = OpParameter<ContextAccess>(node); | 1309 ContextAccess access = OpParameter<ContextAccess>(node); |
| 1310 Type* context_type = outer.upper; |
1311 MaybeHandle<Context> context; | 1311 MaybeHandle<Context> context; |
1312 if (context_type->IsConstant()) { | 1312 if (context_type->IsConstant()) { |
1313 context = Handle<Context>::cast(context_type->AsConstant()->Value()); | 1313 context = Handle<Context>::cast(context_type->AsConstant()->Value()); |
1314 } | 1314 } |
1315 // Walk context chain (as far as known), mirroring dynamic lookup. | 1315 // Walk context chain (as far as known), mirroring dynamic lookup. |
1316 // Since contexts are mutable, the information is only useful as a lower | 1316 // Since contexts are mutable, the information is only useful as a lower |
1317 // bound. | 1317 // bound. |
1318 // TODO(rossberg): Could use scope info to fix upper bounds for constant | 1318 // TODO(rossberg): Could use scope info to fix upper bounds for constant |
1319 // bindings if we know that this code is never shared. | 1319 // bindings if we know that this code is never shared. |
1320 for (size_t i = access.depth(); i > 0; --i) { | 1320 for (size_t i = access.depth(); i > 0; --i) { |
(...skipping 753 matching lines...) |
2074 return typer_->cache_->Get(kFloat64ArrayFunc); | 2074 return typer_->cache_->Get(kFloat64ArrayFunc); |
2075 } | 2075 } |
2076 } | 2076 } |
2077 } | 2077 } |
2078 return Type::Constant(value, zone()); | 2078 return Type::Constant(value, zone()); |
2079 } | 2079 } |
2080 | 2080 |
2081 } // namespace compiler | 2081 } // namespace compiler |
2082 } // namespace internal | 2082 } // namespace internal |
2083 } // namespace v8 | 2083 } // namespace v8 |