| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 68 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 79 } | 79 } |
| 80 } | 80 } |
| 81 | 81 |
| 82 | 82 |
| 83 void HValue::InferRepresentation(HInferRepresentationPhase* h_infer) { | 83 void HValue::InferRepresentation(HInferRepresentationPhase* h_infer) { |
| 84 ASSERT(CheckFlag(kFlexibleRepresentation)); | 84 ASSERT(CheckFlag(kFlexibleRepresentation)); |
| 85 Representation new_rep = RepresentationFromInputs(); | 85 Representation new_rep = RepresentationFromInputs(); |
| 86 UpdateRepresentation(new_rep, h_infer, "inputs"); | 86 UpdateRepresentation(new_rep, h_infer, "inputs"); |
| 87 new_rep = RepresentationFromUses(); | 87 new_rep = RepresentationFromUses(); |
| 88 UpdateRepresentation(new_rep, h_infer, "uses"); | 88 UpdateRepresentation(new_rep, h_infer, "uses"); |
| 89 new_rep = RepresentationFromUseRequirements(); | 89 if (representation().IsSmi() && HasNonSmiUse()) { |
| 90 if (new_rep.fits_into(Representation::Integer32())) { | 90 UpdateRepresentation( |
| 91 UpdateRepresentation(new_rep, h_infer, "use requirements"); | 91 Representation::Integer32(), h_infer, "use requirements"); |
| 92 } | 92 } |
| 93 } | 93 } |
| 94 | 94 |
| 95 | 95 |
| 96 Representation HValue::RepresentationFromUses() { | 96 Representation HValue::RepresentationFromUses() { |
| 97 if (HasNoUses()) return Representation::None(); | 97 if (HasNoUses()) return Representation::None(); |
| 98 | 98 |
| 99 // Array of use counts for each representation. | 99 // Array of use counts for each representation. |
| 100 int use_count[Representation::kNumRepresentations] = { 0 }; | 100 int use_count[Representation::kNumRepresentations] = { 0 }; |
| 101 | 101 |
| (...skipping 152 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 254 } | 254 } |
| 255 | 255 |
| 256 | 256 |
| 257 HValue* RangeEvaluationContext::ConvertGuarantee(HValue* guarantee) { | 257 HValue* RangeEvaluationContext::ConvertGuarantee(HValue* guarantee) { |
| 258 return guarantee->IsBoundsCheckBaseIndexInformation() | 258 return guarantee->IsBoundsCheckBaseIndexInformation() |
| 259 ? HBoundsCheckBaseIndexInformation::cast(guarantee)->bounds_check() | 259 ? HBoundsCheckBaseIndexInformation::cast(guarantee)->bounds_check() |
| 260 : guarantee; | 260 : guarantee; |
| 261 } | 261 } |
| 262 | 262 |
| 263 | 263 |
| 264 static int32_t ConvertAndSetOverflow(int64_t result, bool* overflow) { | 264 static int32_t ConvertAndSetOverflow(Representation r, |
| 265 if (result > kMaxInt) { | 265 int64_t result, |
| 266 *overflow = true; | 266 bool* overflow) { |
| 267 return kMaxInt; | 267 if (r.IsSmi()) { |
| 268 } | 268 if (result > Smi::kMaxValue) { |
| 269 if (result < kMinInt) { | 269 *overflow = true; |
| 270 *overflow = true; | 270 return Smi::kMaxValue; |
| 271 return kMinInt; | 271 } |
| 272 if (result < Smi::kMinValue) { |
| 273 *overflow = true; |
| 274 return Smi::kMinValue; |
| 275 } |
| 276 } else { |
| 277 if (result > kMaxInt) { |
| 278 *overflow = true; |
| 279 return kMaxInt; |
| 280 } |
| 281 if (result < kMinInt) { |
| 282 *overflow = true; |
| 283 return kMinInt; |
| 284 } |
| 272 } | 285 } |
| 273 return static_cast<int32_t>(result); | 286 return static_cast<int32_t>(result); |
| 274 } | 287 } |
| 275 | 288 |
| 276 | 289 |
| 277 static int32_t AddWithoutOverflow(int32_t a, int32_t b, bool* overflow) { | 290 static int32_t AddWithoutOverflow(Representation r, |
| 291 int32_t a, |
| 292 int32_t b, |
| 293 bool* overflow) { |
| 278 int64_t result = static_cast<int64_t>(a) + static_cast<int64_t>(b); | 294 int64_t result = static_cast<int64_t>(a) + static_cast<int64_t>(b); |
| 279 return ConvertAndSetOverflow(result, overflow); | 295 return ConvertAndSetOverflow(r, result, overflow); |
| 280 } | 296 } |
| 281 | 297 |
| 282 | 298 |
| 283 static int32_t SubWithoutOverflow(int32_t a, int32_t b, bool* overflow) { | 299 static int32_t SubWithoutOverflow(Representation r, |
| 300 int32_t a, |
| 301 int32_t b, |
| 302 bool* overflow) { |
| 284 int64_t result = static_cast<int64_t>(a) - static_cast<int64_t>(b); | 303 int64_t result = static_cast<int64_t>(a) - static_cast<int64_t>(b); |
| 285 return ConvertAndSetOverflow(result, overflow); | 304 return ConvertAndSetOverflow(r, result, overflow); |
| 286 } | 305 } |
| 287 | 306 |
| 288 | 307 |
| 289 static int32_t MulWithoutOverflow(int32_t a, int32_t b, bool* overflow) { | 308 static int32_t MulWithoutOverflow(const Representation& r, |
| 309 int32_t a, |
| 310 int32_t b, |
| 311 bool* overflow) { |
| 290 int64_t result = static_cast<int64_t>(a) * static_cast<int64_t>(b); | 312 int64_t result = static_cast<int64_t>(a) * static_cast<int64_t>(b); |
| 291 return ConvertAndSetOverflow(result, overflow); | 313 return ConvertAndSetOverflow(r, result, overflow); |
| 292 } | 314 } |
| 293 | 315 |
| 294 | 316 |
| 295 int32_t Range::Mask() const { | 317 int32_t Range::Mask() const { |
| 296 if (lower_ == upper_) return lower_; | 318 if (lower_ == upper_) return lower_; |
| 297 if (lower_ >= 0) { | 319 if (lower_ >= 0) { |
| 298 int32_t res = 1; | 320 int32_t res = 1; |
| 299 while (res < upper_) { | 321 while (res < upper_) { |
| 300 res = (res << 1) | 1; | 322 res = (res << 1) | 1; |
| 301 } | 323 } |
| 302 return res; | 324 return res; |
| 303 } | 325 } |
| 304 return 0xffffffff; | 326 return 0xffffffff; |
| 305 } | 327 } |
| 306 | 328 |
| 307 | 329 |
| 308 void Range::AddConstant(int32_t value) { | 330 void Range::AddConstant(int32_t value) { |
| 309 if (value == 0) return; | 331 if (value == 0) return; |
| 310 bool may_overflow = false; // Overflow is ignored here. | 332 bool may_overflow = false; // Overflow is ignored here. |
| 311 lower_ = AddWithoutOverflow(lower_, value, &may_overflow); | 333 Representation r = Representation::Integer32(); |
| 312 upper_ = AddWithoutOverflow(upper_, value, &may_overflow); | 334 lower_ = AddWithoutOverflow(r, lower_, value, &may_overflow); |
| 335 upper_ = AddWithoutOverflow(r, upper_, value, &may_overflow); |
| 313 #ifdef DEBUG | 336 #ifdef DEBUG |
| 314 Verify(); | 337 Verify(); |
| 315 #endif | 338 #endif |
| 316 } | 339 } |
| 317 | 340 |
| 318 | 341 |
| 319 void Range::Intersect(Range* other) { | 342 void Range::Intersect(Range* other) { |
| 320 upper_ = Min(upper_, other->upper_); | 343 upper_ = Min(upper_, other->upper_); |
| 321 lower_ = Max(lower_, other->lower_); | 344 lower_ = Max(lower_, other->lower_); |
| 322 bool b = CanBeMinusZero() && other->CanBeMinusZero(); | 345 bool b = CanBeMinusZero() && other->CanBeMinusZero(); |
| (...skipping 38 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 361 lower_ = lower_ << bits; | 384 lower_ = lower_ << bits; |
| 362 upper_ = upper_ << bits; | 385 upper_ = upper_ << bits; |
| 363 if (old_lower != lower_ >> bits || old_upper != upper_ >> bits) { | 386 if (old_lower != lower_ >> bits || old_upper != upper_ >> bits) { |
| 364 upper_ = kMaxInt; | 387 upper_ = kMaxInt; |
| 365 lower_ = kMinInt; | 388 lower_ = kMinInt; |
| 366 } | 389 } |
| 367 set_can_be_minus_zero(false); | 390 set_can_be_minus_zero(false); |
| 368 } | 391 } |
| 369 | 392 |
| 370 | 393 |
| 371 bool Range::AddAndCheckOverflow(Range* other) { | 394 bool Range::AddAndCheckOverflow(const Representation& r, Range* other) { |
| 372 bool may_overflow = false; | 395 bool may_overflow = false; |
| 373 lower_ = AddWithoutOverflow(lower_, other->lower(), &may_overflow); | 396 lower_ = AddWithoutOverflow(r, lower_, other->lower(), &may_overflow); |
| 374 upper_ = AddWithoutOverflow(upper_, other->upper(), &may_overflow); | 397 upper_ = AddWithoutOverflow(r, upper_, other->upper(), &may_overflow); |
| 375 KeepOrder(); | 398 KeepOrder(); |
| 376 #ifdef DEBUG | 399 #ifdef DEBUG |
| 377 Verify(); | 400 Verify(); |
| 378 #endif | 401 #endif |
| 379 return may_overflow; | 402 return may_overflow; |
| 380 } | 403 } |
| 381 | 404 |
| 382 | 405 |
| 383 bool Range::SubAndCheckOverflow(Range* other) { | 406 bool Range::SubAndCheckOverflow(const Representation& r, Range* other) { |
| 384 bool may_overflow = false; | 407 bool may_overflow = false; |
| 385 lower_ = SubWithoutOverflow(lower_, other->upper(), &may_overflow); | 408 lower_ = SubWithoutOverflow(r, lower_, other->upper(), &may_overflow); |
| 386 upper_ = SubWithoutOverflow(upper_, other->lower(), &may_overflow); | 409 upper_ = SubWithoutOverflow(r, upper_, other->lower(), &may_overflow); |
| 387 KeepOrder(); | 410 KeepOrder(); |
| 388 #ifdef DEBUG | 411 #ifdef DEBUG |
| 389 Verify(); | 412 Verify(); |
| 390 #endif | 413 #endif |
| 391 return may_overflow; | 414 return may_overflow; |
| 392 } | 415 } |
| 393 | 416 |
| 394 | 417 |
| 395 void Range::KeepOrder() { | 418 void Range::KeepOrder() { |
| 396 if (lower_ > upper_) { | 419 if (lower_ > upper_) { |
| 397 int32_t tmp = lower_; | 420 int32_t tmp = lower_; |
| 398 lower_ = upper_; | 421 lower_ = upper_; |
| 399 upper_ = tmp; | 422 upper_ = tmp; |
| 400 } | 423 } |
| 401 } | 424 } |
| 402 | 425 |
| 403 | 426 |
| 404 #ifdef DEBUG | 427 #ifdef DEBUG |
| 405 void Range::Verify() const { | 428 void Range::Verify() const { |
| 406 ASSERT(lower_ <= upper_); | 429 ASSERT(lower_ <= upper_); |
| 407 } | 430 } |
| 408 #endif | 431 #endif |
| 409 | 432 |
| 410 | 433 |
| 411 bool Range::MulAndCheckOverflow(Range* other) { | 434 bool Range::MulAndCheckOverflow(const Representation& r, Range* other) { |
| 412 bool may_overflow = false; | 435 bool may_overflow = false; |
| 413 int v1 = MulWithoutOverflow(lower_, other->lower(), &may_overflow); | 436 int v1 = MulWithoutOverflow(r, lower_, other->lower(), &may_overflow); |
| 414 int v2 = MulWithoutOverflow(lower_, other->upper(), &may_overflow); | 437 int v2 = MulWithoutOverflow(r, lower_, other->upper(), &may_overflow); |
| 415 int v3 = MulWithoutOverflow(upper_, other->lower(), &may_overflow); | 438 int v3 = MulWithoutOverflow(r, upper_, other->lower(), &may_overflow); |
| 416 int v4 = MulWithoutOverflow(upper_, other->upper(), &may_overflow); | 439 int v4 = MulWithoutOverflow(r, upper_, other->upper(), &may_overflow); |
| 417 lower_ = Min(Min(v1, v2), Min(v3, v4)); | 440 lower_ = Min(Min(v1, v2), Min(v3, v4)); |
| 418 upper_ = Max(Max(v1, v2), Max(v3, v4)); | 441 upper_ = Max(Max(v1, v2), Max(v3, v4)); |
| 419 #ifdef DEBUG | 442 #ifdef DEBUG |
| 420 Verify(); | 443 Verify(); |
| 421 #endif | 444 #endif |
| 422 return may_overflow; | 445 return may_overflow; |
| 423 } | 446 } |
| 424 | 447 |
| 425 | 448 |
| 426 const char* HType::ToString() { | 449 const char* HType::ToString() { |
| (...skipping 602 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1029 if (context->upper_bound() == length() && | 1052 if (context->upper_bound() == length() && |
| 1030 context->lower_bound_guarantee() != NULL && | 1053 context->lower_bound_guarantee() != NULL && |
| 1031 context->lower_bound_guarantee() != this && | 1054 context->lower_bound_guarantee() != this && |
| 1032 context->lower_bound_guarantee()->block() != block() && | 1055 context->lower_bound_guarantee()->block() != block() && |
| 1033 offset() < context->offset() && | 1056 offset() < context->offset() && |
| 1034 index_can_increase() && | 1057 index_can_increase() && |
| 1035 context->upper_bound_guarantee() == NULL) { | 1058 context->upper_bound_guarantee() == NULL) { |
| 1036 offset_ = context->offset(); | 1059 offset_ = context->offset(); |
| 1037 SetResponsibilityForRange(DIRECTION_UPPER); | 1060 SetResponsibilityForRange(DIRECTION_UPPER); |
| 1038 context->set_upper_bound_guarantee(this); | 1061 context->set_upper_bound_guarantee(this); |
| 1062 isolate()->counters()->bounds_checks_eliminated()->Increment(); |
| 1039 } else if (context->upper_bound_guarantee() != NULL && | 1063 } else if (context->upper_bound_guarantee() != NULL && |
| 1040 context->upper_bound_guarantee() != this && | 1064 context->upper_bound_guarantee() != this && |
| 1041 context->upper_bound_guarantee()->block() != block() && | 1065 context->upper_bound_guarantee()->block() != block() && |
| 1042 offset() > context->offset() && | 1066 offset() > context->offset() && |
| 1043 index_can_decrease() && | 1067 index_can_decrease() && |
| 1044 context->lower_bound_guarantee() == NULL) { | 1068 context->lower_bound_guarantee() == NULL) { |
| 1045 offset_ = context->offset(); | 1069 offset_ = context->offset(); |
| 1046 SetResponsibilityForRange(DIRECTION_LOWER); | 1070 SetResponsibilityForRange(DIRECTION_LOWER); |
| 1047 context->set_lower_bound_guarantee(this); | 1071 context->set_lower_bound_guarantee(this); |
| 1072 isolate()->counters()->bounds_checks_eliminated()->Increment(); |
| 1048 } | 1073 } |
| 1049 } | 1074 } |
| 1050 | 1075 |
| 1051 | 1076 |
| 1052 void HBoundsCheck::ApplyIndexChange() { | 1077 void HBoundsCheck::ApplyIndexChange() { |
| 1053 if (skip_check()) return; | 1078 if (skip_check()) return; |
| 1054 | 1079 |
| 1055 DecompositionResult decomposition; | 1080 DecompositionResult decomposition; |
| 1056 bool index_is_decomposable = index()->TryDecompose(&decomposition); | 1081 bool index_is_decomposable = index()->TryDecompose(&decomposition); |
| 1057 if (index_is_decomposable) { | 1082 if (index_is_decomposable) { |
| (...skipping 40 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1098 scale_ = 0; | 1123 scale_ = 0; |
| 1099 responsibility_direction_ = DIRECTION_NONE; | 1124 responsibility_direction_ = DIRECTION_NONE; |
| 1100 } | 1125 } |
| 1101 | 1126 |
| 1102 | 1127 |
| 1103 void HBoundsCheck::AddInformativeDefinitions() { | 1128 void HBoundsCheck::AddInformativeDefinitions() { |
| 1104 // TODO(mmassi): Executing this code during AddInformativeDefinitions | 1129 // TODO(mmassi): Executing this code during AddInformativeDefinitions |
| 1105 // is a hack. Move it to some other HPhase. | 1130 // is a hack. Move it to some other HPhase. |
| 1106 if (FLAG_array_bounds_checks_elimination) { | 1131 if (FLAG_array_bounds_checks_elimination) { |
| 1107 if (index()->TryGuaranteeRange(length())) { | 1132 if (index()->TryGuaranteeRange(length())) { |
| 1108 set_skip_check(true); | 1133 set_skip_check(); |
| 1109 } | 1134 } |
| 1110 if (DetectCompoundIndex()) { | 1135 if (DetectCompoundIndex()) { |
| 1111 HBoundsCheckBaseIndexInformation* base_index_info = | 1136 HBoundsCheckBaseIndexInformation* base_index_info = |
| 1112 new(block()->graph()->zone()) | 1137 new(block()->graph()->zone()) |
| 1113 HBoundsCheckBaseIndexInformation(this); | 1138 HBoundsCheckBaseIndexInformation(this); |
| 1114 base_index_info->InsertAfter(this); | 1139 base_index_info->InsertAfter(this); |
| 1115 } | 1140 } |
| 1116 } | 1141 } |
| 1117 } | 1142 } |
| 1118 | 1143 |
| (...skipping 305 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1424 | 1449 |
| 1425 | 1450 |
| 1426 void HLoadFieldByIndex::PrintDataTo(StringStream* stream) { | 1451 void HLoadFieldByIndex::PrintDataTo(StringStream* stream) { |
| 1427 object()->PrintNameTo(stream); | 1452 object()->PrintNameTo(stream); |
| 1428 stream->Add(" "); | 1453 stream->Add(" "); |
| 1429 index()->PrintNameTo(stream); | 1454 index()->PrintNameTo(stream); |
| 1430 } | 1455 } |
| 1431 | 1456 |
| 1432 | 1457 |
| 1433 HValue* HBitwise::Canonicalize() { | 1458 HValue* HBitwise::Canonicalize() { |
| 1434 if (!representation().IsInteger32()) return this; | 1459 if (!representation().IsSmiOrInteger32()) return this; |
| 1435 // If x is an int32, then x & -1 == x, x | 0 == x and x ^ 0 == x. | 1460 // If x is an int32, then x & -1 == x, x | 0 == x and x ^ 0 == x. |
| 1436 int32_t nop_constant = (op() == Token::BIT_AND) ? -1 : 0; | 1461 int32_t nop_constant = (op() == Token::BIT_AND) ? -1 : 0; |
| 1437 if (left()->EqualsInteger32Constant(nop_constant) && | 1462 if (left()->EqualsInteger32Constant(nop_constant) && |
| 1438 !right()->CheckFlag(kUint32)) { | 1463 !right()->CheckFlag(kUint32)) { |
| 1439 return right(); | 1464 return right(); |
| 1440 } | 1465 } |
| 1441 if (right()->EqualsInteger32Constant(nop_constant) && | 1466 if (right()->EqualsInteger32Constant(nop_constant) && |
| 1442 !left()->CheckFlag(kUint32)) { | 1467 !left()->CheckFlag(kUint32)) { |
| 1443 return left(); | 1468 return left(); |
| 1444 } | 1469 } |
| (...skipping 99 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1544 } | 1569 } |
| 1545 | 1570 |
| 1546 | 1571 |
| 1547 HValue* HUnaryMathOperation::Canonicalize() { | 1572 HValue* HUnaryMathOperation::Canonicalize() { |
| 1548 if (op() == kMathFloor) { | 1573 if (op() == kMathFloor) { |
| 1549 HValue* val = value(); | 1574 HValue* val = value(); |
| 1550 if (val->IsChange()) val = HChange::cast(val)->value(); | 1575 if (val->IsChange()) val = HChange::cast(val)->value(); |
| 1551 | 1576 |
| 1552 // If the input is integer32 then we replace the floor instruction | 1577 // If the input is integer32 then we replace the floor instruction |
| 1553 // with its input. | 1578 // with its input. |
| 1554 if (val->representation().IsInteger32()) return val; | 1579 if (val->representation().IsSmiOrInteger32()) return val; |
| 1555 | 1580 |
| 1556 #if defined(V8_TARGET_ARCH_ARM) || defined(V8_TARGET_ARCH_IA32) || \ | 1581 #if defined(V8_TARGET_ARCH_ARM) || defined(V8_TARGET_ARCH_IA32) || \ |
| 1557 defined(V8_TARGET_ARCH_X64) || defined(V8_TARGET_ARCH_A64) | 1582 defined(V8_TARGET_ARCH_X64) || defined(V8_TARGET_ARCH_A64) |
| 1558 if (value()->IsDiv() && (value()->UseCount() == 1)) { | 1583 if (value()->IsDiv() && (value()->UseCount() == 1)) { |
| 1559 // TODO(2038): Implement this optimization for non ARM architectures. | 1584 // TODO(2038): Implement this optimization for non ARM architectures. |
| 1560 HDiv* hdiv = HDiv::cast(value()); | 1585 HDiv* hdiv = HDiv::cast(value()); |
| 1561 HValue* left = hdiv->left(); | 1586 HValue* left = hdiv->left(); |
| 1562 HValue* right = hdiv->right(); | 1587 HValue* right = hdiv->right(); |
| 1563 // Try to simplify left and right values of the division. | 1588 // Try to simplify left and right values of the division. |
| 1564 HValue* new_left = SimplifiedDividendForMathFloorOfDiv(left); | 1589 HValue* new_left = SimplifiedDividendForMathFloorOfDiv(left); |
| 1565 if (new_left == NULL && | 1590 if (new_left == NULL && |
| 1566 hdiv->observed_input_representation(1).IsSmiOrInteger32()) { | 1591 hdiv->observed_input_representation(1).IsSmiOrInteger32()) { |
| 1567 new_left = new(block()->zone()) | 1592 new_left = new(block()->zone()) HChange( |
| 1568 HChange(left, Representation::Integer32(), false, false); | 1593 left, Representation::Integer32(), false, false, false); |
| 1569 HChange::cast(new_left)->InsertBefore(this); | 1594 HChange::cast(new_left)->InsertBefore(this); |
| 1570 } | 1595 } |
| 1571 HValue* new_right = | 1596 HValue* new_right = |
| 1572 LChunkBuilder::SimplifiedDivisorForMathFloorOfDiv(right); | 1597 LChunkBuilder::SimplifiedDivisorForMathFloorOfDiv(right); |
| 1573 if (new_right == NULL && | 1598 if (new_right == NULL && |
| 1574 #if V8_TARGET_ARCH_ARM | 1599 #if V8_TARGET_ARCH_ARM |
| 1575 CpuFeatures::IsSupported(SUDIV) && | 1600 CpuFeatures::IsSupported(SUDIV) && |
| 1576 #endif | 1601 #endif |
| 1577 hdiv->observed_input_representation(2).IsSmiOrInteger32()) { | 1602 hdiv->observed_input_representation(2).IsSmiOrInteger32()) { |
| 1578 new_right = new(block()->zone()) | 1603 new_right = new(block()->zone()) HChange( |
| 1579 HChange(right, Representation::Integer32(), false, false); | 1604 right, Representation::Integer32(), false, false, false); |
| 1580 HChange::cast(new_right)->InsertBefore(this); | 1605 HChange::cast(new_right)->InsertBefore(this); |
| 1581 } | 1606 } |
| 1582 | 1607 |
| 1583 // Return if left or right are not optimizable. | 1608 // Return if left or right are not optimizable. |
| 1584 if ((new_left == NULL) || (new_right == NULL)) return this; | 1609 if ((new_left == NULL) || (new_right == NULL)) return this; |
| 1585 | 1610 |
| 1586 // Insert the new values in the graph. | 1611 // Insert the new values in the graph. |
| 1587 if (new_left->IsInstruction() && | 1612 if (new_left->IsInstruction() && |
| 1588 !HInstruction::cast(new_left)->IsLinked()) { | 1613 !HInstruction::cast(new_left)->IsLinked()) { |
| 1589 HInstruction::cast(new_left)->InsertBefore(this); | 1614 HInstruction::cast(new_left)->InsertBefore(this); |
| (...skipping 89 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1679 } | 1704 } |
| 1680 } | 1705 } |
| 1681 | 1706 |
| 1682 | 1707 |
| 1683 void HCheckMaps::PrintDataTo(StringStream* stream) { | 1708 void HCheckMaps::PrintDataTo(StringStream* stream) { |
| 1684 value()->PrintNameTo(stream); | 1709 value()->PrintNameTo(stream); |
| 1685 stream->Add(" [%p", *map_set()->first()); | 1710 stream->Add(" [%p", *map_set()->first()); |
| 1686 for (int i = 1; i < map_set()->length(); ++i) { | 1711 for (int i = 1; i < map_set()->length(); ++i) { |
| 1687 stream->Add(",%p", *map_set()->at(i)); | 1712 stream->Add(",%p", *map_set()->at(i)); |
| 1688 } | 1713 } |
| 1689 stream->Add("]"); | 1714 stream->Add("]%s", CanOmitMapChecks() ? "(omitted)" : ""); |
| 1690 } | 1715 } |
| 1691 | 1716 |
| 1692 | 1717 |
| 1693 void HCheckFunction::PrintDataTo(StringStream* stream) { | 1718 void HCheckFunction::PrintDataTo(StringStream* stream) { |
| 1694 value()->PrintNameTo(stream); | 1719 value()->PrintNameTo(stream); |
| 1695 stream->Add(" %p", *target()); | 1720 stream->Add(" %p", *target()); |
| 1696 } | 1721 } |
| 1697 | 1722 |
| 1698 | 1723 |
| 1699 HValue* HCheckFunction::Canonicalize() { | 1724 HValue* HCheckFunction::Canonicalize() { |
| (...skipping 40 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1740 left()->PrintNameTo(stream); | 1765 left()->PrintNameTo(stream); |
| 1741 stream->Add(" "); | 1766 stream->Add(" "); |
| 1742 right()->PrintNameTo(stream); | 1767 right()->PrintNameTo(stream); |
| 1743 stream->Add(" "); | 1768 stream->Add(" "); |
| 1744 context()->PrintNameTo(stream); | 1769 context()->PrintNameTo(stream); |
| 1745 } | 1770 } |
| 1746 | 1771 |
| 1747 | 1772 |
| 1748 Range* HValue::InferRange(Zone* zone) { | 1773 Range* HValue::InferRange(Zone* zone) { |
| 1749 Range* result; | 1774 Range* result; |
| 1750 if (type().IsSmi()) { | 1775 if (representation().IsSmi() || type().IsSmi()) { |
| 1751 result = new(zone) Range(Smi::kMinValue, Smi::kMaxValue); | 1776 result = new(zone) Range(Smi::kMinValue, Smi::kMaxValue); |
| 1752 result->set_can_be_minus_zero(false); | 1777 result->set_can_be_minus_zero(false); |
| 1753 } else { | 1778 } else { |
| 1754 result = new(zone) Range(); | 1779 result = new(zone) Range(); |
| 1755 result->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToInt32)); | 1780 result->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToInt32)); |
| 1756 // TODO(jkummerow): The range cannot be minus zero when the upper type | 1781 // TODO(jkummerow): The range cannot be minus zero when the upper type |
| 1757 // bound is Integer32. | 1782 // bound is Integer32. |
| 1758 } | 1783 } |
| 1759 return result; | 1784 return result; |
| 1760 } | 1785 } |
| 1761 | 1786 |
| 1762 | 1787 |
| 1763 Range* HChange::InferRange(Zone* zone) { | 1788 Range* HChange::InferRange(Zone* zone) { |
| 1764 Range* input_range = value()->range(); | 1789 Range* input_range = value()->range(); |
| 1765 if (from().IsInteger32() && | 1790 if (from().IsInteger32() && !value()->CheckFlag(HInstruction::kUint32) && |
| 1766 to().IsSmiOrTagged() && | 1791 (to().IsSmi() || |
| 1767 !value()->CheckFlag(HInstruction::kUint32) && | 1792 (to().IsTagged() && |
| 1768 input_range != NULL && input_range->IsInSmiRange()) { | 1793 input_range != NULL && |
| 1794 input_range->IsInSmiRange()))) { |
| 1769 set_type(HType::Smi()); | 1795 set_type(HType::Smi()); |
| 1770 ClearGVNFlag(kChangesNewSpacePromotion); | 1796 ClearGVNFlag(kChangesNewSpacePromotion); |
| 1771 } | 1797 } |
| 1772 Range* result = (input_range != NULL) | 1798 Range* result = (input_range != NULL) |
| 1773 ? input_range->Copy(zone) | 1799 ? input_range->Copy(zone) |
| 1774 : HValue::InferRange(zone); | 1800 : HValue::InferRange(zone); |
| 1775 result->set_can_be_minus_zero(!to().IsSmiOrInteger32() || | 1801 result->set_can_be_minus_zero(!to().IsSmiOrInteger32() || |
| 1776 !CheckFlag(kAllUsesTruncatingToInt32)); | 1802 !(CheckFlag(kAllUsesTruncatingToInt32) || |
| 1803 CheckFlag(kAllUsesTruncatingToSmi))); |
| 1804 if (to().IsSmi()) result->ClampToSmi(); |
| 1777 return result; | 1805 return result; |
| 1778 } | 1806 } |
| 1779 | 1807 |
| 1780 | 1808 |
| 1781 Range* HConstant::InferRange(Zone* zone) { | 1809 Range* HConstant::InferRange(Zone* zone) { |
| 1782 if (has_int32_value_) { | 1810 if (has_int32_value_) { |
| 1783 Range* result = new(zone) Range(int32_value_, int32_value_); | 1811 Range* result = new(zone) Range(int32_value_, int32_value_); |
| 1784 result->set_can_be_minus_zero(false); | 1812 result->set_can_be_minus_zero(false); |
| 1785 return result; | 1813 return result; |
| 1786 } | 1814 } |
| (...skipping 16 matching lines...) Expand all Loading... |
| 1803 } | 1831 } |
| 1804 return range; | 1832 return range; |
| 1805 } | 1833 } |
| 1806 } else { | 1834 } else { |
| 1807 return HValue::InferRange(zone); | 1835 return HValue::InferRange(zone); |
| 1808 } | 1836 } |
| 1809 } | 1837 } |
| 1810 | 1838 |
| 1811 | 1839 |
| 1812 Range* HAdd::InferRange(Zone* zone) { | 1840 Range* HAdd::InferRange(Zone* zone) { |
| 1813 if (representation().IsInteger32()) { | 1841 Representation r = representation(); |
| 1842 if (r.IsSmiOrInteger32()) { |
| 1814 Range* a = left()->range(); | 1843 Range* a = left()->range(); |
| 1815 Range* b = right()->range(); | 1844 Range* b = right()->range(); |
| 1816 Range* res = a->Copy(zone); | 1845 Range* res = a->Copy(zone); |
| 1817 if (!res->AddAndCheckOverflow(b) || | 1846 if (!res->AddAndCheckOverflow(r, b) || |
| 1818 CheckFlag(kAllUsesTruncatingToInt32)) { | 1847 (r.IsInteger32() && CheckFlag(kAllUsesTruncatingToInt32)) || |
| 1848 (r.IsSmi() && CheckFlag(kAllUsesTruncatingToSmi))) { |
| 1819 ClearFlag(kCanOverflow); | 1849 ClearFlag(kCanOverflow); |
| 1820 } | 1850 } |
| 1821 res->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToInt32) && | 1851 res->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToSmi) && |
| 1852 !CheckFlag(kAllUsesTruncatingToInt32) && |
| 1822 a->CanBeMinusZero() && b->CanBeMinusZero()); | 1853 a->CanBeMinusZero() && b->CanBeMinusZero()); |
| 1823 return res; | 1854 return res; |
| 1824 } else { | 1855 } else { |
| 1825 return HValue::InferRange(zone); | 1856 return HValue::InferRange(zone); |
| 1826 } | 1857 } |
| 1827 } | 1858 } |
| 1828 | 1859 |
| 1829 | 1860 |
| 1830 Range* HSub::InferRange(Zone* zone) { | 1861 Range* HSub::InferRange(Zone* zone) { |
| 1831 if (representation().IsInteger32()) { | 1862 Representation r = representation(); |
| 1863 if (r.IsSmiOrInteger32()) { |
| 1832 Range* a = left()->range(); | 1864 Range* a = left()->range(); |
| 1833 Range* b = right()->range(); | 1865 Range* b = right()->range(); |
| 1834 Range* res = a->Copy(zone); | 1866 Range* res = a->Copy(zone); |
| 1835 if (!res->SubAndCheckOverflow(b) || | 1867 if (!res->SubAndCheckOverflow(r, b) || |
| 1836 CheckFlag(kAllUsesTruncatingToInt32)) { | 1868 (r.IsInteger32() && CheckFlag(kAllUsesTruncatingToInt32)) || |
| 1869 (r.IsSmi() && CheckFlag(kAllUsesTruncatingToSmi))) { |
| 1837 ClearFlag(kCanOverflow); | 1870 ClearFlag(kCanOverflow); |
| 1838 } | 1871 } |
| 1839 res->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToInt32) && | 1872 res->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToSmi) && |
| 1873 !CheckFlag(kAllUsesTruncatingToInt32) && |
| 1840 a->CanBeMinusZero() && b->CanBeZero()); | 1874 a->CanBeMinusZero() && b->CanBeZero()); |
| 1841 return res; | 1875 return res; |
| 1842 } else { | 1876 } else { |
| 1843 return HValue::InferRange(zone); | 1877 return HValue::InferRange(zone); |
| 1844 } | 1878 } |
| 1845 } | 1879 } |
| 1846 | 1880 |
| 1847 | 1881 |
| 1848 Range* HMul::InferRange(Zone* zone) { | 1882 Range* HMul::InferRange(Zone* zone) { |
| 1849 if (representation().IsInteger32()) { | 1883 Representation r = representation(); |
| 1884 if (r.IsSmiOrInteger32()) { |
| 1850 Range* a = left()->range(); | 1885 Range* a = left()->range(); |
| 1851 Range* b = right()->range(); | 1886 Range* b = right()->range(); |
| 1852 Range* res = a->Copy(zone); | 1887 Range* res = a->Copy(zone); |
| 1853 if (!res->MulAndCheckOverflow(b)) { | 1888 if (!res->MulAndCheckOverflow(r, b)) { |
| 1854 // Clearing the kCanOverflow flag when kAllUsesAreTruncatingToInt32 | 1889 // Clearing the kCanOverflow flag when kAllUsesAreTruncatingToInt32 |
| 1855 // would be wrong, because truncated integer multiplication is too | 1890 // would be wrong, because truncated integer multiplication is too |
| 1856 // precise and therefore not the same as converting to Double and back. | 1891 // precise and therefore not the same as converting to Double and back. |
| 1857 ClearFlag(kCanOverflow); | 1892 ClearFlag(kCanOverflow); |
| 1858 } | 1893 } |
| 1859 res->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToInt32) && | 1894 res->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToSmi) && |
| 1895 !CheckFlag(kAllUsesTruncatingToInt32) && |
| 1860 ((a->CanBeZero() && b->CanBeNegative()) || | 1896 ((a->CanBeZero() && b->CanBeNegative()) || |
| 1861 (a->CanBeNegative() && b->CanBeZero()))); | 1897 (a->CanBeNegative() && b->CanBeZero()))); |
| 1862 return res; | 1898 return res; |
| 1863 } else { | 1899 } else { |
| 1864 return HValue::InferRange(zone); | 1900 return HValue::InferRange(zone); |
| 1865 } | 1901 } |
| 1866 } | 1902 } |
| 1867 | 1903 |
| 1868 | 1904 |
| 1869 Range* HDiv::InferRange(Zone* zone) { | 1905 Range* HDiv::InferRange(Zone* zone) { |
| (...skipping 97 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1967 result = false; | 2003 result = false; |
| 1968 break; | 2004 break; |
| 1969 } | 2005 } |
| 1970 } | 2006 } |
| 1971 ClearFlag(kNumericConstraintEvaluationInProgress); | 2007 ClearFlag(kNumericConstraintEvaluationInProgress); |
| 1972 | 2008 |
| 1973 return result; | 2009 return result; |
| 1974 } | 2010 } |
| 1975 | 2011 |
| 1976 | 2012 |
| 2013 InductionVariableData* InductionVariableData::ExaminePhi(HPhi* phi) { |
| 2014 if (phi->block()->loop_information() == NULL) return NULL; |
| 2015 if (phi->OperandCount() != 2) return NULL; |
| 2016 int32_t candidate_increment; |
| 2017 |
| 2018 candidate_increment = ComputeIncrement(phi, phi->OperandAt(0)); |
| 2019 if (candidate_increment != 0) { |
| 2020 return new(phi->block()->graph()->zone()) |
| 2021 InductionVariableData(phi, phi->OperandAt(1), candidate_increment); |
| 2022 } |
| 2023 |
| 2024 candidate_increment = ComputeIncrement(phi, phi->OperandAt(1)); |
| 2025 if (candidate_increment != 0) { |
| 2026 return new(phi->block()->graph()->zone()) |
| 2027 InductionVariableData(phi, phi->OperandAt(0), candidate_increment); |
| 2028 } |
| 2029 |
| 2030 return NULL; |
| 2031 } |
| 2032 |
| 2033 |
| 2034 /* |
| 2035 * This function tries to match the following patterns (and all the relevant |
| 2036 * variants related to |, & and + being commutative): |
| 2037 * base | constant_or_mask |
| 2038 * base & constant_and_mask |
| 2039 * (base + constant_offset) & constant_and_mask |
| 2040 * (base - constant_offset) & constant_and_mask |
| 2041 */ |
| 2042 void InductionVariableData::DecomposeBitwise( |
| 2043 HValue* value, |
| 2044 BitwiseDecompositionResult* result) { |
| 2045 HValue* base = IgnoreOsrValue(value); |
| 2046 result->base = value; |
| 2047 |
| 2048 if (!base->representation().IsInteger32()) return; |
| 2049 |
| 2050 if (base->IsBitwise()) { |
| 2051 bool allow_offset = false; |
| 2052 int32_t mask = 0; |
| 2053 |
| 2054 HBitwise* bitwise = HBitwise::cast(base); |
| 2055 if (bitwise->right()->IsInteger32Constant()) { |
| 2056 mask = bitwise->right()->GetInteger32Constant(); |
| 2057 base = bitwise->left(); |
| 2058 } else if (bitwise->left()->IsInteger32Constant()) { |
| 2059 mask = bitwise->left()->GetInteger32Constant(); |
| 2060 base = bitwise->right(); |
| 2061 } else { |
| 2062 return; |
| 2063 } |
| 2064 if (bitwise->op() == Token::BIT_AND) { |
| 2065 result->and_mask = mask; |
| 2066 allow_offset = true; |
| 2067 } else if (bitwise->op() == Token::BIT_OR) { |
| 2068 result->or_mask = mask; |
| 2069 } else { |
| 2070 return; |
| 2071 } |
| 2072 |
| 2073 result->context = bitwise->context(); |
| 2074 |
| 2075 if (allow_offset) { |
| 2076 if (base->IsAdd()) { |
| 2077 HAdd* add = HAdd::cast(base); |
| 2078 if (add->right()->IsInteger32Constant()) { |
| 2079 base = add->left(); |
| 2080 } else if (add->left()->IsInteger32Constant()) { |
| 2081 base = add->right(); |
| 2082 } |
| 2083 } else if (base->IsSub()) { |
| 2084 HSub* sub = HSub::cast(base); |
| 2085 if (sub->right()->IsInteger32Constant()) { |
| 2086 base = sub->left(); |
| 2087 } |
| 2088 } |
| 2089 } |
| 2090 |
| 2091 result->base = base; |
| 2092 } |
| 2093 } |
| 2094 |
| 2095 |
| 2096 void InductionVariableData::AddCheck(HBoundsCheck* check, |
| 2097 int32_t upper_limit) { |
| 2098 ASSERT(limit_validity() != NULL); |
| 2099 if (limit_validity() != check->block() && |
| 2100 !limit_validity()->Dominates(check->block())) return; |
| 2101 if (!phi()->block()->current_loop()->IsNestedInThisLoop( |
| 2102 check->block()->current_loop())) return; |
| 2103 |
| 2104 ChecksRelatedToLength* length_checks = checks(); |
| 2105 while (length_checks != NULL) { |
| 2106 if (length_checks->length() == check->length()) break; |
| 2107 length_checks = length_checks->next(); |
| 2108 } |
| 2109 if (length_checks == NULL) { |
| 2110 length_checks = new(check->block()->zone()) |
| 2111 ChecksRelatedToLength(check->length(), checks()); |
| 2112 checks_ = length_checks; |
| 2113 } |
| 2114 |
| 2115 length_checks->AddCheck(check, upper_limit); |
| 2116 } |
| 2117 |
| 2118 |
| 2119 void InductionVariableData::ChecksRelatedToLength::CloseCurrentBlock() { |
| 2120 if (checks() != NULL) { |
| 2121 InductionVariableCheck* c = checks(); |
| 2122 HBasicBlock* current_block = c->check()->block(); |
| 2123 while (c != NULL && c->check()->block() == current_block) { |
| 2124 c->set_upper_limit(current_upper_limit_); |
| 2125 c = c->next(); |
| 2126 } |
| 2127 } |
| 2128 } |
| 2129 |
| 2130 |
| 2131 void InductionVariableData::ChecksRelatedToLength::UseNewIndexInCurrentBlock( |
| 2132 Token::Value token, |
| 2133 int32_t mask, |
| 2134 HValue* index_base, |
| 2135 HValue* context) { |
| 2136 ASSERT(first_check_in_block() != NULL); |
| 2137 HValue* previous_index = first_check_in_block()->index(); |
| 2138 ASSERT(context != NULL); |
| 2139 |
| 2140 set_added_constant(new(index_base->block()->graph()->zone()) HConstant( |
| 2141 mask, index_base->representation())); |
| 2142 if (added_index() != NULL) { |
| 2143 added_constant()->InsertBefore(added_index()); |
| 2144 } else { |
| 2145 added_constant()->InsertBefore(first_check_in_block()); |
| 2146 } |
| 2147 |
| 2148 if (added_index() == NULL) { |
| 2149 first_check_in_block()->ReplaceAllUsesWith(first_check_in_block()->index()); |
| 2150 HInstruction* new_index = HBitwise::New( |
| 2151 index_base->block()->graph()->zone(), |
| 2152 token, context, index_base, added_constant()); |
| 2153 ASSERT(new_index->IsBitwise()); |
| 2154 new_index->ClearAllSideEffects(); |
| 2155 new_index->AssumeRepresentation(Representation::Integer32()); |
| 2156 set_added_index(HBitwise::cast(new_index)); |
| 2157 added_index()->InsertBefore(first_check_in_block()); |
| 2158 } |
| 2159 ASSERT(added_index()->op() == token); |
| 2160 |
| 2161 added_index()->SetOperandAt(1, index_base); |
| 2162 added_index()->SetOperandAt(2, added_constant()); |
| 2163 first_check_in_block()->SetOperandAt(0, added_index()); |
| 2164 if (previous_index->UseCount() == 0) { |
| 2165 previous_index->DeleteAndReplaceWith(NULL); |
| 2166 } |
| 2167 } |
| 2168 |
| 2169 void InductionVariableData::ChecksRelatedToLength::AddCheck( |
| 2170 HBoundsCheck* check, |
| 2171 int32_t upper_limit) { |
| 2172 BitwiseDecompositionResult decomposition; |
| 2173 InductionVariableData::DecomposeBitwise(check->index(), &decomposition); |
| 2174 |
| 2175 if (first_check_in_block() == NULL || |
| 2176 first_check_in_block()->block() != check->block()) { |
| 2177 CloseCurrentBlock(); |
| 2178 |
| 2179 first_check_in_block_ = check; |
| 2180 set_added_index(NULL); |
| 2181 set_added_constant(NULL); |
| 2182 current_and_mask_in_block_ = decomposition.and_mask; |
| 2183 current_or_mask_in_block_ = decomposition.or_mask; |
| 2184 current_upper_limit_ = upper_limit; |
| 2185 |
| 2186 InductionVariableCheck* new_check = new(check->block()->graph()->zone()) |
| 2187 InductionVariableCheck(check, checks_, upper_limit); |
| 2188 checks_ = new_check; |
| 2189 return; |
| 2190 } |
| 2191 |
| 2192 if (upper_limit > current_upper_limit()) { |
| 2193 current_upper_limit_ = upper_limit; |
| 2194 } |
| 2195 |
| 2196 if (decomposition.and_mask != 0 && |
| 2197 current_or_mask_in_block() == 0) { |
| 2198 if (current_and_mask_in_block() == 0 || |
| 2199 decomposition.and_mask > current_and_mask_in_block()) { |
| 2200 UseNewIndexInCurrentBlock(Token::BIT_AND, |
| 2201 decomposition.and_mask, |
| 2202 decomposition.base, |
| 2203 decomposition.context); |
| 2204 current_and_mask_in_block_ = decomposition.and_mask; |
| 2205 } |
| 2206 check->set_skip_check(); |
| 2207 } |
| 2208 if (current_and_mask_in_block() == 0) { |
| 2209 if (decomposition.or_mask > current_or_mask_in_block()) { |
| 2210 UseNewIndexInCurrentBlock(Token::BIT_OR, |
| 2211 decomposition.or_mask, |
| 2212 decomposition.base, |
| 2213 decomposition.context); |
| 2214 current_or_mask_in_block_ = decomposition.or_mask; |
| 2215 } |
| 2216 check->set_skip_check(); |
| 2217 } |
| 2218 |
| 2219 if (!check->skip_check()) { |
| 2220 InductionVariableCheck* new_check = new(check->block()->graph()->zone()) |
| 2221 InductionVariableCheck(check, checks_, upper_limit); |
| 2222 checks_ = new_check; |
| 2223 } |
| 2224 } |
| 2225 |
| 2226 |
| 2227 /* |
| 2228 * This method detects if phi is an induction variable, with phi_operand as |
| 2229 * its "incremented" value (the other operand would be the "base" value). |
| 2230 * |
| 2231 * It checks if phi_operand has the form "phi + constant". |
| 2232 * If yes, the constant is the increment that the induction variable gets at |
| 2233 * every loop iteration. |
| 2234 * Otherwise it returns 0. |
| 2235 */ |
| 2236 int32_t InductionVariableData::ComputeIncrement(HPhi* phi, |
| 2237 HValue* phi_operand) { |
| 2238 if (!phi_operand->representation().IsInteger32()) return 0; |
| 2239 |
| 2240 if (phi_operand->IsAdd()) { |
| 2241 HAdd* operation = HAdd::cast(phi_operand); |
| 2242 if (operation->left() == phi && |
| 2243 operation->right()->IsInteger32Constant()) { |
| 2244 return operation->right()->GetInteger32Constant(); |
| 2245 } else if (operation->right() == phi && |
| 2246 operation->left()->IsInteger32Constant()) { |
| 2247 return operation->left()->GetInteger32Constant(); |
| 2248 } |
| 2249 } else if (phi_operand->IsSub()) { |
| 2250 HSub* operation = HSub::cast(phi_operand); |
| 2251 if (operation->left() == phi && |
| 2252 operation->right()->IsInteger32Constant()) { |
| 2253 return -operation->right()->GetInteger32Constant(); |
| 2254 } |
| 2255 } |
| 2256 |
| 2257 return 0; |
| 2258 } |
| 2259 |
| 2260 |
| 2261 /* |
| 2262 * Swaps the information in "update" with the one contained in "this". |
| 2263 * The swapping is important because this method is used while doing a |
| 2264 * dominator tree traversal, and "update" will retain the old data that |
| 2265 * will be restored while backtracking. |
| 2266 */ |
| 2267 void InductionVariableData::UpdateAdditionalLimit( |
| 2268 InductionVariableLimitUpdate* update) { |
| 2269 ASSERT(update->updated_variable == this); |
| 2270 if (update->limit_is_upper) { |
| 2271 swap(&additional_upper_limit_, &update->limit); |
| 2272 swap(&additional_upper_limit_is_included_, &update->limit_is_included); |
| 2273 } else { |
| 2274 swap(&additional_lower_limit_, &update->limit); |
| 2275 swap(&additional_lower_limit_is_included_, &update->limit_is_included); |
| 2276 } |
| 2277 } |
| 2278 |
| 2279 |
| 2280 int32_t InductionVariableData::ComputeUpperLimit(int32_t and_mask, |
| 2281 int32_t or_mask) { |
| 2282 // Should be Smi::kMaxValue but it must fit 32 bits; lower is safe anyway. |
| 2283 const int32_t MAX_LIMIT = 1 << 30; |
| 2284 |
| 2285 int32_t result = MAX_LIMIT; |
| 2286 |
| 2287 if (limit() != NULL && |
| 2288 limit()->IsInteger32Constant()) { |
| 2289 int32_t limit_value = limit()->GetInteger32Constant(); |
| 2290 if (!limit_included()) { |
| 2291 limit_value--; |
| 2292 } |
| 2293 if (limit_value < result) result = limit_value; |
| 2294 } |
| 2295 |
| 2296 if (additional_upper_limit() != NULL && |
| 2297 additional_upper_limit()->IsInteger32Constant()) { |
| 2298 int32_t limit_value = additional_upper_limit()->GetInteger32Constant(); |
| 2299 if (!additional_upper_limit_is_included()) { |
| 2300 limit_value--; |
| 2301 } |
| 2302 if (limit_value < result) result = limit_value; |
| 2303 } |
| 2304 |
| 2305 if (and_mask > 0 && and_mask < MAX_LIMIT) { |
| 2306 if (and_mask < result) result = and_mask; |
| 2307 return result; |
| 2308 } |
| 2309 |
| 2310 // Add the effect of the or_mask. |
| 2311 result |= or_mask; |
| 2312 |
| 2313 return result >= MAX_LIMIT ? kNoLimit : result; |
| 2314 } |
| 2315 |
| 2316 |
| 2317 HValue* InductionVariableData::IgnoreOsrValue(HValue* v) { |
| 2318 if (!v->IsPhi()) return v; |
| 2319 HPhi* phi = HPhi::cast(v); |
| 2320 if (phi->OperandCount() != 2) return v; |
| 2321 if (phi->OperandAt(0)->block()->is_osr_entry()) { |
| 2322 return phi->OperandAt(1); |
| 2323 } else if (phi->OperandAt(1)->block()->is_osr_entry()) { |
| 2324 return phi->OperandAt(0); |
| 2325 } else { |
| 2326 return v; |
| 2327 } |
| 2328 } |
| 2329 |
| 2330 |
| 2331 InductionVariableData* InductionVariableData::GetInductionVariableData( |
| 2332 HValue* v) { |
| 2333 v = IgnoreOsrValue(v); |
| 2334 if (v->IsPhi()) { |
| 2335 return HPhi::cast(v)->induction_variable_data(); |
| 2336 } |
| 2337 return NULL; |
| 2338 } |
| 2339 |
| 2340 |
| 2341 /* |
| 2342 * Check if a conditional branch to "current_branch" with token "token" is |
| 2343 * the branch that keeps the induction loop running (and, conversely, will |
| 2344 * terminate it if the "other_branch" is taken). |
| 2345 * |
| 2346 * Three conditions must be met: |
| 2347 * - "current_branch" must be in the induction loop. |
| 2348 * - "other_branch" must be out of the induction loop. |
| 2349 * - "token" and the induction increment must be "compatible": the token should |
| 2350 * be a condition that keeps the execution inside the loop until the limit is |
| 2351 * reached. |
| 2352 */ |
| 2353 bool InductionVariableData::CheckIfBranchIsLoopGuard( |
| 2354 Token::Value token, |
| 2355 HBasicBlock* current_branch, |
| 2356 HBasicBlock* other_branch) { |
| 2357 if (!phi()->block()->current_loop()->IsNestedInThisLoop( |
| 2358 current_branch->current_loop())) { |
| 2359 return false; |
| 2360 } |
| 2361 |
| 2362 if (phi()->block()->current_loop()->IsNestedInThisLoop( |
| 2363 other_branch->current_loop())) { |
| 2364 return false; |
| 2365 } |
| 2366 |
| 2367 if (increment() > 0 && (token == Token::LT || token == Token::LTE)) { |
| 2368 return true; |
| 2369 } |
| 2370 if (increment() < 0 && (token == Token::GT || token == Token::GTE)) { |
| 2371 return true; |
| 2372 } |
| 2373 if (Token::IsInequalityOp(token) && (increment() == 1 || increment() == -1)) { |
| 2374 return true; |
| 2375 } |
| 2376 |
| 2377 return false; |
| 2378 } |
| 2379 |
| 2380 |
| 2381 void InductionVariableData::ComputeLimitFromPredecessorBlock( |
| 2382 HBasicBlock* block, |
| 2383 LimitFromPredecessorBlock* result) { |
| 2384 if (block->predecessors()->length() != 1) return; |
| 2385 HBasicBlock* predecessor = block->predecessors()->at(0); |
| 2386 HInstruction* end = predecessor->last(); |
| 2387 |
| 2388 if (!end->IsCompareNumericAndBranch()) return; |
| 2389 HCompareNumericAndBranch* branch = HCompareNumericAndBranch::cast(end); |
| 2390 |
| 2391 Token::Value token = branch->token(); |
| 2392 if (!Token::IsArithmeticCompareOp(token)) return; |
| 2393 |
| 2394 HBasicBlock* other_target; |
| 2395 if (block == branch->SuccessorAt(0)) { |
| 2396 other_target = branch->SuccessorAt(1); |
| 2397 } else { |
| 2398 other_target = branch->SuccessorAt(0); |
| 2399 token = Token::NegateCompareOp(token); |
| 2400 ASSERT(block == branch->SuccessorAt(1)); |
| 2401 } |
| 2402 |
| 2403 InductionVariableData* data; |
| 2404 |
| 2405 data = GetInductionVariableData(branch->left()); |
| 2406 HValue* limit = branch->right(); |
| 2407 if (data == NULL) { |
| 2408 data = GetInductionVariableData(branch->right()); |
| 2409 token = Token::ReverseCompareOp(token); |
| 2410 limit = branch->left(); |
| 2411 } |
| 2412 |
| 2413 if (data != NULL) { |
| 2414 result->variable = data; |
| 2415 result->token = token; |
| 2416 result->limit = limit; |
| 2417 result->other_target = other_target; |
| 2418 } |
| 2419 } |
| 2420 |
| 2421 |
| 2422 /* |
| 2423 * Compute the limit that is imposed on an induction variable when entering |
| 2424 * "block" (if any). |
| 2425 * If the limit is the "proper" induction limit (the one that makes the loop |
| 2426 * terminate when the induction variable reaches it) it is stored directly in |
| 2427 * the induction variable data. |
| 2428 * Otherwise the limit is written in "additional_limit" and the method |
| 2429 * returns true. |
| 2430 */ |
| 2431 bool InductionVariableData::ComputeInductionVariableLimit( |
| 2432 HBasicBlock* block, |
| 2433 InductionVariableLimitUpdate* additional_limit) { |
| 2434 LimitFromPredecessorBlock limit; |
| 2435 ComputeLimitFromPredecessorBlock(block, &limit); |
| 2436 if (!limit.LimitIsValid()) return false; |
| 2437 |
| 2438 if (limit.variable->CheckIfBranchIsLoopGuard(limit.token, |
| 2439 block, |
| 2440 limit.other_target)) { |
| 2441 limit.variable->limit_ = limit.limit; |
| 2442 limit.variable->limit_included_ = limit.LimitIsIncluded(); |
| 2443 limit.variable->limit_validity_ = block; |
| 2444 limit.variable->induction_exit_block_ = block->predecessors()->at(0); |
| 2445 limit.variable->induction_exit_target_ = limit.other_target; |
| 2446 return false; |
| 2447 } else { |
| 2448 additional_limit->updated_variable = limit.variable; |
| 2449 additional_limit->limit = limit.limit; |
| 2450 additional_limit->limit_is_upper = limit.LimitIsUpper(); |
| 2451 additional_limit->limit_is_included = limit.LimitIsIncluded(); |
| 2452 return true; |
| 2453 } |
| 2454 } |
| 2455 |
| 2456 |
| 1977 Range* HMathMinMax::InferRange(Zone* zone) { | 2457 Range* HMathMinMax::InferRange(Zone* zone) { |
| 1978 if (representation().IsInteger32()) { | 2458 if (representation().IsSmiOrInteger32()) { |
| 1979 Range* a = left()->range(); | 2459 Range* a = left()->range(); |
| 1980 Range* b = right()->range(); | 2460 Range* b = right()->range(); |
| 1981 Range* res = a->Copy(zone); | 2461 Range* res = a->Copy(zone); |
| 1982 if (operation_ == kMathMax) { | 2462 if (operation_ == kMathMax) { |
| 1983 res->CombinedMax(b); | 2463 res->CombinedMax(b); |
| 1984 } else { | 2464 } else { |
| 1985 ASSERT(operation_ == kMathMin); | 2465 ASSERT(operation_ == kMathMin); |
| 1986 res->CombinedMin(b); | 2466 res->CombinedMin(b); |
| 1987 } | 2467 } |
| 1988 return res; | 2468 return res; |
| (...skipping 64 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2053 ASSERT(block() == NULL); | 2533 ASSERT(block() == NULL); |
| 2054 } | 2534 } |
| 2055 | 2535 |
| 2056 | 2536 |
| 2057 void HPhi::InitRealUses(int phi_id) { | 2537 void HPhi::InitRealUses(int phi_id) { |
| 2058 // Initialize real uses. | 2538 // Initialize real uses. |
| 2059 phi_id_ = phi_id; | 2539 phi_id_ = phi_id; |
| 2060 // Compute a conservative approximation of truncating uses before inferring | 2540 // Compute a conservative approximation of truncating uses before inferring |
| 2061 // representations. The proper, exact computation will be done later, when | 2541 // representations. The proper, exact computation will be done later, when |
| 2062 // inserting representation changes. | 2542 // inserting representation changes. |
| 2543 SetFlag(kTruncatingToSmi); |
| 2063 SetFlag(kTruncatingToInt32); | 2544 SetFlag(kTruncatingToInt32); |
| 2064 for (HUseIterator it(uses()); !it.Done(); it.Advance()) { | 2545 for (HUseIterator it(uses()); !it.Done(); it.Advance()) { |
| 2065 HValue* value = it.value(); | 2546 HValue* value = it.value(); |
| 2066 if (!value->IsPhi()) { | 2547 if (!value->IsPhi()) { |
| 2067 Representation rep = value->observed_input_representation(it.index()); | 2548 Representation rep = value->observed_input_representation(it.index()); |
| 2068 non_phi_uses_[rep.kind()] += value->LoopWeight(); | 2549 non_phi_uses_[rep.kind()] += value->LoopWeight(); |
| 2069 if (FLAG_trace_representation) { | 2550 if (FLAG_trace_representation) { |
| 2070 PrintF("#%d Phi is used by real #%d %s as %s\n", | 2551 PrintF("#%d Phi is used by real #%d %s as %s\n", |
| 2071 id(), value->id(), value->Mnemonic(), rep.Mnemonic()); | 2552 id(), value->id(), value->Mnemonic(), rep.Mnemonic()); |
| 2072 } | 2553 } |
| 2073 if (!value->IsSimulate() && !value->CheckFlag(kTruncatingToInt32)) { | 2554 if (!value->IsSimulate()) { |
| 2074 ClearFlag(kTruncatingToInt32); | 2555 if (!value->CheckFlag(kTruncatingToSmi)) { |
| 2556 ClearFlag(kTruncatingToSmi); |
| 2557 } |
| 2558 if (!value->CheckFlag(kTruncatingToInt32)) { |
| 2559 ClearFlag(kTruncatingToInt32); |
| 2560 } |
| 2075 } | 2561 } |
| 2076 } | 2562 } |
| 2077 } | 2563 } |
| 2078 } | 2564 } |
| 2079 | 2565 |
| 2080 | 2566 |
| 2081 void HPhi::AddNonPhiUsesFrom(HPhi* other) { | 2567 void HPhi::AddNonPhiUsesFrom(HPhi* other) { |
| 2082 if (FLAG_trace_representation) { | 2568 if (FLAG_trace_representation) { |
| 2083 PrintF("adding to #%d Phi uses of #%d Phi: s%d i%d d%d t%d\n", | 2569 PrintF("adding to #%d Phi uses of #%d Phi: s%d i%d d%d t%d\n", |
| 2084 id(), other->id(), | 2570 id(), other->id(), |
| (...skipping 50 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2135 } else { | 2621 } else { |
| 2136 stream->Add(" push "); | 2622 stream->Add(" push "); |
| 2137 } | 2623 } |
| 2138 values_[i]->PrintNameTo(stream); | 2624 values_[i]->PrintNameTo(stream); |
| 2139 if (i > 0) stream->Add(","); | 2625 if (i > 0) stream->Add(","); |
| 2140 } | 2626 } |
| 2141 } | 2627 } |
| 2142 } | 2628 } |
| 2143 | 2629 |
| 2144 | 2630 |
| 2145 void HDeoptimize::PrintDataTo(StringStream* stream) { | |
| 2146 if (OperandCount() == 0) return; | |
| 2147 OperandAt(0)->PrintNameTo(stream); | |
| 2148 for (int i = 1; i < OperandCount(); ++i) { | |
| 2149 stream->Add(" "); | |
| 2150 OperandAt(i)->PrintNameTo(stream); | |
| 2151 } | |
| 2152 } | |
| 2153 | |
| 2154 | |
| 2155 void HEnterInlined::RegisterReturnTarget(HBasicBlock* return_target, | 2631 void HEnterInlined::RegisterReturnTarget(HBasicBlock* return_target, |
| 2156 Zone* zone) { | 2632 Zone* zone) { |
| 2157 ASSERT(return_target->IsInlineReturnTarget()); | 2633 ASSERT(return_target->IsInlineReturnTarget()); |
| 2158 return_targets_.Add(return_target, zone); | 2634 return_targets_.Add(return_target, zone); |
| 2159 } | 2635 } |
| 2160 | 2636 |
| 2161 | 2637 |
| 2162 void HEnterInlined::PrintDataTo(StringStream* stream) { | 2638 void HEnterInlined::PrintDataTo(StringStream* stream) { |
| 2163 SmartArrayPointer<char> name = function()->debug_name()->ToCString(); | 2639 SmartArrayPointer<char> name = function()->debug_name()->ToCString(); |
| 2164 stream->Add("%s, id=%d", *name, function()->id().ToInt()); | 2640 stream->Add("%s, id=%d", *name, function()->id().ToInt()); |
| (...skipping 96 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2261 boolean_value_(double_value != 0 && !std::isnan(double_value)), | 2737 boolean_value_(double_value != 0 && !std::isnan(double_value)), |
| 2262 int32_value_(DoubleToInt32(double_value)), | 2738 int32_value_(DoubleToInt32(double_value)), |
| 2263 double_value_(double_value) { | 2739 double_value_(double_value) { |
| 2264 has_smi_value_ = has_int32_value_ && Smi::IsValid(int32_value_); | 2740 has_smi_value_ = has_int32_value_ && Smi::IsValid(int32_value_); |
| 2265 Initialize(r); | 2741 Initialize(r); |
| 2266 } | 2742 } |
| 2267 | 2743 |
| 2268 | 2744 |
| 2269 void HConstant::Initialize(Representation r) { | 2745 void HConstant::Initialize(Representation r) { |
| 2270 if (r.IsNone()) { | 2746 if (r.IsNone()) { |
| 2271 if (has_smi_value_) { | 2747 if (has_smi_value_ && kSmiValueSize == 31) { |
| 2272 r = Representation::Smi(); | 2748 r = Representation::Smi(); |
| 2273 } else if (has_int32_value_) { | 2749 } else if (has_int32_value_) { |
| 2274 r = Representation::Integer32(); | 2750 r = Representation::Integer32(); |
| 2275 } else if (has_double_value_) { | 2751 } else if (has_double_value_) { |
| 2276 r = Representation::Double(); | 2752 r = Representation::Double(); |
| 2277 } else { | 2753 } else { |
| 2278 r = Representation::Tagged(); | 2754 r = Representation::Tagged(); |
| 2279 } | 2755 } |
| 2280 } | 2756 } |
| 2281 set_representation(r); | 2757 set_representation(r); |
| (...skipping 27 matching lines...) Expand all Loading... |
| 2309 unique_id_, | 2785 unique_id_, |
| 2310 r, | 2786 r, |
| 2311 type_from_value_, | 2787 type_from_value_, |
| 2312 is_internalized_string_, | 2788 is_internalized_string_, |
| 2313 is_not_in_new_space_, | 2789 is_not_in_new_space_, |
| 2314 is_cell_, | 2790 is_cell_, |
| 2315 boolean_value_); | 2791 boolean_value_); |
| 2316 } | 2792 } |
| 2317 | 2793 |
| 2318 | 2794 |
| 2319 HConstant* HConstant::CopyToTruncatedInt32(Zone* zone) const { | 2795 Maybe<HConstant*> HConstant::CopyToTruncatedInt32(Zone* zone) { |
| 2796 HConstant* res = NULL; |
| 2320 if (has_int32_value_) { | 2797 if (has_int32_value_) { |
| 2321 return new(zone) HConstant(int32_value_, | 2798 res = new(zone) HConstant(int32_value_, |
| 2322 Representation::Integer32(), | 2799 Representation::Integer32(), |
| 2323 is_not_in_new_space_, | 2800 is_not_in_new_space_, |
| 2324 handle_); | 2801 handle_); |
| 2802 } else if (has_double_value_) { |
| 2803 res = new(zone) HConstant(DoubleToInt32(double_value_), |
| 2804 Representation::Integer32(), |
| 2805 is_not_in_new_space_, |
| 2806 handle_); |
| 2807 } else { |
| 2808 ASSERT(!HasNumberValue()); |
| 2809 Maybe<HConstant*> number = CopyToTruncatedNumber(zone); |
| 2810 if (number.has_value) return number.value->CopyToTruncatedInt32(zone); |
| 2325 } | 2811 } |
| 2326 if (has_double_value_) { | 2812 return Maybe<HConstant*>(res != NULL, res); |
| 2327 return new(zone) HConstant(DoubleToInt32(double_value_), | |
| 2328 Representation::Integer32(), | |
| 2329 is_not_in_new_space_, | |
| 2330 handle_); | |
| 2331 } | |
| 2332 return NULL; | |
| 2333 } | 2813 } |
| 2334 | 2814 |
| 2335 | 2815 |
| 2816 Maybe<HConstant*> HConstant::CopyToTruncatedNumber(Zone* zone) { |
| 2817 HConstant* res = NULL; |
| 2818 if (handle()->IsBoolean()) { |
| 2819 res = handle()->BooleanValue() ? |
| 2820 new(zone) HConstant(1) : new(zone) HConstant(0); |
| 2821 } else if (handle()->IsUndefined()) { |
| 2822 res = new(zone) HConstant(OS::nan_value()); |
| 2823 } else if (handle()->IsNull()) { |
| 2824 res = new(zone) HConstant(0); |
| 2825 } |
| 2826 return Maybe<HConstant*>(res != NULL, res); |
| 2827 } |
| 2828 |
| 2829 |
| 2336 void HConstant::PrintDataTo(StringStream* stream) { | 2830 void HConstant::PrintDataTo(StringStream* stream) { |
| 2337 if (has_int32_value_) { | 2831 if (has_int32_value_) { |
| 2338 stream->Add("%d ", int32_value_); | 2832 stream->Add("%d ", int32_value_); |
| 2339 } else if (has_double_value_) { | 2833 } else if (has_double_value_) { |
| 2340 stream->Add("%f ", FmtElm(double_value_)); | 2834 stream->Add("%f ", FmtElm(double_value_)); |
| 2341 } else { | 2835 } else { |
| 2342 handle()->ShortPrint(stream); | 2836 handle()->ShortPrint(stream); |
| 2343 } | 2837 } |
| 2344 } | 2838 } |
| 2345 | 2839 |
| 2346 | 2840 |
| 2347 void HBinaryOperation::PrintDataTo(StringStream* stream) { | 2841 void HBinaryOperation::PrintDataTo(StringStream* stream) { |
| 2348 left()->PrintNameTo(stream); | 2842 left()->PrintNameTo(stream); |
| 2349 stream->Add(" "); | 2843 stream->Add(" "); |
| 2350 right()->PrintNameTo(stream); | 2844 right()->PrintNameTo(stream); |
| 2351 if (CheckFlag(kCanOverflow)) stream->Add(" !"); | 2845 if (CheckFlag(kCanOverflow)) stream->Add(" !"); |
| 2352 if (CheckFlag(kBailoutOnMinusZero)) stream->Add(" -0?"); | 2846 if (CheckFlag(kBailoutOnMinusZero)) stream->Add(" -0?"); |
| 2353 } | 2847 } |
| 2354 | 2848 |
| 2355 | 2849 |
| 2356 void HBinaryOperation::InferRepresentation(HInferRepresentationPhase* h_infer) { | 2850 void HBinaryOperation::InferRepresentation(HInferRepresentationPhase* h_infer) { |
| 2357 ASSERT(CheckFlag(kFlexibleRepresentation)); | 2851 ASSERT(CheckFlag(kFlexibleRepresentation)); |
| 2358 Representation new_rep = RepresentationFromInputs(); | 2852 Representation new_rep = RepresentationFromInputs(); |
| 2359 UpdateRepresentation(new_rep, h_infer, "inputs"); | 2853 UpdateRepresentation(new_rep, h_infer, "inputs"); |
| 2360 // When the operation has information about its own output type, don't look | 2854 if (observed_output_representation_.IsNone()) { |
| 2361 // at uses. | 2855 new_rep = RepresentationFromUses(); |
| 2362 if (!observed_output_representation_.IsNone()) return; | 2856 UpdateRepresentation(new_rep, h_infer, "uses"); |
| 2363 new_rep = RepresentationFromUses(); | 2857 } else { |
| 2364 UpdateRepresentation(new_rep, h_infer, "uses"); | 2858 new_rep = RepresentationFromOutput(); |
| 2365 new_rep = RepresentationFromUseRequirements(); | 2859 UpdateRepresentation(new_rep, h_infer, "output"); |
| 2366 if (new_rep.fits_into(Representation::Integer32())) { | 2860 } |
| 2367 UpdateRepresentation(new_rep, h_infer, "use requirements"); | 2861 |
| 2862 if (representation().IsSmi() && HasNonSmiUse()) { |
| 2863 UpdateRepresentation( |
| 2864 Representation::Integer32(), h_infer, "use requirements"); |
| 2368 } | 2865 } |
| 2369 } | 2866 } |
| 2370 | 2867 |
| 2371 | 2868 |
| 2372 bool HBinaryOperation::IgnoreObservedOutputRepresentation( | |
| 2373 Representation current_rep) { | |
| 2374 return observed_output_representation_.IsDouble() && | |
| 2375 current_rep.IsInteger32() && | |
| 2376 // Mul in Integer32 mode would be too precise. | |
| 2377 !this->IsMul() && | |
| 2378 CheckUsesForFlag(kTruncatingToInt32); | |
| 2379 } | |
| 2380 | |
| 2381 | |
| 2382 Representation HBinaryOperation::RepresentationFromInputs() { | 2869 Representation HBinaryOperation::RepresentationFromInputs() { |
| 2383 // Determine the worst case of observed input representations and | 2870 // Determine the worst case of observed input representations and |
| 2384 // the currently assumed output representation. | 2871 // the currently assumed output representation. |
| 2385 Representation rep = representation(); | 2872 Representation rep = representation(); |
| 2386 for (int i = 1; i <= 2; ++i) { | 2873 for (int i = 1; i <= 2; ++i) { |
| 2387 Representation input_rep = observed_input_representation(i); | 2874 rep = rep.generalize(observed_input_representation(i)); |
| 2388 if (input_rep.is_more_general_than(rep)) rep = input_rep; | |
| 2389 } | 2875 } |
| 2390 // If any of the actual input representation is more general than what we | 2876 // If any of the actual input representation is more general than what we |
| 2391 // have so far but not Tagged, use that representation instead. | 2877 // have so far but not Tagged, use that representation instead. |
| 2392 Representation left_rep = left()->representation(); | 2878 Representation left_rep = left()->representation(); |
| 2393 Representation right_rep = right()->representation(); | 2879 Representation right_rep = right()->representation(); |
| 2880 if (!left_rep.IsTagged()) rep = rep.generalize(left_rep); |
| 2881 if (!right_rep.IsTagged()) rep = rep.generalize(right_rep); |
| 2394 | 2882 |
| 2395 if (left_rep.is_more_general_than(rep) && !left_rep.IsTagged()) { | 2883 return rep; |
| 2396 rep = left_rep; | 2884 } |
| 2397 } | 2885 |
| 2398 if (right_rep.is_more_general_than(rep) && !right_rep.IsTagged()) { | 2886 |
| 2399 rep = right_rep; | 2887 bool HBinaryOperation::IgnoreObservedOutputRepresentation( |
| 2400 } | 2888 Representation current_rep) { |
| 2889 return ((current_rep.IsInteger32() && CheckUsesForFlag(kTruncatingToInt32)) || |
| 2890 (current_rep.IsSmi() && CheckUsesForFlag(kTruncatingToSmi))) && |
| 2891 // Mul in Integer32 mode would be too precise. |
| 2892 !this->IsMul(); |
| 2893 } |
| 2894 |
| 2895 |
| 2896 Representation HBinaryOperation::RepresentationFromOutput() { |
| 2897 Representation rep = representation(); |
| 2401 // Consider observed output representation, but ignore it if it's Double, | 2898 // Consider observed output representation, but ignore it if it's Double, |
| 2402 // this instruction is not a division, and all its uses are truncating | 2899 // this instruction is not a division, and all its uses are truncating |
| 2403 // to Integer32. | 2900 // to Integer32. |
| 2404 if (observed_output_representation_.is_more_general_than(rep) && | 2901 if (observed_output_representation_.is_more_general_than(rep) && |
| 2405 !IgnoreObservedOutputRepresentation(rep)) { | 2902 !IgnoreObservedOutputRepresentation(rep)) { |
| 2406 rep = observed_output_representation_; | 2903 return observed_output_representation_; |
| 2407 } | 2904 } |
| 2408 return rep; | 2905 return Representation::None(); |
| 2409 } | 2906 } |
| 2410 | 2907 |
| 2411 | 2908 |
| 2412 void HBinaryOperation::AssumeRepresentation(Representation r) { | 2909 void HBinaryOperation::AssumeRepresentation(Representation r) { |
| 2413 set_observed_input_representation(1, r); | 2910 set_observed_input_representation(1, r); |
| 2414 set_observed_input_representation(2, r); | 2911 set_observed_input_representation(2, r); |
| 2415 HValue::AssumeRepresentation(r); | 2912 HValue::AssumeRepresentation(r); |
| 2416 } | 2913 } |
| 2417 | 2914 |
| 2418 | 2915 |
| (...skipping 295 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2714 } | 3211 } |
| 2715 if (FLAG_track_double_fields && | 3212 if (FLAG_track_double_fields && |
| 2716 lookup.representation().IsDouble()) { | 3213 lookup.representation().IsDouble()) { |
| 2717 // Since the value needs to be boxed, use a generic handler for | 3214 // Since the value needs to be boxed, use a generic handler for |
| 2718 // loading doubles. | 3215 // loading doubles. |
| 2719 continue; | 3216 continue; |
| 2720 } | 3217 } |
| 2721 types_.Add(types->at(i), zone); | 3218 types_.Add(types->at(i), zone); |
| 2722 break; | 3219 break; |
| 2723 } | 3220 } |
| 2724 case CONSTANT_FUNCTION: | 3221 case CONSTANT: |
| 2725 types_.Add(types->at(i), zone); | 3222 types_.Add(types->at(i), zone); |
| 2726 break; | 3223 break; |
| 2727 case CALLBACKS: | 3224 case CALLBACKS: |
| 2728 break; | 3225 break; |
| 2729 case TRANSITION: | 3226 case TRANSITION: |
| 2730 case INTERCEPTOR: | 3227 case INTERCEPTOR: |
| 2731 case NONEXISTENT: | 3228 case NONEXISTENT: |
| 2732 case NORMAL: | 3229 case NORMAL: |
| 2733 case HANDLER: | 3230 case HANDLER: |
| 2734 UNREACHABLE(); | 3231 UNREACHABLE(); |
| (...skipping 21 matching lines...) Expand all Loading... |
| 2756 // We don't have an easy way to handle both a call (to the generic stub) and | 3253 // We don't have an easy way to handle both a call (to the generic stub) and |
| 2757 // a deopt in the same hydrogen instruction, so in this case we don't add | 3254 // a deopt in the same hydrogen instruction, so in this case we don't add |
| 2758 // the negative lookups which can deopt - just let the generic stub handle | 3255 // the negative lookups which can deopt - just let the generic stub handle |
| 2759 // them. | 3256 // them. |
| 2760 SetAllSideEffects(); | 3257 SetAllSideEffects(); |
| 2761 need_generic_ = true; | 3258 need_generic_ = true; |
| 2762 } | 3259 } |
| 2763 } | 3260 } |
| 2764 | 3261 |
| 2765 | 3262 |
| 3263 HCheckMaps* HCheckMaps::New(HValue* value, |
| 3264 Handle<Map> map, |
| 3265 Zone* zone, |
| 3266 CompilationInfo* info, |
| 3267 HValue* typecheck) { |
| 3268 HCheckMaps* check_map = new(zone) HCheckMaps(value, zone, typecheck); |
| 3269 check_map->map_set_.Add(map, zone); |
| 3270 if (map->CanOmitMapChecks() && |
| 3271 value->IsConstant() && |
| 3272 HConstant::cast(value)->InstanceOf(map)) { |
| 3273 check_map->omit(info); |
| 3274 } |
| 3275 return check_map; |
| 3276 } |
| 3277 |
| 3278 |
| 3279 HCheckMaps* HCheckMaps::NewWithTransitions(HValue* value, |
| 3280 Handle<Map> map, |
| 3281 Zone* zone, |
| 3282 CompilationInfo* info) { |
| 3283 HCheckMaps* check_map = new(zone) HCheckMaps(value, zone, value); |
| 3284 check_map->map_set_.Add(map, zone); |
| 3285 |
| 3286 // Since transitioned elements maps of the initial map don't fail the map |
| 3287 // check, the CheckMaps instruction doesn't need to depend on ElementsKinds. |
| 3288 check_map->ClearGVNFlag(kDependsOnElementsKind); |
| 3289 |
| 3290 ElementsKind kind = map->elements_kind(); |
| 3291 bool packed = IsFastPackedElementsKind(kind); |
| 3292 while (CanTransitionToMoreGeneralFastElementsKind(kind, packed)) { |
| 3293 kind = GetNextMoreGeneralFastElementsKind(kind, packed); |
| 3294 Map* transitioned_map = |
| 3295 map->LookupElementsTransitionMap(kind); |
| 3296 if (transitioned_map) { |
| 3297 check_map->map_set_.Add(Handle<Map>(transitioned_map), zone); |
| 3298 } |
| 3299 }; |
| 3300 |
| 3301 if (map->CanOmitMapChecks() && |
| 3302 value->IsConstant() && |
| 3303 HConstant::cast(value)->InstanceOf(map)) { |
| 3304 check_map->omit(info); |
| 3305 } |
| 3306 |
| 3307 check_map->map_set_.Sort(); |
| 3308 return check_map; |
| 3309 } |
| 3310 |
| 3311 |
| 2766 void HCheckMaps::FinalizeUniqueValueId() { | 3312 void HCheckMaps::FinalizeUniqueValueId() { |
| 2767 if (!map_unique_ids_.is_empty()) return; | 3313 if (!map_unique_ids_.is_empty()) return; |
| 2768 Zone* zone = block()->zone(); | 3314 Zone* zone = block()->zone(); |
| 2769 map_unique_ids_.Initialize(map_set_.length(), zone); | 3315 map_unique_ids_.Initialize(map_set_.length(), zone); |
| 2770 for (int i = 0; i < map_set_.length(); i++) { | 3316 for (int i = 0; i < map_set_.length(); i++) { |
| 2771 map_unique_ids_.Add(UniqueValueId(map_set_.at(i)), zone); | 3317 map_unique_ids_.Add(UniqueValueId(map_set_.at(i)), zone); |
| 2772 } | 3318 } |
| 2773 } | 3319 } |
| 2774 | 3320 |
| 2775 | 3321 |
| (...skipping 410 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 3186 if (!input_rep.IsTagged()) rep = rep.generalize(input_rep); | 3732 if (!input_rep.IsTagged()) rep = rep.generalize(input_rep); |
| 3187 return rep; | 3733 return rep; |
| 3188 } | 3734 } |
| 3189 | 3735 |
| 3190 | 3736 |
| 3191 HType HStringCharFromCode::CalculateInferredType() { | 3737 HType HStringCharFromCode::CalculateInferredType() { |
| 3192 return HType::String(); | 3738 return HType::String(); |
| 3193 } | 3739 } |
| 3194 | 3740 |
| 3195 | 3741 |
| 3196 HType HAllocate::CalculateInferredType() { | |
| 3197 return type_; | |
| 3198 } | |
| 3199 | |
| 3200 | |
| 3201 void HAllocate::HandleSideEffectDominator(GVNFlag side_effect, | 3742 void HAllocate::HandleSideEffectDominator(GVNFlag side_effect, |
| 3202 HValue* dominator) { | 3743 HValue* dominator) { |
| 3203 ASSERT(side_effect == kChangesNewSpacePromotion); | 3744 ASSERT(side_effect == kChangesNewSpacePromotion); |
| 3204 if (!FLAG_use_allocation_folding) return; | 3745 if (!FLAG_use_allocation_folding) return; |
| 3205 | 3746 |
| 3206 // Try to fold allocations together with their dominating allocations. | 3747 // Try to fold allocations together with their dominating allocations. |
| 3207 if (!dominator->IsAllocate()) { | 3748 if (!dominator->IsAllocate()) { |
| 3208 if (FLAG_trace_allocation_folding) { | 3749 if (FLAG_trace_allocation_folding) { |
| 3209 PrintF("#%d (%s) cannot fold into #%d (%s)\n", | 3750 PrintF("#%d (%s) cannot fold into #%d (%s)\n", |
| 3210 id(), Mnemonic(), dominator->id(), dominator->Mnemonic()); | 3751 id(), Mnemonic(), dominator->id(), dominator->Mnemonic()); |
| 3211 } | 3752 } |
| 3212 return; | 3753 return; |
| 3213 } | 3754 } |
| 3214 | 3755 |
| 3215 HAllocate* dominator_allocate_instr = HAllocate::cast(dominator); | 3756 HAllocate* dominator_allocate_instr = HAllocate::cast(dominator); |
| 3216 HValue* dominator_size = dominator_allocate_instr->size(); | 3757 HValue* dominator_size = dominator_allocate_instr->size(); |
| 3217 HValue* current_size = size(); | 3758 HValue* current_size = size(); |
| 3218 // We can just fold allocations that are guaranteed in new space. | 3759 // We can just fold allocations that are guaranteed in new space. |
| 3219 // TODO(hpayer): Support double aligned allocations. | |
| 3220 // TODO(hpayer): Add support for non-constant allocation in dominator. | 3760 // TODO(hpayer): Add support for non-constant allocation in dominator. |
| 3221 if (!GuaranteedInNewSpace() || MustAllocateDoubleAligned() || | 3761 if (!GuaranteedInNewSpace() || !current_size->IsInteger32Constant() || |
| 3222 !current_size->IsInteger32Constant() || | |
| 3223 !dominator_allocate_instr->GuaranteedInNewSpace() || | 3762 !dominator_allocate_instr->GuaranteedInNewSpace() || |
| 3224 dominator_allocate_instr->MustAllocateDoubleAligned() || | |
| 3225 !dominator_size->IsInteger32Constant()) { | 3763 !dominator_size->IsInteger32Constant()) { |
| 3226 if (FLAG_trace_allocation_folding) { | 3764 if (FLAG_trace_allocation_folding) { |
| 3227 PrintF("#%d (%s) cannot fold into #%d (%s)\n", | 3765 PrintF("#%d (%s) cannot fold into #%d (%s)\n", |
| 3228 id(), Mnemonic(), dominator->id(), dominator->Mnemonic()); | 3766 id(), Mnemonic(), dominator->id(), dominator->Mnemonic()); |
| 3229 } | 3767 } |
| 3230 return; | 3768 return; |
| 3231 } | 3769 } |
| 3232 | 3770 |
| 3233 // First update the size of the dominator allocate instruction. | 3771 // First update the size of the dominator allocate instruction. |
| 3234 int32_t dominator_size_constant = | 3772 int32_t dominator_size_constant = |
| 3235 HConstant::cast(dominator_size)->GetInteger32Constant(); | 3773 HConstant::cast(dominator_size)->GetInteger32Constant(); |
| 3236 int32_t current_size_constant = | 3774 int32_t current_size_constant = |
| 3237 HConstant::cast(current_size)->GetInteger32Constant(); | 3775 HConstant::cast(current_size)->GetInteger32Constant(); |
| 3238 int32_t new_dominator_size = dominator_size_constant + current_size_constant; | 3776 int32_t new_dominator_size = dominator_size_constant + current_size_constant; |
| 3777 |
| 3778 if (MustAllocateDoubleAligned()) { |
| 3779 if (!dominator_allocate_instr->MustAllocateDoubleAligned()) { |
| 3780 dominator_allocate_instr->SetFlags(HAllocate::ALLOCATE_DOUBLE_ALIGNED); |
| 3781 } |
| 3782 if ((dominator_size_constant & kDoubleAlignmentMask) != 0) { |
| 3783 dominator_size_constant += kDoubleSize / 2; |
| 3784 new_dominator_size += kDoubleSize / 2; |
| 3785 } |
| 3786 } |
| 3787 |
| 3239 if (new_dominator_size > Page::kMaxNonCodeHeapObjectSize) { | 3788 if (new_dominator_size > Page::kMaxNonCodeHeapObjectSize) { |
| 3240 if (FLAG_trace_allocation_folding) { | 3789 if (FLAG_trace_allocation_folding) { |
| 3241 PrintF("#%d (%s) cannot fold into #%d (%s) due to size: %d\n", | 3790 PrintF("#%d (%s) cannot fold into #%d (%s) due to size: %d\n", |
| 3242 id(), Mnemonic(), dominator->id(), dominator->Mnemonic(), | 3791 id(), Mnemonic(), dominator->id(), dominator->Mnemonic(), |
| 3243 new_dominator_size); | 3792 new_dominator_size); |
| 3244 } | 3793 } |
| 3245 return; | 3794 return; |
| 3246 } | 3795 } |
| 3247 HBasicBlock* block = dominator->block(); | 3796 HBasicBlock* block = dominator->block(); |
| 3248 Zone* zone = block->zone(); | 3797 Zone* zone = block->zone(); |
| (...skipping 34 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 3283 | 3832 |
| 3284 | 3833 |
| 3285 HType HFunctionLiteral::CalculateInferredType() { | 3834 HType HFunctionLiteral::CalculateInferredType() { |
| 3286 return HType::JSObject(); | 3835 return HType::JSObject(); |
| 3287 } | 3836 } |
| 3288 | 3837 |
| 3289 | 3838 |
| 3290 HValue* HUnaryMathOperation::EnsureAndPropagateNotMinusZero( | 3839 HValue* HUnaryMathOperation::EnsureAndPropagateNotMinusZero( |
| 3291 BitVector* visited) { | 3840 BitVector* visited) { |
| 3292 visited->Add(id()); | 3841 visited->Add(id()); |
| 3293 if (representation().IsInteger32() && | 3842 if (representation().IsSmiOrInteger32() && |
| 3294 !value()->representation().IsInteger32()) { | 3843 !value()->representation().Equals(representation())) { |
| 3295 if (value()->range() == NULL || value()->range()->CanBeMinusZero()) { | 3844 if (value()->range() == NULL || value()->range()->CanBeMinusZero()) { |
| 3296 SetFlag(kBailoutOnMinusZero); | 3845 SetFlag(kBailoutOnMinusZero); |
| 3297 } | 3846 } |
| 3298 } | 3847 } |
| 3299 if (RequiredInputRepresentation(0).IsInteger32() && | 3848 if (RequiredInputRepresentation(0).IsSmiOrInteger32() && |
| 3300 representation().IsInteger32()) { | 3849 representation().Equals(RequiredInputRepresentation(0))) { |
| 3301 return value(); | 3850 return value(); |
| 3302 } | 3851 } |
| 3303 return NULL; | 3852 return NULL; |
| 3304 } | 3853 } |
| 3305 | 3854 |
| 3306 | 3855 |
| 3307 | |
| 3308 HValue* HChange::EnsureAndPropagateNotMinusZero(BitVector* visited) { | 3856 HValue* HChange::EnsureAndPropagateNotMinusZero(BitVector* visited) { |
| 3309 visited->Add(id()); | 3857 visited->Add(id()); |
| 3310 if (from().IsInteger32()) return NULL; | 3858 if (from().IsSmiOrInteger32()) return NULL; |
| 3311 if (CanTruncateToInt32()) return NULL; | 3859 if (CanTruncateToInt32()) return NULL; |
| 3312 if (value()->range() == NULL || value()->range()->CanBeMinusZero()) { | 3860 if (value()->range() == NULL || value()->range()->CanBeMinusZero()) { |
| 3313 SetFlag(kBailoutOnMinusZero); | 3861 SetFlag(kBailoutOnMinusZero); |
| 3314 } | 3862 } |
| 3315 ASSERT(!from().IsInteger32() || !to().IsInteger32()); | 3863 ASSERT(!from().IsSmiOrInteger32() || !to().IsSmiOrInteger32()); |
| 3316 return NULL; | 3864 return NULL; |
| 3317 } | 3865 } |
| 3318 | 3866 |
| 3319 | 3867 |
| 3320 HValue* HForceRepresentation::EnsureAndPropagateNotMinusZero( | 3868 HValue* HForceRepresentation::EnsureAndPropagateNotMinusZero( |
| 3321 BitVector* visited) { | 3869 BitVector* visited) { |
| 3322 visited->Add(id()); | 3870 visited->Add(id()); |
| 3323 return value(); | 3871 return value(); |
| 3324 } | 3872 } |
| 3325 | 3873 |
| (...skipping 66 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 3392 if (value()->IsConstant()) { | 3940 if (value()->IsConstant()) { |
| 3393 return false; | 3941 return false; |
| 3394 } | 3942 } |
| 3395 | 3943 |
| 3396 if (value()->IsLoadKeyed()) { | 3944 if (value()->IsLoadKeyed()) { |
| 3397 return IsExternalFloatOrDoubleElementsKind( | 3945 return IsExternalFloatOrDoubleElementsKind( |
| 3398 HLoadKeyed::cast(value())->elements_kind()); | 3946 HLoadKeyed::cast(value())->elements_kind()); |
| 3399 } | 3947 } |
| 3400 | 3948 |
| 3401 if (value()->IsChange()) { | 3949 if (value()->IsChange()) { |
| 3402 if (HChange::cast(value())->from().IsInteger32()) { | 3950 if (HChange::cast(value())->from().IsSmiOrInteger32()) { |
| 3403 return false; | 3951 return false; |
| 3404 } | 3952 } |
| 3405 if (HChange::cast(value())->value()->type().IsSmi()) { | 3953 if (HChange::cast(value())->value()->type().IsSmi()) { |
| 3406 return false; | 3954 return false; |
| 3407 } | 3955 } |
| 3408 } | 3956 } |
| 3409 return true; | 3957 return true; |
| 3410 } | 3958 } |
| 3411 | 3959 |
| 3412 | 3960 |
| 3413 #define H_CONSTANT_INT32(val) \ | 3961 #define H_CONSTANT_INT(val) \ |
| 3414 new(zone) HConstant(static_cast<int32_t>(val), Representation::Integer32()) | 3962 new(zone) HConstant(static_cast<int32_t>(val)) |
| 3415 #define H_CONSTANT_DOUBLE(val) \ | 3963 #define H_CONSTANT_DOUBLE(val) \ |
| 3416 new(zone) HConstant(static_cast<double>(val), Representation::Double()) | 3964 new(zone) HConstant(static_cast<double>(val), Representation::Double()) |
| 3417 | 3965 |
| 3418 #define DEFINE_NEW_H_SIMPLE_ARITHMETIC_INSTR(HInstr, op) \ | 3966 #define DEFINE_NEW_H_SIMPLE_ARITHMETIC_INSTR(HInstr, op) \ |
| 3419 HInstruction* HInstr::New( \ | 3967 HInstruction* HInstr::New( \ |
| 3420 Zone* zone, HValue* context, HValue* left, HValue* right) { \ | 3968 Zone* zone, HValue* context, HValue* left, HValue* right) { \ |
| 3421 if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) { \ | 3969 if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) { \ |
| 3422 HConstant* c_left = HConstant::cast(left); \ | 3970 HConstant* c_left = HConstant::cast(left); \ |
| 3423 HConstant* c_right = HConstant::cast(right); \ | 3971 HConstant* c_right = HConstant::cast(right); \ |
| 3424 if ((c_left->HasNumberValue() && c_right->HasNumberValue())) { \ | 3972 if ((c_left->HasNumberValue() && c_right->HasNumberValue())) { \ |
| 3425 double double_res = c_left->DoubleValue() op c_right->DoubleValue(); \ | 3973 double double_res = c_left->DoubleValue() op c_right->DoubleValue(); \ |
| 3426 if (TypeInfo::IsInt32Double(double_res)) { \ | 3974 if (TypeInfo::IsInt32Double(double_res)) { \ |
| 3427 return H_CONSTANT_INT32(double_res); \ | 3975 return H_CONSTANT_INT(double_res); \ |
| 3428 } \ | 3976 } \ |
| 3429 return H_CONSTANT_DOUBLE(double_res); \ | 3977 return H_CONSTANT_DOUBLE(double_res); \ |
| 3430 } \ | 3978 } \ |
| 3431 } \ | 3979 } \ |
| 3432 return new(zone) HInstr(context, left, right); \ | 3980 return new(zone) HInstr(context, left, right); \ |
| 3433 } | 3981 } |
| 3434 | 3982 |
| 3435 | 3983 |
| 3436 DEFINE_NEW_H_SIMPLE_ARITHMETIC_INSTR(HAdd, +) | 3984 DEFINE_NEW_H_SIMPLE_ARITHMETIC_INSTR(HAdd, +) |
| 3437 DEFINE_NEW_H_SIMPLE_ARITHMETIC_INSTR(HMul, *) | 3985 DEFINE_NEW_H_SIMPLE_ARITHMETIC_INSTR(HMul, *) |
| (...skipping 178 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 3616 int32_t dividend = c_left->Integer32Value(); | 4164 int32_t dividend = c_left->Integer32Value(); |
| 3617 int32_t divisor = c_right->Integer32Value(); | 4165 int32_t divisor = c_right->Integer32Value(); |
| 3618 if (dividend == kMinInt && divisor == -1) { | 4166 if (dividend == kMinInt && divisor == -1) { |
| 3619 return H_CONSTANT_DOUBLE(-0.0); | 4167 return H_CONSTANT_DOUBLE(-0.0); |
| 3620 } | 4168 } |
| 3621 if (divisor != 0) { | 4169 if (divisor != 0) { |
| 3622 int32_t res = dividend % divisor; | 4170 int32_t res = dividend % divisor; |
| 3623 if ((res == 0) && (dividend < 0)) { | 4171 if ((res == 0) && (dividend < 0)) { |
| 3624 return H_CONSTANT_DOUBLE(-0.0); | 4172 return H_CONSTANT_DOUBLE(-0.0); |
| 3625 } | 4173 } |
| 3626 return H_CONSTANT_INT32(res); | 4174 return H_CONSTANT_INT(res); |
| 3627 } | 4175 } |
| 3628 } | 4176 } |
| 3629 } | 4177 } |
| 3630 return new(zone) HMod(context, left, right, fixed_right_arg); | 4178 return new(zone) HMod(context, left, right, fixed_right_arg); |
| 3631 } | 4179 } |
| 3632 | 4180 |
| 3633 | 4181 |
| 3634 HInstruction* HDiv::New( | 4182 HInstruction* HDiv::New( |
| 3635 Zone* zone, HValue* context, HValue* left, HValue* right) { | 4183 Zone* zone, HValue* context, HValue* left, HValue* right) { |
| 3636 // If left and right are constant values, try to return a constant value. | 4184 // If left and right are constant values, try to return a constant value. |
| 3637 if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) { | 4185 if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) { |
| 3638 HConstant* c_left = HConstant::cast(left); | 4186 HConstant* c_left = HConstant::cast(left); |
| 3639 HConstant* c_right = HConstant::cast(right); | 4187 HConstant* c_right = HConstant::cast(right); |
| 3640 if ((c_left->HasNumberValue() && c_right->HasNumberValue())) { | 4188 if ((c_left->HasNumberValue() && c_right->HasNumberValue())) { |
| 3641 if (c_right->DoubleValue() != 0) { | 4189 if (c_right->DoubleValue() != 0) { |
| 3642 double double_res = c_left->DoubleValue() / c_right->DoubleValue(); | 4190 double double_res = c_left->DoubleValue() / c_right->DoubleValue(); |
| 3643 if (TypeInfo::IsInt32Double(double_res)) { | 4191 if (TypeInfo::IsInt32Double(double_res)) { |
| 3644 return H_CONSTANT_INT32(double_res); | 4192 return H_CONSTANT_INT(double_res); |
| 3645 } | 4193 } |
| 3646 return H_CONSTANT_DOUBLE(double_res); | 4194 return H_CONSTANT_DOUBLE(double_res); |
| 3647 } else { | 4195 } else { |
| 3648 int sign = Double(c_left->DoubleValue()).Sign() * | 4196 int sign = Double(c_left->DoubleValue()).Sign() * |
| 3649 Double(c_right->DoubleValue()).Sign(); // Right could be -0. | 4197 Double(c_right->DoubleValue()).Sign(); // Right could be -0. |
| 3650 return H_CONSTANT_DOUBLE(sign * V8_INFINITY); | 4198 return H_CONSTANT_DOUBLE(sign * V8_INFINITY); |
| 3651 } | 4199 } |
| 3652 } | 4200 } |
| 3653 } | 4201 } |
| 3654 return new(zone) HDiv(context, left, right); | 4202 return new(zone) HDiv(context, left, right); |
| (...skipping 16 matching lines...) Expand all Loading... |
| 3671 case Token::BIT_AND: | 4219 case Token::BIT_AND: |
| 3672 result = v_left & v_right; | 4220 result = v_left & v_right; |
| 3673 break; | 4221 break; |
| 3674 case Token::BIT_OR: | 4222 case Token::BIT_OR: |
| 3675 result = v_left | v_right; | 4223 result = v_left | v_right; |
| 3676 break; | 4224 break; |
| 3677 default: | 4225 default: |
| 3678 result = 0; // Please the compiler. | 4226 result = 0; // Please the compiler. |
| 3679 UNREACHABLE(); | 4227 UNREACHABLE(); |
| 3680 } | 4228 } |
| 3681 return H_CONSTANT_INT32(result); | 4229 return H_CONSTANT_INT(result); |
| 3682 } | 4230 } |
| 3683 } | 4231 } |
| 3684 return new(zone) HBitwise(op, context, left, right); | 4232 return new(zone) HBitwise(op, context, left, right); |
| 3685 } | 4233 } |
| 3686 | 4234 |
| 3687 | 4235 |
| 3688 #define DEFINE_NEW_H_BITWISE_INSTR(HInstr, result) \ | 4236 #define DEFINE_NEW_H_BITWISE_INSTR(HInstr, result) \ |
| 3689 HInstruction* HInstr::New( \ | 4237 HInstruction* HInstr::New( \ |
| 3690 Zone* zone, HValue* context, HValue* left, HValue* right) { \ | 4238 Zone* zone, HValue* context, HValue* left, HValue* right) { \ |
| 3691 if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) { \ | 4239 if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) { \ |
| 3692 HConstant* c_left = HConstant::cast(left); \ | 4240 HConstant* c_left = HConstant::cast(left); \ |
| 3693 HConstant* c_right = HConstant::cast(right); \ | 4241 HConstant* c_right = HConstant::cast(right); \ |
| 3694 if ((c_left->HasNumberValue() && c_right->HasNumberValue())) { \ | 4242 if ((c_left->HasNumberValue() && c_right->HasNumberValue())) { \ |
| 3695 return H_CONSTANT_INT32(result); \ | 4243 return H_CONSTANT_INT(result); \ |
| 3696 } \ | 4244 } \ |
| 3697 } \ | 4245 } \ |
| 3698 return new(zone) HInstr(context, left, right); \ | 4246 return new(zone) HInstr(context, left, right); \ |
| 3699 } | 4247 } |
| 3700 | 4248 |
| 3701 | 4249 |
| 3702 DEFINE_NEW_H_BITWISE_INSTR(HSar, | 4250 DEFINE_NEW_H_BITWISE_INSTR(HSar, |
| 3703 c_left->NumberValueAsInteger32() >> (c_right->NumberValueAsInteger32() & 0x1f)) | 4251 c_left->NumberValueAsInteger32() >> (c_right->NumberValueAsInteger32() & 0x1f)) |
| 3704 DEFINE_NEW_H_BITWISE_INSTR(HShl, | 4252 DEFINE_NEW_H_BITWISE_INSTR(HShl, |
| 3705 c_left->NumberValueAsInteger32() << (c_right->NumberValueAsInteger32() & 0x1f)) | 4253 c_left->NumberValueAsInteger32() << (c_right->NumberValueAsInteger32() & 0x1f)) |
| 3706 | 4254 |
| 3707 #undef DEFINE_NEW_H_BITWISE_INSTR | 4255 #undef DEFINE_NEW_H_BITWISE_INSTR |
| 3708 | 4256 |
| 3709 | 4257 |
| 3710 HInstruction* HShr::New( | 4258 HInstruction* HShr::New( |
| 3711 Zone* zone, HValue* context, HValue* left, HValue* right) { | 4259 Zone* zone, HValue* context, HValue* left, HValue* right) { |
| 3712 if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) { | 4260 if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) { |
| 3713 HConstant* c_left = HConstant::cast(left); | 4261 HConstant* c_left = HConstant::cast(left); |
| 3714 HConstant* c_right = HConstant::cast(right); | 4262 HConstant* c_right = HConstant::cast(right); |
| 3715 if ((c_left->HasNumberValue() && c_right->HasNumberValue())) { | 4263 if ((c_left->HasNumberValue() && c_right->HasNumberValue())) { |
| 3716 int32_t left_val = c_left->NumberValueAsInteger32(); | 4264 int32_t left_val = c_left->NumberValueAsInteger32(); |
| 3717 int32_t right_val = c_right->NumberValueAsInteger32() & 0x1f; | 4265 int32_t right_val = c_right->NumberValueAsInteger32() & 0x1f; |
| 3718 if ((right_val == 0) && (left_val < 0)) { | 4266 if ((right_val == 0) && (left_val < 0)) { |
| 3719 return H_CONSTANT_DOUBLE(static_cast<uint32_t>(left_val)); | 4267 return H_CONSTANT_DOUBLE(static_cast<uint32_t>(left_val)); |
| 3720 } | 4268 } |
| 3721 return H_CONSTANT_INT32(static_cast<uint32_t>(left_val) >> right_val); | 4269 return H_CONSTANT_INT(static_cast<uint32_t>(left_val) >> right_val); |
| 3722 } | 4270 } |
| 3723 } | 4271 } |
| 3724 return new(zone) HShr(context, left, right); | 4272 return new(zone) HShr(context, left, right); |
| 3725 } | 4273 } |
| 3726 | 4274 |
| 3727 | 4275 |
| 3728 #undef H_CONSTANT_INT32 | 4276 #undef H_CONSTANT_INT |
| 3729 #undef H_CONSTANT_DOUBLE | 4277 #undef H_CONSTANT_DOUBLE |
| 3730 | 4278 |
| 3731 | 4279 |
| 3732 void HBitwise::PrintDataTo(StringStream* stream) { | 4280 void HBitwise::PrintDataTo(StringStream* stream) { |
| 3733 stream->Add(Token::Name(op_)); | 4281 stream->Add(Token::Name(op_)); |
| 3734 stream->Add(" "); | 4282 stream->Add(" "); |
| 3735 HBitwiseBinaryOperation::PrintDataTo(stream); | 4283 HBitwiseBinaryOperation::PrintDataTo(stream); |
| 3736 } | 4284 } |
| 3737 | 4285 |
| 3738 | 4286 |
| 3739 void HPhi::SimplifyConstantInputs() { | 4287 void HPhi::SimplifyConstantInputs() { |
| 3740 // Convert constant inputs to integers when all uses are truncating. | 4288 // Convert constant inputs to integers when all uses are truncating. |
| 3741 // This must happen before representation inference takes place. | 4289 // This must happen before representation inference takes place. |
| 3742 if (!CheckUsesForFlag(kTruncatingToInt32)) return; | 4290 if (!CheckUsesForFlag(kTruncatingToInt32)) return; |
| 3743 for (int i = 0; i < OperandCount(); ++i) { | 4291 for (int i = 0; i < OperandCount(); ++i) { |
| 3744 if (!OperandAt(i)->IsConstant()) return; | 4292 if (!OperandAt(i)->IsConstant()) return; |
| 3745 } | 4293 } |
| 3746 HGraph* graph = block()->graph(); | 4294 HGraph* graph = block()->graph(); |
| 3747 for (int i = 0; i < OperandCount(); ++i) { | 4295 for (int i = 0; i < OperandCount(); ++i) { |
| 3748 HConstant* operand = HConstant::cast(OperandAt(i)); | 4296 HConstant* operand = HConstant::cast(OperandAt(i)); |
| 3749 if (operand->HasInteger32Value()) { | 4297 if (operand->HasInteger32Value()) { |
| 3750 continue; | 4298 continue; |
| 3751 } else if (operand->HasDoubleValue()) { | 4299 } else if (operand->HasDoubleValue()) { |
| 3752 HConstant* integer_input = | 4300 HConstant* integer_input = |
| 3753 new(graph->zone()) HConstant(DoubleToInt32(operand->DoubleValue()), | 4301 new(graph->zone()) HConstant(DoubleToInt32(operand->DoubleValue())); |
| 3754 Representation::Integer32()); | |
| 3755 integer_input->InsertAfter(operand); | 4302 integer_input->InsertAfter(operand); |
| 3756 SetOperandAt(i, integer_input); | 4303 SetOperandAt(i, integer_input); |
| 3757 } else if (operand == graph->GetConstantTrue()) { | 4304 } else if (operand == graph->GetConstantTrue()) { |
| 3758 SetOperandAt(i, graph->GetConstant1()); | 4305 SetOperandAt(i, graph->GetConstant1()); |
| 3759 } else { | 4306 } else { |
| 3760 // This catches |false|, |undefined|, strings and objects. | 4307 // This catches |false|, |undefined|, strings and objects. |
| 3761 SetOperandAt(i, graph->GetConstant0()); | 4308 SetOperandAt(i, graph->GetConstant0()); |
| 3762 } | 4309 } |
| 3763 } | 4310 } |
| 3764 // Overwrite observed input representations because they are likely Tagged. | 4311 // Overwrite observed input representations because they are likely Tagged. |
| 3765 for (HUseIterator it(uses()); !it.Done(); it.Advance()) { | 4312 for (HUseIterator it(uses()); !it.Done(); it.Advance()) { |
| 3766 HValue* use = it.value(); | 4313 HValue* use = it.value(); |
| 3767 if (use->IsBinaryOperation()) { | 4314 if (use->IsBinaryOperation()) { |
| 3768 HBinaryOperation::cast(use)->set_observed_input_representation( | 4315 HBinaryOperation::cast(use)->set_observed_input_representation( |
| 3769 it.index(), Representation::Integer32()); | 4316 it.index(), Representation::Smi()); |
| 3770 } | 4317 } |
| 3771 } | 4318 } |
| 3772 } | 4319 } |
| 3773 | 4320 |
| 3774 | 4321 |
| 3775 void HPhi::InferRepresentation(HInferRepresentationPhase* h_infer) { | 4322 void HPhi::InferRepresentation(HInferRepresentationPhase* h_infer) { |
| 3776 ASSERT(CheckFlag(kFlexibleRepresentation)); | 4323 ASSERT(CheckFlag(kFlexibleRepresentation)); |
| 3777 Representation new_rep = RepresentationFromInputs(); | 4324 Representation new_rep = RepresentationFromInputs(); |
| 3778 UpdateRepresentation(new_rep, h_infer, "inputs"); | 4325 UpdateRepresentation(new_rep, h_infer, "inputs"); |
| 3779 new_rep = RepresentationFromUses(); | 4326 new_rep = RepresentationFromUses(); |
| (...skipping 28 matching lines...) Expand all Loading... |
| 3808 if (rep.generalize(use_rep).IsInteger32()) { | 4355 if (rep.generalize(use_rep).IsInteger32()) { |
| 3809 rep = Representation::Integer32(); | 4356 rep = Representation::Integer32(); |
| 3810 continue; | 4357 continue; |
| 3811 } | 4358 } |
| 3812 return Representation::None(); | 4359 return Representation::None(); |
| 3813 } | 4360 } |
| 3814 return rep; | 4361 return rep; |
| 3815 } | 4362 } |
| 3816 | 4363 |
| 3817 | 4364 |
| 4365 bool HValue::HasNonSmiUse() { |
| 4366 for (HUseIterator it(uses()); !it.Done(); it.Advance()) { |
| 4367 // We check for observed_input_representation elsewhere. |
| 4368 Representation use_rep = |
| 4369 it.value()->RequiredInputRepresentation(it.index()); |
| 4370 if (!use_rep.IsNone() && !use_rep.IsSmi()) return true; |
| 4371 } |
| 4372 return false; |
| 4373 } |
| 4374 |
| 4375 |
| 3818 // Node-specific verification code is only included in debug mode. | 4376 // Node-specific verification code is only included in debug mode. |
| 3819 #ifdef DEBUG | 4377 #ifdef DEBUG |
| 3820 | 4378 |
| 3821 void HPhi::Verify() { | 4379 void HPhi::Verify() { |
| 3822 ASSERT(OperandCount() == block()->predecessors()->length()); | 4380 ASSERT(OperandCount() == block()->predecessors()->length()); |
| 3823 for (int i = 0; i < OperandCount(); ++i) { | 4381 for (int i = 0; i < OperandCount(); ++i) { |
| 3824 HValue* value = OperandAt(i); | 4382 HValue* value = OperandAt(i); |
| 3825 HBasicBlock* defining_block = value->block(); | 4383 HBasicBlock* defining_block = value->block(); |
| 3826 HBasicBlock* predecessor_block = block()->predecessors()->at(i); | 4384 HBasicBlock* predecessor_block = block()->predecessors()->at(i); |
| 3827 ASSERT(defining_block == predecessor_block || | 4385 ASSERT(defining_block == predecessor_block || |
| (...skipping 23 matching lines...) Expand all Loading... |
| 3851 | 4409 |
| 3852 | 4410 |
| 3853 HObjectAccess HObjectAccess::ForFixedArrayHeader(int offset) { | 4411 HObjectAccess HObjectAccess::ForFixedArrayHeader(int offset) { |
| 3854 ASSERT(offset >= 0); | 4412 ASSERT(offset >= 0); |
| 3855 ASSERT(offset < FixedArray::kHeaderSize); | 4413 ASSERT(offset < FixedArray::kHeaderSize); |
| 3856 if (offset == FixedArray::kLengthOffset) return ForFixedArrayLength(); | 4414 if (offset == FixedArray::kLengthOffset) return ForFixedArrayLength(); |
| 3857 return HObjectAccess(kInobject, offset); | 4415 return HObjectAccess(kInobject, offset); |
| 3858 } | 4416 } |
| 3859 | 4417 |
| 3860 | 4418 |
| 3861 HObjectAccess HObjectAccess::ForJSObjectOffset(int offset) { | 4419 HObjectAccess HObjectAccess::ForJSObjectOffset(int offset, |
| 4420 Representation representation) { |
| 3862 ASSERT(offset >= 0); | 4421 ASSERT(offset >= 0); |
| 3863 Portion portion = kInobject; | 4422 Portion portion = kInobject; |
| 3864 | 4423 |
| 3865 if (offset == JSObject::kElementsOffset) { | 4424 if (offset == JSObject::kElementsOffset) { |
| 3866 portion = kElementsPointer; | 4425 portion = kElementsPointer; |
| 3867 } else if (offset == JSObject::kMapOffset) { | 4426 } else if (offset == JSObject::kMapOffset) { |
| 3868 portion = kMaps; | 4427 portion = kMaps; |
| 3869 } | 4428 } |
| 3870 return HObjectAccess(portion, offset, Handle<String>::null()); | 4429 return HObjectAccess(portion, offset, representation); |
| 3871 } | 4430 } |
| 3872 | 4431 |
| 3873 | 4432 |
| 3874 HObjectAccess HObjectAccess::ForJSArrayOffset(int offset) { | 4433 HObjectAccess HObjectAccess::ForJSArrayOffset(int offset) { |
| 3875 ASSERT(offset >= 0); | 4434 ASSERT(offset >= 0); |
| 3876 Portion portion = kInobject; | 4435 Portion portion = kInobject; |
| 3877 | 4436 |
| 3878 if (offset == JSObject::kElementsOffset) { | 4437 if (offset == JSObject::kElementsOffset) { |
| 3879 portion = kElementsPointer; | 4438 portion = kElementsPointer; |
| 3880 } else if (offset == JSArray::kLengthOffset) { | 4439 } else if (offset == JSArray::kLengthOffset) { |
| 3881 portion = kArrayLengths; | 4440 portion = kArrayLengths; |
| 3882 } else if (offset == JSObject::kMapOffset) { | 4441 } else if (offset == JSObject::kMapOffset) { |
| 3883 portion = kMaps; | 4442 portion = kMaps; |
| 3884 } | 4443 } |
| 3885 return HObjectAccess(portion, offset, Handle<String>::null()); | 4444 return HObjectAccess(portion, offset); |
| 3886 } | 4445 } |
| 3887 | 4446 |
| 3888 | 4447 |
| 3889 HObjectAccess HObjectAccess::ForBackingStoreOffset(int offset) { | 4448 HObjectAccess HObjectAccess::ForBackingStoreOffset(int offset, |
| 4449 Representation representation) { |
| 3890 ASSERT(offset >= 0); | 4450 ASSERT(offset >= 0); |
| 3891 return HObjectAccess(kBackingStore, offset, Handle<String>::null()); | 4451 return HObjectAccess(kBackingStore, offset, representation); |
| 3892 } | 4452 } |
| 3893 | 4453 |
| 3894 | 4454 |
| 3895 HObjectAccess HObjectAccess::ForField(Handle<Map> map, | 4455 HObjectAccess HObjectAccess::ForField(Handle<Map> map, |
| 3896 LookupResult *lookup, Handle<String> name) { | 4456 LookupResult *lookup, Handle<String> name) { |
| 3897 ASSERT(lookup->IsField() || lookup->IsTransitionToField(*map)); | 4457 ASSERT(lookup->IsField() || lookup->IsTransitionToField(*map)); |
| 3898 int index; | 4458 int index; |
| 4459 Representation representation; |
| 3899 if (lookup->IsField()) { | 4460 if (lookup->IsField()) { |
| 3900 index = lookup->GetLocalFieldIndexFromMap(*map); | 4461 index = lookup->GetLocalFieldIndexFromMap(*map); |
| 4462 representation = lookup->representation(); |
| 3901 } else { | 4463 } else { |
| 3902 Map* transition = lookup->GetTransitionMapFromMap(*map); | 4464 Map* transition = lookup->GetTransitionMapFromMap(*map); |
| 3903 int descriptor = transition->LastAdded(); | 4465 int descriptor = transition->LastAdded(); |
| 3904 index = transition->instance_descriptors()->GetFieldIndex(descriptor) - | 4466 index = transition->instance_descriptors()->GetFieldIndex(descriptor) - |
| 3905 map->inobject_properties(); | 4467 map->inobject_properties(); |
| 4468 PropertyDetails details = |
| 4469 transition->instance_descriptors()->GetDetails(descriptor); |
| 4470 representation = details.representation(); |
| 3906 } | 4471 } |
| 3907 if (index < 0) { | 4472 if (index < 0) { |
| 3908 // Negative property indices are in-object properties, indexed | 4473 // Negative property indices are in-object properties, indexed |
| 3909 // from the end of the fixed part of the object. | 4474 // from the end of the fixed part of the object. |
| 3910 int offset = (index * kPointerSize) + map->instance_size(); | 4475 int offset = (index * kPointerSize) + map->instance_size(); |
| 3911 return HObjectAccess(kInobject, offset); | 4476 return HObjectAccess(kInobject, offset, representation); |
| 3912 } else { | 4477 } else { |
| 3913 // Non-negative property indices are in the properties array. | 4478 // Non-negative property indices are in the properties array. |
| 3914 int offset = (index * kPointerSize) + FixedArray::kHeaderSize; | 4479 int offset = (index * kPointerSize) + FixedArray::kHeaderSize; |
| 3915 return HObjectAccess(kBackingStore, offset, name); | 4480 return HObjectAccess(kBackingStore, offset, representation, name); |
| 3916 } | 4481 } |
| 3917 } | 4482 } |
| 3918 | 4483 |
| 3919 | 4484 |
| 3920 HObjectAccess HObjectAccess::ForCellPayload(Isolate* isolate) { | 4485 HObjectAccess HObjectAccess::ForCellPayload(Isolate* isolate) { |
| 3921 return HObjectAccess( | 4486 return HObjectAccess( |
| 3922 kInobject, Cell::kValueOffset, | 4487 kInobject, Cell::kValueOffset, Representation::Tagged(), |
| 3923 Handle<String>(isolate->heap()->cell_value_string())); | 4488 Handle<String>(isolate->heap()->cell_value_string())); |
| 3924 } | 4489 } |
| 3925 | 4490 |
| 3926 | 4491 |
| 3927 void HObjectAccess::SetGVNFlags(HValue *instr, bool is_store) { | 4492 void HObjectAccess::SetGVNFlags(HValue *instr, bool is_store) { |
| 3928 // set the appropriate GVN flags for a given load or store instruction | 4493 // set the appropriate GVN flags for a given load or store instruction |
| 3929 if (is_store) { | 4494 if (is_store) { |
| 3930 // track dominating allocations in order to eliminate write barriers | 4495 // track dominating allocations in order to eliminate write barriers |
| 3931 instr->SetGVNFlag(kDependsOnNewSpacePromotion); | 4496 instr->SetGVNFlag(kDependsOnNewSpacePromotion); |
| 3932 instr->SetFlag(HValue::kTrackSideEffectDominators); | 4497 instr->SetFlag(HValue::kTrackSideEffectDominators); |
| (...skipping 53 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 3986 case kBackingStore: | 4551 case kBackingStore: |
| 3987 if (!name_.is_null()) stream->Add(*String::cast(*name_)->ToCString()); | 4552 if (!name_.is_null()) stream->Add(*String::cast(*name_)->ToCString()); |
| 3988 stream->Add("[backing-store]"); | 4553 stream->Add("[backing-store]"); |
| 3989 break; | 4554 break; |
| 3990 } | 4555 } |
| 3991 | 4556 |
| 3992 stream->Add("@%d", offset()); | 4557 stream->Add("@%d", offset()); |
| 3993 } | 4558 } |
| 3994 | 4559 |
| 3995 } } // namespace v8::internal | 4560 } } // namespace v8::internal |
| OLD | NEW |