OLD | NEW |
1 // Copyright 2009 the V8 project authors. All rights reserved. | 1 // Copyright 2009 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 432 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
443 ASSERT(in_spilled_code()); | 443 ASSERT(in_spilled_code()); |
444 set_in_spilled_code(false); | 444 set_in_spilled_code(false); |
445 Visit(statement); | 445 Visit(statement); |
446 if (frame_ != NULL) { | 446 if (frame_ != NULL) { |
447 frame_->SpillAll(); | 447 frame_->SpillAll(); |
448 } | 448 } |
449 set_in_spilled_code(true); | 449 set_in_spilled_code(true); |
450 } | 450 } |
451 | 451 |
452 | 452 |
| 453 void CodeGenerator::VisitStatementsAndSpill(ZoneList<Statement*>* statements) { |
| 454 ASSERT(in_spilled_code()); |
| 455 set_in_spilled_code(false); |
| 456 VisitStatements(statements); |
| 457 if (frame_ != NULL) { |
| 458 frame_->SpillAll(); |
| 459 } |
| 460 set_in_spilled_code(true); |
| 461 } |
| 462 |
| 463 |
453 void CodeGenerator::VisitStatements(ZoneList<Statement*>* statements) { | 464 void CodeGenerator::VisitStatements(ZoneList<Statement*>* statements) { |
454 ASSERT(!in_spilled_code()); | 465 ASSERT(!in_spilled_code()); |
455 for (int i = 0; has_valid_frame() && i < statements->length(); i++) { | 466 for (int i = 0; has_valid_frame() && i < statements->length(); i++) { |
456 Visit(statements->at(i)); | 467 Visit(statements->at(i)); |
457 } | 468 } |
458 } | 469 } |
459 | 470 |
460 | 471 |
461 void CodeGenerator::VisitBlock(Block* node) { | 472 void CodeGenerator::VisitBlock(Block* node) { |
462 ASSERT(!in_spilled_code()); | 473 ASSERT(!in_spilled_code()); |
(...skipping 185 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
648 frame_->Drop(); | 659 frame_->Drop(); |
649 } | 660 } |
650 } | 661 } |
651 | 662 |
652 if (exit.is_linked()) { | 663 if (exit.is_linked()) { |
653 exit.Bind(); | 664 exit.Bind(); |
654 } | 665 } |
655 } | 666 } |
656 | 667 |
657 | 668 |
658 void CodeGenerator::VisitContinueStatement(ContinueStatement* a) { | 669 void CodeGenerator::VisitContinueStatement(ContinueStatement* node) { |
659 UNIMPLEMENTED(); | 670 ASSERT(!in_spilled_code()); |
660 } | 671 Comment cmnt(masm_, "[ ContinueStatement"); |
661 | 672 CodeForStatementPosition(node); |
662 void CodeGenerator::VisitBreakStatement(BreakStatement* a) { | 673 node->target()->continue_target()->Jump(); |
663 UNIMPLEMENTED(); | |
664 } | 674 } |
665 | 675 |
666 | 676 |
| 677 void CodeGenerator::VisitBreakStatement(BreakStatement* node) { |
| 678 ASSERT(!in_spilled_code()); |
| 679 Comment cmnt(masm_, "[ BreakStatement"); |
| 680 CodeForStatementPosition(node); |
| 681 node->target()->break_target()->Jump(); |
| 682 } |
| 683 |
| 684 |
667 void CodeGenerator::VisitReturnStatement(ReturnStatement* node) { | 685 void CodeGenerator::VisitReturnStatement(ReturnStatement* node) { |
668 ASSERT(!in_spilled_code()); | 686 ASSERT(!in_spilled_code()); |
669 Comment cmnt(masm_, "[ ReturnStatement"); | 687 Comment cmnt(masm_, "[ ReturnStatement"); |
670 | 688 |
671 CodeForStatementPosition(node); | 689 CodeForStatementPosition(node); |
672 Load(node->expression()); | 690 Load(node->expression()); |
673 Result return_value = frame_->Pop(); | 691 Result return_value = frame_->Pop(); |
674 if (function_return_is_shadowed_) { | 692 if (function_return_is_shadowed_) { |
675 function_return_.Jump(&return_value); | 693 function_return_.Jump(&return_value); |
676 } else { | 694 } else { |
(...skipping 685 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1362 node->break_target()->Bind(); | 1380 node->break_target()->Bind(); |
1363 frame_->Drop(5); | 1381 frame_->Drop(5); |
1364 | 1382 |
1365 // Exit. | 1383 // Exit. |
1366 exit.Bind(); | 1384 exit.Bind(); |
1367 | 1385 |
1368 node->continue_target()->Unuse(); | 1386 node->continue_target()->Unuse(); |
1369 node->break_target()->Unuse(); | 1387 node->break_target()->Unuse(); |
1370 } | 1388 } |
1371 | 1389 |
1372 void CodeGenerator::VisitTryCatch(TryCatch* a) { | 1390 void CodeGenerator::VisitTryCatch(TryCatch* node) { |
1373 UNIMPLEMENTED(); | 1391 ASSERT(!in_spilled_code()); |
| 1392 VirtualFrame::SpilledScope spilled_scope; |
| 1393 Comment cmnt(masm_, "[ TryCatch"); |
| 1394 CodeForStatementPosition(node); |
| 1395 |
| 1396 JumpTarget try_block; |
| 1397 JumpTarget exit; |
| 1398 |
| 1399 try_block.Call(); |
| 1400 // --- Catch block --- |
| 1401 frame_->EmitPush(rax); |
| 1402 |
| 1403 // Store the caught exception in the catch variable. |
| 1404 { Reference ref(this, node->catch_var()); |
| 1405 ASSERT(ref.is_slot()); |
| 1406 // Load the exception to the top of the stack. Here we make use of the |
| 1407 // convenient property that it doesn't matter whether a value is |
| 1408 // immediately on top of or underneath a zero-sized reference. |
| 1409 ref.SetValue(NOT_CONST_INIT); |
| 1410 } |
| 1411 |
| 1412 // Remove the exception from the stack. |
| 1413 frame_->Drop(); |
| 1414 |
| 1415 VisitStatementsAndSpill(node->catch_block()->statements()); |
| 1416 if (has_valid_frame()) { |
| 1417 exit.Jump(); |
| 1418 } |
| 1419 |
| 1420 |
| 1421 // --- Try block --- |
| 1422 try_block.Bind(); |
| 1423 |
| 1424 frame_->PushTryHandler(TRY_CATCH_HANDLER); |
| 1425 int handler_height = frame_->height(); |
| 1426 |
| 1427 // Shadow the jump targets for all escapes from the try block, including |
| 1428 // returns. During shadowing, the original target is hidden as the |
| 1429 // ShadowTarget and operations on the original actually affect the |
| 1430 // shadowing target. |
| 1431 // |
| 1432 // We should probably try to unify the escaping targets and the return |
| 1433 // target. |
| 1434 int nof_escapes = node->escaping_targets()->length(); |
| 1435 List<ShadowTarget*> shadows(1 + nof_escapes); |
| 1436 |
| 1437 // Add the shadow target for the function return. |
| 1438 static const int kReturnShadowIndex = 0; |
| 1439 shadows.Add(new ShadowTarget(&function_return_)); |
| 1440 bool function_return_was_shadowed = function_return_is_shadowed_; |
| 1441 function_return_is_shadowed_ = true; |
| 1442 ASSERT(shadows[kReturnShadowIndex]->other_target() == &function_return_); |
| 1443 |
| 1444 // Add the remaining shadow targets. |
| 1445 for (int i = 0; i < nof_escapes; i++) { |
| 1446 shadows.Add(new ShadowTarget(node->escaping_targets()->at(i))); |
| 1447 } |
| 1448 |
| 1449 // Generate code for the statements in the try block. |
| 1450 VisitStatementsAndSpill(node->try_block()->statements()); |
| 1451 |
| 1452 // Stop the introduced shadowing and count the number of required unlinks. |
| 1453 // After shadowing stops, the original targets are unshadowed and the |
| 1454 // ShadowTargets represent the formerly shadowing targets. |
| 1455 bool has_unlinks = false; |
| 1456 for (int i = 0; i < shadows.length(); i++) { |
| 1457 shadows[i]->StopShadowing(); |
| 1458 has_unlinks = has_unlinks || shadows[i]->is_linked(); |
| 1459 } |
| 1460 function_return_is_shadowed_ = function_return_was_shadowed; |
| 1461 |
| 1462 // Get an external reference to the handler address. |
| 1463 ExternalReference handler_address(Top::k_handler_address); |
| 1464 |
| 1465 // Make sure that there's nothing left on the stack above the |
| 1466 // handler structure. |
| 1467 if (FLAG_debug_code) { |
| 1468 __ movq(kScratchRegister, handler_address); |
| 1469 __ cmpq(rsp, Operand(kScratchRegister, 0)); |
| 1470 __ Assert(equal, "stack pointer should point to top handler"); |
| 1471 } |
| 1472 |
| 1473 // If we can fall off the end of the try block, unlink from try chain. |
| 1474 if (has_valid_frame()) { |
| 1475 // The next handler address is on top of the frame. Unlink from |
| 1476 // the handler list and drop the rest of this handler from the |
| 1477 // frame. |
| 1478 ASSERT(StackHandlerConstants::kNextOffset == 0); |
| 1479 __ movq(kScratchRegister, handler_address); |
| 1480 frame_->EmitPop(Operand(kScratchRegister, 0)); |
| 1481 frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1); |
| 1482 if (has_unlinks) { |
| 1483 exit.Jump(); |
| 1484 } |
| 1485 } |
| 1486 |
| 1487 // Generate unlink code for the (formerly) shadowing targets that |
| 1488 // have been jumped to. Deallocate each shadow target. |
| 1489 Result return_value; |
| 1490 for (int i = 0; i < shadows.length(); i++) { |
| 1491 if (shadows[i]->is_linked()) { |
| 1492 // Unlink from try chain; be careful not to destroy the TOS if |
| 1493 // there is one. |
| 1494 if (i == kReturnShadowIndex) { |
| 1495 shadows[i]->Bind(&return_value); |
| 1496 return_value.ToRegister(rax); |
| 1497 } else { |
| 1498 shadows[i]->Bind(); |
| 1499 } |
| 1500 // Because we can be jumping here (to spilled code) from |
| 1501 // unspilled code, we need to reestablish a spilled frame at |
| 1502 // this block. |
| 1503 frame_->SpillAll(); |
| 1504 |
| 1505 // Reload sp from the top handler, because some statements that we |
| 1506 // break from (eg, for...in) may have left stuff on the stack. |
| 1507 __ movq(kScratchRegister, handler_address); |
| 1508 __ movq(rsp, Operand(kScratchRegister, 0)); |
| 1509 frame_->Forget(frame_->height() - handler_height); |
| 1510 |
| 1511 ASSERT(StackHandlerConstants::kNextOffset == 0); |
| 1512 __ movq(kScratchRegister, handler_address); |
| 1513 frame_->EmitPop(Operand(kScratchRegister, 0)); |
| 1514 frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1); |
| 1515 |
| 1516 if (i == kReturnShadowIndex) { |
| 1517 if (!function_return_is_shadowed_) frame_->PrepareForReturn(); |
| 1518 shadows[i]->other_target()->Jump(&return_value); |
| 1519 } else { |
| 1520 shadows[i]->other_target()->Jump(); |
| 1521 } |
| 1522 } |
| 1523 } |
| 1524 |
| 1525 exit.Bind(); |
1374 } | 1526 } |
1375 | 1527 |
1376 void CodeGenerator::VisitTryFinally(TryFinally* a) { | 1528 |
1377 UNIMPLEMENTED(); | 1529 void CodeGenerator::VisitTryFinally(TryFinally* node) { |
| 1530 ASSERT(!in_spilled_code()); |
| 1531 VirtualFrame::SpilledScope spilled_scope; |
| 1532 Comment cmnt(masm_, "[ TryFinally"); |
| 1533 CodeForStatementPosition(node); |
| 1534 |
| 1535 // State: Used to keep track of reason for entering the finally |
| 1536 // block. Should probably be extended to hold information for |
| 1537 // break/continue from within the try block. |
| 1538 enum { FALLING, THROWING, JUMPING }; |
| 1539 |
| 1540 JumpTarget try_block; |
| 1541 JumpTarget finally_block; |
| 1542 |
| 1543 try_block.Call(); |
| 1544 |
| 1545 frame_->EmitPush(rax); |
| 1546 // In case of thrown exceptions, this is where we continue. |
| 1547 __ movq(rcx, Immediate(Smi::FromInt(THROWING))); |
| 1548 finally_block.Jump(); |
| 1549 |
| 1550 // --- Try block --- |
| 1551 try_block.Bind(); |
| 1552 |
| 1553 frame_->PushTryHandler(TRY_FINALLY_HANDLER); |
| 1554 int handler_height = frame_->height(); |
| 1555 |
| 1556 // Shadow the jump targets for all escapes from the try block, including |
| 1557 // returns. During shadowing, the original target is hidden as the |
| 1558 // ShadowTarget and operations on the original actually affect the |
| 1559 // shadowing target. |
| 1560 // |
| 1561 // We should probably try to unify the escaping targets and the return |
| 1562 // target. |
| 1563 int nof_escapes = node->escaping_targets()->length(); |
| 1564 List<ShadowTarget*> shadows(1 + nof_escapes); |
| 1565 |
| 1566 // Add the shadow target for the function return. |
| 1567 static const int kReturnShadowIndex = 0; |
| 1568 shadows.Add(new ShadowTarget(&function_return_)); |
| 1569 bool function_return_was_shadowed = function_return_is_shadowed_; |
| 1570 function_return_is_shadowed_ = true; |
| 1571 ASSERT(shadows[kReturnShadowIndex]->other_target() == &function_return_); |
| 1572 |
| 1573 // Add the remaining shadow targets. |
| 1574 for (int i = 0; i < nof_escapes; i++) { |
| 1575 shadows.Add(new ShadowTarget(node->escaping_targets()->at(i))); |
| 1576 } |
| 1577 |
| 1578 // Generate code for the statements in the try block. |
| 1579 VisitStatementsAndSpill(node->try_block()->statements()); |
| 1580 |
| 1581 // Stop the introduced shadowing and count the number of required unlinks. |
| 1582 // After shadowing stops, the original targets are unshadowed and the |
| 1583 // ShadowTargets represent the formerly shadowing targets. |
| 1584 int nof_unlinks = 0; |
| 1585 for (int i = 0; i < shadows.length(); i++) { |
| 1586 shadows[i]->StopShadowing(); |
| 1587 if (shadows[i]->is_linked()) nof_unlinks++; |
| 1588 } |
| 1589 function_return_is_shadowed_ = function_return_was_shadowed; |
| 1590 |
| 1591 // Get an external reference to the handler address. |
| 1592 ExternalReference handler_address(Top::k_handler_address); |
| 1593 |
| 1594 // If we can fall off the end of the try block, unlink from the try |
| 1595 // chain and set the state on the frame to FALLING. |
| 1596 if (has_valid_frame()) { |
| 1597 // The next handler address is on top of the frame. |
| 1598 ASSERT(StackHandlerConstants::kNextOffset == 0); |
| 1599 __ movq(kScratchRegister, handler_address); |
| 1600 frame_->EmitPop(Operand(kScratchRegister, 0)); |
| 1601 frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1); |
| 1602 |
| 1603 // Fake a top of stack value (unneeded when FALLING) and set the |
 | 1604     // state in rcx, then jump around the unlink blocks if any. |
| 1605 __ movq(kScratchRegister, |
| 1606 Factory::undefined_value(), |
| 1607 RelocInfo::EMBEDDED_OBJECT); |
| 1608 frame_->EmitPush(kScratchRegister); |
| 1609 __ movq(rcx, Immediate(Smi::FromInt(FALLING))); |
| 1610 if (nof_unlinks > 0) { |
| 1611 finally_block.Jump(); |
| 1612 } |
| 1613 } |
| 1614 |
| 1615 // Generate code to unlink and set the state for the (formerly) |
| 1616 // shadowing targets that have been jumped to. |
| 1617 for (int i = 0; i < shadows.length(); i++) { |
| 1618 if (shadows[i]->is_linked()) { |
| 1619 // If we have come from the shadowed return, the return value is |
| 1620 // on the virtual frame. We must preserve it until it is |
| 1621 // pushed. |
| 1622 if (i == kReturnShadowIndex) { |
| 1623 Result return_value; |
| 1624 shadows[i]->Bind(&return_value); |
| 1625 return_value.ToRegister(rax); |
| 1626 } else { |
| 1627 shadows[i]->Bind(); |
| 1628 } |
| 1629 // Because we can be jumping here (to spilled code) from |
| 1630 // unspilled code, we need to reestablish a spilled frame at |
| 1631 // this block. |
| 1632 frame_->SpillAll(); |
| 1633 |
| 1634 // Reload sp from the top handler, because some statements that |
| 1635 // we break from (eg, for...in) may have left stuff on the |
| 1636 // stack. |
| 1637 __ movq(kScratchRegister, handler_address); |
| 1638 __ movq(rsp, Operand(kScratchRegister, 0)); |
| 1639 frame_->Forget(frame_->height() - handler_height); |
| 1640 |
| 1641 // Unlink this handler and drop it from the frame. |
| 1642 ASSERT(StackHandlerConstants::kNextOffset == 0); |
| 1643 __ movq(kScratchRegister, handler_address); |
| 1644 frame_->EmitPop(Operand(kScratchRegister, 0)); |
| 1645 frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1); |
| 1646 |
| 1647 if (i == kReturnShadowIndex) { |
| 1648 // If this target shadowed the function return, materialize |
| 1649 // the return value on the stack. |
| 1650 frame_->EmitPush(rax); |
| 1651 } else { |
| 1652 // Fake TOS for targets that shadowed breaks and continues. |
| 1653 __ movq(kScratchRegister, |
| 1654 Factory::undefined_value(), |
| 1655 RelocInfo::EMBEDDED_OBJECT); |
| 1656 frame_->EmitPush(kScratchRegister); |
| 1657 } |
| 1658 __ movq(rcx, Immediate(Smi::FromInt(JUMPING + i))); |
| 1659 if (--nof_unlinks > 0) { |
| 1660 // If this is not the last unlink block, jump around the next. |
| 1661 finally_block.Jump(); |
| 1662 } |
| 1663 } |
| 1664 } |
| 1665 |
| 1666 // --- Finally block --- |
| 1667 finally_block.Bind(); |
| 1668 |
| 1669 // Push the state on the stack. |
| 1670 frame_->EmitPush(rcx); |
| 1671 |
| 1672 // We keep two elements on the stack - the (possibly faked) result |
| 1673 // and the state - while evaluating the finally block. |
| 1674 // |
| 1675 // Generate code for the statements in the finally block. |
| 1676 VisitStatementsAndSpill(node->finally_block()->statements()); |
| 1677 |
| 1678 if (has_valid_frame()) { |
| 1679 // Restore state and return value or faked TOS. |
| 1680 frame_->EmitPop(rcx); |
| 1681 frame_->EmitPop(rax); |
| 1682 } |
| 1683 |
| 1684 // Generate code to jump to the right destination for all used |
| 1685 // formerly shadowing targets. Deallocate each shadow target. |
| 1686 for (int i = 0; i < shadows.length(); i++) { |
| 1687 if (has_valid_frame() && shadows[i]->is_bound()) { |
| 1688 BreakTarget* original = shadows[i]->other_target(); |
| 1689 __ cmpq(rcx, Immediate(Smi::FromInt(JUMPING + i))); |
| 1690 if (i == kReturnShadowIndex) { |
| 1691 // The return value is (already) in rax. |
| 1692 Result return_value = allocator_->Allocate(rax); |
| 1693 ASSERT(return_value.is_valid()); |
| 1694 if (function_return_is_shadowed_) { |
| 1695 original->Branch(equal, &return_value); |
| 1696 } else { |
| 1697 // Branch around the preparation for return which may emit |
| 1698 // code. |
| 1699 JumpTarget skip; |
| 1700 skip.Branch(not_equal); |
| 1701 frame_->PrepareForReturn(); |
| 1702 original->Jump(&return_value); |
| 1703 skip.Bind(); |
| 1704 } |
| 1705 } else { |
| 1706 original->Branch(equal); |
| 1707 } |
| 1708 } |
| 1709 } |
| 1710 |
| 1711 if (has_valid_frame()) { |
| 1712 // Check if we need to rethrow the exception. |
| 1713 JumpTarget exit; |
| 1714 __ cmpq(rcx, Immediate(Smi::FromInt(THROWING))); |
| 1715 exit.Branch(not_equal); |
| 1716 |
| 1717 // Rethrow exception. |
| 1718 frame_->EmitPush(rax); // undo pop from above |
| 1719 frame_->CallRuntime(Runtime::kReThrow, 1); |
| 1720 |
| 1721 // Done. |
| 1722 exit.Bind(); |
| 1723 } |
1378 } | 1724 } |
1379 | 1725 |
1380 void CodeGenerator::VisitDebuggerStatement(DebuggerStatement* a) { | 1726 |
1381 UNIMPLEMENTED(); | 1727 void CodeGenerator::VisitDebuggerStatement(DebuggerStatement* node) { |
| 1728 ASSERT(!in_spilled_code()); |
| 1729 Comment cmnt(masm_, "[ DebuggerStatement"); |
| 1730 CodeForStatementPosition(node); |
| 1731 #ifdef ENABLE_DEBUGGER_SUPPORT |
| 1732 // Spill everything, even constants, to the frame. |
| 1733 frame_->SpillAll(); |
| 1734 frame_->CallRuntime(Runtime::kDebugBreak, 0); |
| 1735 // Ignore the return value. |
| 1736 #endif |
1382 } | 1737 } |
1383 | 1738 |
1384 | 1739 |
1385 void CodeGenerator::InstantiateBoilerplate(Handle<JSFunction> boilerplate) { | 1740 void CodeGenerator::InstantiateBoilerplate(Handle<JSFunction> boilerplate) { |
1386 // Call the runtime to instantiate the function boilerplate object. | 1741 // Call the runtime to instantiate the function boilerplate object. |
1387 // The inevitable call will sync frame elements to memory anyway, so | 1742 // The inevitable call will sync frame elements to memory anyway, so |
1388 // we do it eagerly to allow us to push the arguments directly into | 1743 // we do it eagerly to allow us to push the arguments directly into |
1389 // place. | 1744 // place. |
1390 ASSERT(boilerplate->IsBoilerplate()); | 1745 ASSERT(boilerplate->IsBoilerplate()); |
1391 frame_->SyncRange(0, frame_->element_count() - 1); | 1746 frame_->SyncRange(0, frame_->element_count() - 1); |
(...skipping 403 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1795 | 2150 |
1796 // Update the write barrier for the array address. | 2151 // Update the write barrier for the array address. |
1797 frame_->Spill(prop_value.reg()); // Overwritten by the write barrier. | 2152 frame_->Spill(prop_value.reg()); // Overwritten by the write barrier. |
1798 Result scratch = allocator_->Allocate(); | 2153 Result scratch = allocator_->Allocate(); |
1799 ASSERT(scratch.is_valid()); | 2154 ASSERT(scratch.is_valid()); |
1800 __ RecordWrite(elements.reg(), offset, prop_value.reg(), scratch.reg()); | 2155 __ RecordWrite(elements.reg(), offset, prop_value.reg(), scratch.reg()); |
1801 } | 2156 } |
1802 } | 2157 } |
1803 | 2158 |
1804 | 2159 |
1805 void CodeGenerator::VisitCatchExtensionObject(CatchExtensionObject* a) { | 2160 void CodeGenerator::VisitCatchExtensionObject(CatchExtensionObject* node) { |
1806 UNIMPLEMENTED(); | 2161 ASSERT(!in_spilled_code()); |
| 2162 // Call runtime routine to allocate the catch extension object and |
| 2163 // assign the exception value to the catch variable. |
| 2164 Comment cmnt(masm_, "[ CatchExtensionObject"); |
| 2165 Load(node->key()); |
| 2166 Load(node->value()); |
| 2167 Result result = |
| 2168 frame_->CallRuntime(Runtime::kCreateCatchExtensionObject, 2); |
| 2169 frame_->Push(&result); |
1807 } | 2170 } |
1808 | 2171 |
1809 | 2172 |
1810 void CodeGenerator::VisitAssignment(Assignment* node) { | 2173 void CodeGenerator::VisitAssignment(Assignment* node) { |
1811 Comment cmnt(masm_, "[ Assignment"); | 2174 Comment cmnt(masm_, "[ Assignment"); |
1812 CodeForStatementPosition(node); | 2175 CodeForStatementPosition(node); |
1813 | 2176 |
1814 { Reference target(this, node->target()); | 2177 { Reference target(this, node->target()); |
1815 if (target.is_illegal()) { | 2178 if (target.is_illegal()) { |
1816 // Fool the virtual frame into thinking that we left the assignment's | 2179 // Fool the virtual frame into thinking that we left the assignment's |
(...skipping 970 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2787 | 3150 |
2788 | 3151 |
2789 void CodeGenerator::VisitThisFunction(ThisFunction* node) { | 3152 void CodeGenerator::VisitThisFunction(ThisFunction* node) { |
2790 frame_->PushFunction(); | 3153 frame_->PushFunction(); |
2791 } | 3154 } |
2792 | 3155 |
2793 | 3156 |
2794 void CodeGenerator::GenerateArgumentsAccess(ZoneList<Expression*>* args) { | 3157 void CodeGenerator::GenerateArgumentsAccess(ZoneList<Expression*>* args) { |
2795 ASSERT(args->length() == 1); | 3158 ASSERT(args->length() == 1); |
2796 | 3159 |
2797 // ArgumentsAccessStub expects the key in edx and the formal | 3160 // ArgumentsAccessStub expects the key in rdx and the formal |
2798 // parameter count in eax. | 3161 // parameter count in rax. |
2799 Load(args->at(0)); | 3162 Load(args->at(0)); |
2800 Result key = frame_->Pop(); | 3163 Result key = frame_->Pop(); |
2801 // Explicitly create a constant result. | 3164 // Explicitly create a constant result. |
2802 Result count(Handle<Smi>(Smi::FromInt(scope_->num_parameters()))); | 3165 Result count(Handle<Smi>(Smi::FromInt(scope_->num_parameters()))); |
2803 // Call the shared stub to get to arguments[key]. | 3166 // Call the shared stub to get to arguments[key]. |
2804 ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT); | 3167 ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT); |
2805 Result result = frame_->CallStub(&stub, &key, &count); | 3168 Result result = frame_->CallStub(&stub, &key, &count); |
2806 frame_->Push(&result); | 3169 frame_->Push(&result); |
2807 } | 3170 } |
2808 | 3171 |
(...skipping 3348 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
6157 break; | 6520 break; |
6158 default: | 6521 default: |
6159 UNREACHABLE(); | 6522 UNREACHABLE(); |
6160 } | 6523 } |
6161 } | 6524 } |
6162 | 6525 |
6163 | 6526 |
6164 #undef __ | 6527 #undef __ |
6165 | 6528 |
6166 } } // namespace v8::internal | 6529 } } // namespace v8::internal |
OLD | NEW |