OLD | NEW |
1 // Copyright 2006-2009 the V8 project authors. All rights reserved. | 1 // Copyright 2006-2009 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 287 matching lines...)
298 } | 298 } |
299 } | 299 } |
300 | 300 |
301 // Generate the return sequence if necessary. | 301 // Generate the return sequence if necessary. |
302 if (frame_ != NULL || function_return_.is_linked()) { | 302 if (frame_ != NULL || function_return_.is_linked()) { |
303 // exit | 303 // exit |
304 // r0: result | 304 // r0: result |
305 // sp: stack pointer | 305 // sp: stack pointer |
306 // fp: frame pointer | 306 // fp: frame pointer |
307 // cp: callee's context | 307 // cp: callee's context |
308 __ mov(r0, Operand(Factory::undefined_value())); | 308 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex); |
309 | 309 |
310 function_return_.Bind(); | 310 function_return_.Bind(); |
311 if (FLAG_trace) { | 311 if (FLAG_trace) { |
312 // Push the return value on the stack as the parameter. | 312 // Push the return value on the stack as the parameter. |
313 // Runtime::TraceExit returns the parameter as it is. | 313 // Runtime::TraceExit returns the parameter as it is. |
314 frame_->EmitPush(r0); | 314 frame_->EmitPush(r0); |
315 frame_->CallRuntime(Runtime::kTraceExit, 1); | 315 frame_->CallRuntime(Runtime::kTraceExit, 1); |
316 } | 316 } |
317 | 317 |
318 // Tear down the frame which will restore the caller's frame pointer and | 318 // Tear down the frame which will restore the caller's frame pointer and |
(...skipping 152 matching lines...)
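
Note on the pattern in this change: every site that used to materialize a heap constant with __ mov(reg, Operand(Factory::xxx_value())) now calls __ LoadRoot(reg, Heap::kXxxRootIndex), which fetches the value by index from the heap's root list instead of embedding the handle in the generated code. A rough sketch of the difference, assuming the ARM port reserves a dedicated roots register (written roots below) that points at the root array; the exact register and encodings are assumptions, not part of this patch:

    // Old form: the handle becomes a constant-pool load baked into the code.
    __ mov(r0, Operand(Factory::undefined_value()));  // ~ ldr r0, [pc, #<pool offset>]

    // New form: an indexed load from the root array, resolved at run time.
    __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);  // ~ ldr r0, [roots, #index * kPointerSize]
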
471 #endif | 471 #endif |
472 JumpTarget true_target; | 472 JumpTarget true_target; |
473 JumpTarget false_target; | 473 JumpTarget false_target; |
474 LoadCondition(x, typeof_state, &true_target, &false_target, false); | 474 LoadCondition(x, typeof_state, &true_target, &false_target, false); |
475 | 475 |
476 if (has_cc()) { | 476 if (has_cc()) { |
477 // Convert cc_reg_ into a boolean value. | 477 // Convert cc_reg_ into a boolean value. |
478 JumpTarget loaded; | 478 JumpTarget loaded; |
479 JumpTarget materialize_true; | 479 JumpTarget materialize_true; |
480 materialize_true.Branch(cc_reg_); | 480 materialize_true.Branch(cc_reg_); |
481 __ mov(r0, Operand(Factory::false_value())); | 481 __ LoadRoot(r0, Heap::kFalseValueRootIndex); |
482 frame_->EmitPush(r0); | 482 frame_->EmitPush(r0); |
483 loaded.Jump(); | 483 loaded.Jump(); |
484 materialize_true.Bind(); | 484 materialize_true.Bind(); |
485 __ mov(r0, Operand(Factory::true_value())); | 485 __ LoadRoot(r0, Heap::kTrueValueRootIndex); |
486 frame_->EmitPush(r0); | 486 frame_->EmitPush(r0); |
487 loaded.Bind(); | 487 loaded.Bind(); |
488 cc_reg_ = al; | 488 cc_reg_ = al; |
489 } | 489 } |
490 | 490 |
491 if (true_target.is_linked() || false_target.is_linked()) { | 491 if (true_target.is_linked() || false_target.is_linked()) { |
492 // We have at least one condition value that has been "translated" | 492 // We have at least one condition value that has been "translated" |
493 // into a branch, thus it needs to be loaded explicitly. | 493 // into a branch, thus it needs to be loaded explicitly. |
494 JumpTarget loaded; | 494 JumpTarget loaded; |
495 if (frame_ != NULL) { | 495 if (frame_ != NULL) { |
496 loaded.Jump(); // Don't lose the current TOS. | 496 loaded.Jump(); // Don't lose the current TOS. |
497 } | 497 } |
498 bool both = true_target.is_linked() && false_target.is_linked(); | 498 bool both = true_target.is_linked() && false_target.is_linked(); |
499 // Load "true" if necessary. | 499 // Load "true" if necessary. |
500 if (true_target.is_linked()) { | 500 if (true_target.is_linked()) { |
501 true_target.Bind(); | 501 true_target.Bind(); |
502 __ mov(r0, Operand(Factory::true_value())); | 502 __ LoadRoot(r0, Heap::kTrueValueRootIndex); |
503 frame_->EmitPush(r0); | 503 frame_->EmitPush(r0); |
504 } | 504 } |
505 // If both "true" and "false" need to be loaded jump across the code for | 505 // If both "true" and "false" need to be loaded jump across the code for |
506 // "false". | 506 // "false". |
507 if (both) { | 507 if (both) { |
508 loaded.Jump(); | 508 loaded.Jump(); |
509 } | 509 } |
510 // Load "false" if necessary. | 510 // Load "false" if necessary. |
511 if (false_target.is_linked()) { | 511 if (false_target.is_linked()) { |
512 false_target.Bind(); | 512 false_target.Bind(); |
513 __ mov(r0, Operand(Factory::false_value())); | 513 __ LoadRoot(r0, Heap::kFalseValueRootIndex); |
514 frame_->EmitPush(r0); | 514 frame_->EmitPush(r0); |
515 } | 515 } |
516 // A value is loaded on all paths reaching this point. | 516 // A value is loaded on all paths reaching this point. |
517 loaded.Bind(); | 517 loaded.Bind(); |
518 } | 518 } |
519 ASSERT(has_valid_frame()); | 519 ASSERT(has_valid_frame()); |
520 ASSERT(!has_cc()); | 520 ASSERT(!has_cc()); |
521 ASSERT(frame_->height() == original_height + 1); | 521 ASSERT(frame_->height() == original_height + 1); |
522 } | 522 } |
523 | 523 |
(...skipping 109 matching lines...)
633 void CodeGenerator::ToBoolean(JumpTarget* true_target, | 633 void CodeGenerator::ToBoolean(JumpTarget* true_target, |
634 JumpTarget* false_target) { | 634 JumpTarget* false_target) { |
635 VirtualFrame::SpilledScope spilled_scope; | 635 VirtualFrame::SpilledScope spilled_scope; |
636 // Note: The generated code snippet does not change stack variables. | 636 // Note: The generated code snippet does not change stack variables. |
637 // Only the condition code should be set. | 637 // Only the condition code should be set. |
638 frame_->EmitPop(r0); | 638 frame_->EmitPop(r0); |
639 | 639 |
640 // Fast case checks | 640 // Fast case checks |
641 | 641 |
642 // Check if the value is 'false'. | 642 // Check if the value is 'false'. |
643 __ cmp(r0, Operand(Factory::false_value())); | 643 __ LoadRoot(ip, Heap::kFalseValueRootIndex); |
| 644 __ cmp(r0, ip); |
644 false_target->Branch(eq); | 645 false_target->Branch(eq); |
645 | 646 |
646 // Check if the value is 'true'. | 647 // Check if the value is 'true'. |
647 __ cmp(r0, Operand(Factory::true_value())); | 648 __ LoadRoot(ip, Heap::kTrueValueRootIndex); |
| 649 __ cmp(r0, ip); |
648 true_target->Branch(eq); | 650 true_target->Branch(eq); |
649 | 651 |
650 // Check if the value is 'undefined'. | 652 // Check if the value is 'undefined'. |
651 __ cmp(r0, Operand(Factory::undefined_value())); | 653 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex); |
| 654 __ cmp(r0, ip); |
652 false_target->Branch(eq); | 655 false_target->Branch(eq); |
653 | 656 |
654 // Check if the value is a smi. | 657 // Check if the value is a smi. |
655 __ cmp(r0, Operand(Smi::FromInt(0))); | 658 __ cmp(r0, Operand(Smi::FromInt(0))); |
656 false_target->Branch(eq); | 659 false_target->Branch(eq); |
657 __ tst(r0, Operand(kSmiTagMask)); | 660 __ tst(r0, Operand(kSmiTagMask)); |
658 true_target->Branch(eq); | 661 true_target->Branch(eq); |
659 | 662 |
660 // Slow case: call the runtime. | 663 // Slow case: call the runtime. |
661 frame_->EmitPush(r0); | 664 frame_->EmitPush(r0); |
662 frame_->CallRuntime(Runtime::kToBool, 1); | 665 frame_->CallRuntime(Runtime::kToBool, 1); |
663 // Convert the result (r0) to a condition code. | 666 // Convert the result (r0) to a condition code. |
664 __ cmp(r0, Operand(Factory::false_value())); | 667 __ LoadRoot(ip, Heap::kFalseValueRootIndex); |
| 668 __ cmp(r0, ip); |
665 | 669 |
666 cc_reg_ = ne; | 670 cc_reg_ = ne; |
667 } | 671 } |
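
Note: comparisons need one extra instruction. ARM's cmp takes a register or a small encodable immediate, not an arbitrary heap address, so the old single-line form presumably relied on the assembler materializing the constant into a scratch register behind the scenes. The new code makes that explicit: load the root into the ip scratch register, then compare registers. That is why each check in ToBoolean above grows from one line to two while remaining branch-compatible:

    // Old (constant materialized implicitly by the assembler):
    __ cmp(r0, Operand(Factory::false_value()));

    // New (explicit load from the root list into ip, then a register compare):
    __ LoadRoot(ip, Heap::kFalseValueRootIndex);
    __ cmp(r0, ip);
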
668 | 672 |
669 | 673 |
670 void CodeGenerator::GenericBinaryOperation(Token::Value op, | 674 void CodeGenerator::GenericBinaryOperation(Token::Value op, |
671 OverwriteMode overwrite_mode, | 675 OverwriteMode overwrite_mode, |
672 int constant_rhs) { | 676 int constant_rhs) { |
673 VirtualFrame::SpilledScope spilled_scope; | 677 VirtualFrame::SpilledScope spilled_scope; |
674 // sp[0] : y | 678 // sp[0] : y |
(...skipping 503 matching lines...)
1178 // Declaration nodes are always declared in only two modes. | 1182 // Declaration nodes are always declared in only two modes. |
1179 ASSERT(node->mode() == Variable::VAR || node->mode() == Variable::CONST); | 1183 ASSERT(node->mode() == Variable::VAR || node->mode() == Variable::CONST); |
1180 PropertyAttributes attr = node->mode() == Variable::VAR ? NONE : READ_ONLY; | 1184 PropertyAttributes attr = node->mode() == Variable::VAR ? NONE : READ_ONLY; |
1181 __ mov(r0, Operand(Smi::FromInt(attr))); | 1185 __ mov(r0, Operand(Smi::FromInt(attr))); |
1182 frame_->EmitPush(r0); | 1186 frame_->EmitPush(r0); |
1183 // Push initial value, if any. | 1187 // Push initial value, if any. |
1184 // Note: For variables we must not push an initial value (such as | 1188 // Note: For variables we must not push an initial value (such as |
1185 // 'undefined') because we may have a (legal) redeclaration and we | 1189 // 'undefined') because we may have a (legal) redeclaration and we |
1186 // must not destroy the current value. | 1190 // must not destroy the current value. |
1187 if (node->mode() == Variable::CONST) { | 1191 if (node->mode() == Variable::CONST) { |
1188 __ mov(r0, Operand(Factory::the_hole_value())); | 1192 __ LoadRoot(r0, Heap::kTheHoleValueRootIndex); |
1189 frame_->EmitPush(r0); | 1193 frame_->EmitPush(r0); |
1190 } else if (node->fun() != NULL) { | 1194 } else if (node->fun() != NULL) { |
1191 LoadAndSpill(node->fun()); | 1195 LoadAndSpill(node->fun()); |
1192 } else { | 1196 } else { |
1193 __ mov(r0, Operand(0)); // no initial value! | 1197 __ mov(r0, Operand(0)); // no initial value! |
1194 frame_->EmitPush(r0); | 1198 frame_->EmitPush(r0); |
1195 } | 1199 } |
1196 frame_->CallRuntime(Runtime::kDeclareContextSlot, 4); | 1200 frame_->CallRuntime(Runtime::kDeclareContextSlot, 4); |
1197 // Ignore the return value (declarations are statements). | 1201 // Ignore the return value (declarations are statements). |
1198 ASSERT(frame_->height() == original_height); | 1202 ASSERT(frame_->height() == original_height); |
(...skipping 519 matching lines...)
1718 JumpTarget entry(JumpTarget::BIDIRECTIONAL); | 1722 JumpTarget entry(JumpTarget::BIDIRECTIONAL); |
1719 JumpTarget end_del_check; | 1723 JumpTarget end_del_check; |
1720 JumpTarget exit; | 1724 JumpTarget exit; |
1721 | 1725 |
1722 // Get the object to enumerate over (converted to JSObject). | 1726 // Get the object to enumerate over (converted to JSObject). |
1723 LoadAndSpill(node->enumerable()); | 1727 LoadAndSpill(node->enumerable()); |
1724 | 1728 |
1725 // Both SpiderMonkey and kjs ignore null and undefined in contrast | 1729 // Both SpiderMonkey and kjs ignore null and undefined in contrast |
1726 // to the specification. 12.6.4 mandates a call to ToObject. | 1730 // to the specification. 12.6.4 mandates a call to ToObject. |
1727 frame_->EmitPop(r0); | 1731 frame_->EmitPop(r0); |
1728 __ cmp(r0, Operand(Factory::undefined_value())); | 1732 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex); |
| 1733 __ cmp(r0, ip); |
1729 exit.Branch(eq); | 1734 exit.Branch(eq); |
1730 __ cmp(r0, Operand(Factory::null_value())); | 1735 __ LoadRoot(ip, Heap::kNullValueRootIndex); |
| 1736 __ cmp(r0, ip); |
1731 exit.Branch(eq); | 1737 exit.Branch(eq); |
1732 | 1738 |
1733 // Stack layout in body: | 1739 // Stack layout in body: |
1734 // [iteration counter (Smi)] | 1740 // [iteration counter (Smi)] |
1735 // [length of array] | 1741 // [length of array] |
1736 // [FixedArray] | 1742 // [FixedArray] |
1737 // [Map or 0] | 1743 // [Map or 0] |
1738 // [Object] | 1744 // [Object] |
1739 | 1745 |
1740 // Check if enumerable is already a JSObject | 1746 // Check if enumerable is already a JSObject |
(...skipping 11 matching lines...)
1752 jsobject.Bind(); | 1758 jsobject.Bind(); |
1753 // Get the set of properties (as a FixedArray or Map). | 1759 // Get the set of properties (as a FixedArray or Map). |
1754 frame_->EmitPush(r0); // duplicate the object being enumerated | 1760 frame_->EmitPush(r0); // duplicate the object being enumerated |
1755 frame_->EmitPush(r0); | 1761 frame_->EmitPush(r0); |
1756 frame_->CallRuntime(Runtime::kGetPropertyNamesFast, 1); | 1762 frame_->CallRuntime(Runtime::kGetPropertyNamesFast, 1); |
1757 | 1763 |
1758 // If we got a Map, we can do a fast modification check. | 1764 // If we got a Map, we can do a fast modification check. |
1759 // Otherwise, we got a FixedArray, and we have to do a slow check. | 1765 // Otherwise, we got a FixedArray, and we have to do a slow check. |
1760 __ mov(r2, Operand(r0)); | 1766 __ mov(r2, Operand(r0)); |
1761 __ ldr(r1, FieldMemOperand(r2, HeapObject::kMapOffset)); | 1767 __ ldr(r1, FieldMemOperand(r2, HeapObject::kMapOffset)); |
1762 __ cmp(r1, Operand(Factory::meta_map())); | 1768 __ LoadRoot(ip, Heap::kMetaMapRootIndex); |
| 1769 __ cmp(r1, ip); |
1763 fixed_array.Branch(ne); | 1770 fixed_array.Branch(ne); |
1764 | 1771 |
1765 // Get enum cache | 1772 // Get enum cache |
1766 __ mov(r1, Operand(r0)); | 1773 __ mov(r1, Operand(r0)); |
1767 __ ldr(r1, FieldMemOperand(r1, Map::kInstanceDescriptorsOffset)); | 1774 __ ldr(r1, FieldMemOperand(r1, Map::kInstanceDescriptorsOffset)); |
1768 __ ldr(r1, FieldMemOperand(r1, DescriptorArray::kEnumerationIndexOffset)); | 1775 __ ldr(r1, FieldMemOperand(r1, DescriptorArray::kEnumerationIndexOffset)); |
1769 __ ldr(r2, | 1776 __ ldr(r2, |
1770 FieldMemOperand(r1, DescriptorArray::kEnumCacheBridgeCacheOffset)); | 1777 FieldMemOperand(r1, DescriptorArray::kEnumCacheBridgeCacheOffset)); |
1771 | 1778 |
1772 frame_->EmitPush(r0); // map | 1779 frame_->EmitPush(r0); // map |
(...skipping 53 matching lines...)
1826 // Convert the entry to a string (or null if it isn't a property anymore). | 1833 // Convert the entry to a string (or null if it isn't a property anymore). |
1827 __ ldr(r0, frame_->ElementAt(4)); // push enumerable | 1834 __ ldr(r0, frame_->ElementAt(4)); // push enumerable |
1828 frame_->EmitPush(r0); | 1835 frame_->EmitPush(r0); |
1829 frame_->EmitPush(r3); // push entry | 1836 frame_->EmitPush(r3); // push entry |
1830 Result arg_count_reg(r0); | 1837 Result arg_count_reg(r0); |
1831 __ mov(r0, Operand(1)); | 1838 __ mov(r0, Operand(1)); |
1832 frame_->InvokeBuiltin(Builtins::FILTER_KEY, CALL_JS, &arg_count_reg, 2); | 1839 frame_->InvokeBuiltin(Builtins::FILTER_KEY, CALL_JS, &arg_count_reg, 2); |
1833 __ mov(r3, Operand(r0)); | 1840 __ mov(r3, Operand(r0)); |
1834 | 1841 |
1835 // If the property has been removed while iterating, we just skip it. | 1842 // If the property has been removed while iterating, we just skip it. |
1836 __ cmp(r3, Operand(Factory::null_value())); | 1843 __ LoadRoot(ip, Heap::kNullValueRootIndex); |
| 1844 __ cmp(r3, ip); |
1837 node->continue_target()->Branch(eq); | 1845 node->continue_target()->Branch(eq); |
1838 | 1846 |
1839 end_del_check.Bind(); | 1847 end_del_check.Bind(); |
1840 // Store the entry in the 'each' expression and take another spin in the | 1848 // Store the entry in the 'each' expression and take another spin in the |
1841 // loop. r3: i'th entry of the enum cache (or string thereof) | 1849 // loop. r3: i'th entry of the enum cache (or string thereof) |
1842 frame_->EmitPush(r3); // push entry | 1850 frame_->EmitPush(r3); // push entry |
1843 { Reference each(this, node->each()); | 1851 { Reference each(this, node->each()); |
1844 if (!each.is_illegal()) { | 1852 if (!each.is_illegal()) { |
1845 if (each.size() > 0) { | 1853 if (each.size() > 0) { |
1846 __ ldr(r0, frame_->ElementAt(each.size())); | 1854 __ ldr(r0, frame_->ElementAt(each.size())); |
(...skipping 239 matching lines...)
2086 if (has_valid_frame()) { | 2094 if (has_valid_frame()) { |
2087 // The next handler address is on top of the frame. | 2095 // The next handler address is on top of the frame. |
2088 ASSERT(StackHandlerConstants::kNextOffset == 0); | 2096 ASSERT(StackHandlerConstants::kNextOffset == 0); |
2089 frame_->EmitPop(r1); | 2097 frame_->EmitPop(r1); |
2090 __ mov(r3, Operand(handler_address)); | 2098 __ mov(r3, Operand(handler_address)); |
2091 __ str(r1, MemOperand(r3)); | 2099 __ str(r1, MemOperand(r3)); |
2092 frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1); | 2100 frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1); |
2093 | 2101 |
2094 // Fake a top of stack value (unneeded when FALLING) and set the | 2102 // Fake a top of stack value (unneeded when FALLING) and set the |
2095 // state in r2, then jump around the unlink blocks if any. | 2103 // state in r2, then jump around the unlink blocks if any. |
2096 __ mov(r0, Operand(Factory::undefined_value())); | 2104 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex); |
2097 frame_->EmitPush(r0); | 2105 frame_->EmitPush(r0); |
2098 __ mov(r2, Operand(Smi::FromInt(FALLING))); | 2106 __ mov(r2, Operand(Smi::FromInt(FALLING))); |
2099 if (nof_unlinks > 0) { | 2107 if (nof_unlinks > 0) { |
2100 finally_block.Jump(); | 2108 finally_block.Jump(); |
2101 } | 2109 } |
2102 } | 2110 } |
2103 | 2111 |
2104 // Generate code to unlink and set the state for the (formerly) | 2112 // Generate code to unlink and set the state for the (formerly) |
2105 // shadowing targets that have been jumped to. | 2113 // shadowing targets that have been jumped to. |
2106 for (int i = 0; i < shadows.length(); i++) { | 2114 for (int i = 0; i < shadows.length(); i++) { |
(...skipping 21 matching lines...)
2128 frame_->EmitPop(r1); | 2136 frame_->EmitPop(r1); |
2129 __ str(r1, MemOperand(r3)); | 2137 __ str(r1, MemOperand(r3)); |
2130 frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1); | 2138 frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1); |
2131 | 2139 |
2132 if (i == kReturnShadowIndex) { | 2140 if (i == kReturnShadowIndex) { |
2133 // If this label shadowed the function return, materialize the | 2141 // If this label shadowed the function return, materialize the |
2134 // return value on the stack. | 2142 // return value on the stack. |
2135 frame_->EmitPush(r0); | 2143 frame_->EmitPush(r0); |
2136 } else { | 2144 } else { |
2137 // Fake TOS for targets that shadowed breaks and continues. | 2145 // Fake TOS for targets that shadowed breaks and continues. |
2138 __ mov(r0, Operand(Factory::undefined_value())); | 2146 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex); |
2139 frame_->EmitPush(r0); | 2147 frame_->EmitPush(r0); |
2140 } | 2148 } |
2141 __ mov(r2, Operand(Smi::FromInt(JUMPING + i))); | 2149 __ mov(r2, Operand(Smi::FromInt(JUMPING + i))); |
2142 if (--nof_unlinks > 0) { | 2150 if (--nof_unlinks > 0) { |
2143 // If this is not the last unlink block, jump around the next. | 2151 // If this is not the last unlink block, jump around the next. |
2144 finally_block.Jump(); | 2152 finally_block.Jump(); |
2145 } | 2153 } |
2146 } | 2154 } |
2147 } | 2155 } |
2148 | 2156 |
(...skipping 166 matching lines...)
2315 Slot* potential_slot = slot->var()->local_if_not_shadowed()->slot(); | 2323 Slot* potential_slot = slot->var()->local_if_not_shadowed()->slot(); |
2316 // Only generate the fast case for locals that rewrite to slots. | 2324 // Only generate the fast case for locals that rewrite to slots. |
2317 // This rules out argument loads. | 2325 // This rules out argument loads. |
2318 if (potential_slot != NULL) { | 2326 if (potential_slot != NULL) { |
2319 __ ldr(r0, | 2327 __ ldr(r0, |
2320 ContextSlotOperandCheckExtensions(potential_slot, | 2328 ContextSlotOperandCheckExtensions(potential_slot, |
2321 r1, | 2329 r1, |
2322 r2, | 2330 r2, |
2323 &slow)); | 2331 &slow)); |
2324 if (potential_slot->var()->mode() == Variable::CONST) { | 2332 if (potential_slot->var()->mode() == Variable::CONST) { |
2325 __ cmp(r0, Operand(Factory::the_hole_value())); | 2333 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); |
2326 __ mov(r0, Operand(Factory::undefined_value()), LeaveCC, eq); | 2334 __ cmp(r0, ip); |
| 2335 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq); |
2327 } | 2336 } |
2328 // There is always control flow to slow from | 2337 // There is always control flow to slow from |
2329 // ContextSlotOperandCheckExtensions so we have to jump around | 2338 // ContextSlotOperandCheckExtensions so we have to jump around |
2330 // it. | 2339 // it. |
2331 done.Jump(); | 2340 done.Jump(); |
2332 } | 2341 } |
2333 } | 2342 } |
2334 | 2343 |
2335 slow.Bind(); | 2344 slow.Bind(); |
2336 frame_->EmitPush(cp); | 2345 frame_->EmitPush(cp); |
(...skipping 16 matching lines...)
2353 | 2362 |
2354 // Special handling for locals allocated in registers. | 2363 // Special handling for locals allocated in registers. |
2355 __ ldr(r0, SlotOperand(slot, r2)); | 2364 __ ldr(r0, SlotOperand(slot, r2)); |
2356 frame_->EmitPush(r0); | 2365 frame_->EmitPush(r0); |
2357 if (slot->var()->mode() == Variable::CONST) { | 2366 if (slot->var()->mode() == Variable::CONST) { |
2358 // Const slots may contain 'the hole' value (the constant hasn't been | 2367 // Const slots may contain 'the hole' value (the constant hasn't been |
2359 // initialized yet) which needs to be converted into the 'undefined' | 2368 // initialized yet) which needs to be converted into the 'undefined' |
2360 // value. | 2369 // value. |
2361 Comment cmnt(masm_, "[ Unhole const"); | 2370 Comment cmnt(masm_, "[ Unhole const"); |
2362 frame_->EmitPop(r0); | 2371 frame_->EmitPop(r0); |
2363 __ cmp(r0, Operand(Factory::the_hole_value())); | 2372 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); |
2364 __ mov(r0, Operand(Factory::undefined_value()), LeaveCC, eq); | 2373 __ cmp(r0, ip); |
| 2374 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq); |
2365 frame_->EmitPush(r0); | 2375 frame_->EmitPush(r0); |
2366 } | 2376 } |
2367 } | 2377 } |
2368 } | 2378 } |
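
Note: the conditional case is covered as well. The const "unhole" sequences used a predicated move, __ mov(r0, Operand(Factory::undefined_value()), LeaveCC, eq); after this change the hole check compares against ip and the replacement value is loaded with __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq), so the load still only happens when the flags say the slot held the hole. This assumes LoadRoot accepts an optional ARM condition; a sketch of the assumed declaration (not taken from this patch):

    // Assumed shape of the helper: load the root-list entry at the given index
    // into the destination register, optionally predicated on cond (default: always).
    void MacroAssembler::LoadRoot(Register destination,
                                  Heap::RootListIndex index,
                                  Condition cond = al);
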
2369 | 2379 |
2370 | 2380 |
2371 void CodeGenerator::LoadFromGlobalSlotCheckExtensions(Slot* slot, | 2381 void CodeGenerator::LoadFromGlobalSlotCheckExtensions(Slot* slot, |
2372 TypeofState typeof_state, | 2382 TypeofState typeof_state, |
2373 Register tmp, | 2383 Register tmp, |
2374 Register tmp2, | 2384 Register tmp2, |
(...skipping 22 matching lines...)
2397 } | 2407 } |
2398 | 2408 |
2399 if (s->is_eval_scope()) { | 2409 if (s->is_eval_scope()) { |
2400 Label next, fast; | 2410 Label next, fast; |
2401 if (!context.is(tmp)) { | 2411 if (!context.is(tmp)) { |
2402 __ mov(tmp, Operand(context)); | 2412 __ mov(tmp, Operand(context)); |
2403 } | 2413 } |
2404 __ bind(&next); | 2414 __ bind(&next); |
2405 // Terminate at global context. | 2415 // Terminate at global context. |
2406 __ ldr(tmp2, FieldMemOperand(tmp, HeapObject::kMapOffset)); | 2416 __ ldr(tmp2, FieldMemOperand(tmp, HeapObject::kMapOffset)); |
2407 __ cmp(tmp2, Operand(Factory::global_context_map())); | 2417 __ LoadRoot(ip, Heap::kGlobalContextMapRootIndex); |
| 2418 __ cmp(tmp2, ip); |
2408 __ b(eq, &fast); | 2419 __ b(eq, &fast); |
2409 // Check that extension is NULL. | 2420 // Check that extension is NULL. |
2410 __ ldr(tmp2, ContextOperand(tmp, Context::EXTENSION_INDEX)); | 2421 __ ldr(tmp2, ContextOperand(tmp, Context::EXTENSION_INDEX)); |
2411 __ tst(tmp2, tmp2); | 2422 __ tst(tmp2, tmp2); |
2412 slow->Branch(ne); | 2423 slow->Branch(ne); |
2413 // Load next context in chain. | 2424 // Load next context in chain. |
2414 __ ldr(tmp, ContextOperand(tmp, Context::CLOSURE_INDEX)); | 2425 __ ldr(tmp, ContextOperand(tmp, Context::CLOSURE_INDEX)); |
2415 __ ldr(tmp, FieldMemOperand(tmp, JSFunction::kContextOffset)); | 2426 __ ldr(tmp, FieldMemOperand(tmp, JSFunction::kContextOffset)); |
2416 __ b(&next); | 2427 __ b(&next); |
2417 __ bind(&fast); | 2428 __ bind(&fast); |
(...skipping 76 matching lines...)
2494 | 2505 |
2495 // Load the literals array of the function. | 2506 // Load the literals array of the function. |
2496 __ ldr(r1, FieldMemOperand(r1, JSFunction::kLiteralsOffset)); | 2507 __ ldr(r1, FieldMemOperand(r1, JSFunction::kLiteralsOffset)); |
2497 | 2508 |
2498 // Load the literal at the ast saved index. | 2509 // Load the literal at the ast saved index. |
2499 int literal_offset = | 2510 int literal_offset = |
2500 FixedArray::kHeaderSize + node->literal_index() * kPointerSize; | 2511 FixedArray::kHeaderSize + node->literal_index() * kPointerSize; |
2501 __ ldr(r2, FieldMemOperand(r1, literal_offset)); | 2512 __ ldr(r2, FieldMemOperand(r1, literal_offset)); |
2502 | 2513 |
2503 JumpTarget done; | 2514 JumpTarget done; |
2504 __ cmp(r2, Operand(Factory::undefined_value())); | 2515 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex); |
| 2516 __ cmp(r2, ip); |
2505 done.Branch(ne); | 2517 done.Branch(ne); |
2506 | 2518 |
2507 // If the entry is undefined we call the runtime system to compute | 2519 // If the entry is undefined we call the runtime system to compute |
2508 // the literal. | 2520 // the literal. |
2509 frame_->EmitPush(r1); // literal array (0) | 2521 frame_->EmitPush(r1); // literal array (0) |
2510 __ mov(r0, Operand(Smi::FromInt(node->literal_index()))); | 2522 __ mov(r0, Operand(Smi::FromInt(node->literal_index()))); |
2511 frame_->EmitPush(r0); // literal index (1) | 2523 frame_->EmitPush(r0); // literal index (1) |
2512 __ mov(r0, Operand(node->pattern())); // RegExp pattern (2) | 2524 __ mov(r0, Operand(node->pattern())); // RegExp pattern (2) |
2513 frame_->EmitPush(r0); | 2525 frame_->EmitPush(r0); |
2514 __ mov(r0, Operand(node->flags())); // RegExp flags (3) | 2526 __ mov(r0, Operand(node->flags())); // RegExp flags (3) |
(...skipping 61 matching lines...)
2576 // Load the literals array of the function. | 2588 // Load the literals array of the function. |
2577 __ ldr(r1, FieldMemOperand(r1, JSFunction::kLiteralsOffset)); | 2589 __ ldr(r1, FieldMemOperand(r1, JSFunction::kLiteralsOffset)); |
2578 | 2590 |
2579 // Load the literal at the ast saved index. | 2591 // Load the literal at the ast saved index. |
2580 int literal_offset = | 2592 int literal_offset = |
2581 FixedArray::kHeaderSize + node->literal_index() * kPointerSize; | 2593 FixedArray::kHeaderSize + node->literal_index() * kPointerSize; |
2582 __ ldr(r2, FieldMemOperand(r1, literal_offset)); | 2594 __ ldr(r2, FieldMemOperand(r1, literal_offset)); |
2583 | 2595 |
2584 // Check whether we need to materialize the object literal boilerplate. | 2596 // Check whether we need to materialize the object literal boilerplate. |
2585 // If so, jump to the deferred code. | 2597 // If so, jump to the deferred code. |
2586 __ cmp(r2, Operand(Factory::undefined_value())); | 2598 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex); |
| 2599 __ cmp(r2, Operand(ip)); |
2587 deferred->Branch(eq); | 2600 deferred->Branch(eq); |
2588 deferred->BindExit(); | 2601 deferred->BindExit(); |
2589 | 2602 |
2590 // Push the object literal boilerplate. | 2603 // Push the object literal boilerplate. |
2591 frame_->EmitPush(r2); | 2604 frame_->EmitPush(r2); |
2592 | 2605 |
2593 // Clone the boilerplate object. | 2606 // Clone the boilerplate object. |
2594 Runtime::FunctionId clone_function_id = Runtime::kCloneLiteralBoilerplate; | 2607 Runtime::FunctionId clone_function_id = Runtime::kCloneLiteralBoilerplate; |
2595 if (node->depth() == 1) { | 2608 if (node->depth() == 1) { |
2596 clone_function_id = Runtime::kCloneShallowLiteralBoilerplate; | 2609 clone_function_id = Runtime::kCloneShallowLiteralBoilerplate; |
(...skipping 101 matching lines...)
2698 // Load the literals array of the function. | 2711 // Load the literals array of the function. |
2699 __ ldr(r1, FieldMemOperand(r1, JSFunction::kLiteralsOffset)); | 2712 __ ldr(r1, FieldMemOperand(r1, JSFunction::kLiteralsOffset)); |
2700 | 2713 |
2701 // Load the literal at the ast saved index. | 2714 // Load the literal at the ast saved index. |
2702 int literal_offset = | 2715 int literal_offset = |
2703 FixedArray::kHeaderSize + node->literal_index() * kPointerSize; | 2716 FixedArray::kHeaderSize + node->literal_index() * kPointerSize; |
2704 __ ldr(r2, FieldMemOperand(r1, literal_offset)); | 2717 __ ldr(r2, FieldMemOperand(r1, literal_offset)); |
2705 | 2718 |
2706 // Check whether we need to materialize the object literal boilerplate. | 2719 // Check whether we need to materialize the object literal boilerplate. |
2707 // If so, jump to the deferred code. | 2720 // If so, jump to the deferred code. |
2708 __ cmp(r2, Operand(Factory::undefined_value())); | 2721 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex); |
| 2722 __ cmp(r2, Operand(ip)); |
2709 deferred->Branch(eq); | 2723 deferred->Branch(eq); |
2710 deferred->BindExit(); | 2724 deferred->BindExit(); |
2711 | 2725 |
2712 // Push the object literal boilerplate. | 2726 // Push the object literal boilerplate. |
2713 frame_->EmitPush(r2); | 2727 frame_->EmitPush(r2); |
2714 | 2728 |
2715 // Clone the boilerplate object. | 2729 // Clone the boilerplate object. |
2716 Runtime::FunctionId clone_function_id = Runtime::kCloneLiteralBoilerplate; | 2730 Runtime::FunctionId clone_function_id = Runtime::kCloneLiteralBoilerplate; |
2717 if (node->depth() == 1) { | 2731 if (node->depth() == 1) { |
2718 clone_function_id = Runtime::kCloneShallowLiteralBoilerplate; | 2732 clone_function_id = Runtime::kCloneShallowLiteralBoilerplate; |
(...skipping 310 matching lines...)
3029 // the function we need to call and the receiver of the call. | 3043 // the function we need to call and the receiver of the call. |
3030 // Then we call the resolved function using the given arguments. | 3044 // Then we call the resolved function using the given arguments. |
3031 | 3045 |
3032 ZoneList<Expression*>* args = node->arguments(); | 3046 ZoneList<Expression*>* args = node->arguments(); |
3033 Expression* function = node->expression(); | 3047 Expression* function = node->expression(); |
3034 | 3048 |
3035 CodeForStatementPosition(node); | 3049 CodeForStatementPosition(node); |
3036 | 3050 |
3037 // Prepare stack for call to resolved function. | 3051 // Prepare stack for call to resolved function. |
3038 LoadAndSpill(function); | 3052 LoadAndSpill(function); |
3039 __ mov(r2, Operand(Factory::undefined_value())); | 3053 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex); |
3040 frame_->EmitPush(r2); // Slot for receiver | 3054 frame_->EmitPush(r2); // Slot for receiver |
3041 int arg_count = args->length(); | 3055 int arg_count = args->length(); |
3042 for (int i = 0; i < arg_count; i++) { | 3056 for (int i = 0; i < arg_count; i++) { |
3043 LoadAndSpill(args->at(i)); | 3057 LoadAndSpill(args->at(i)); |
3044 } | 3058 } |
3045 | 3059 |
3046 // Prepare stack for call to ResolvePossiblyDirectEval. | 3060 // Prepare stack for call to ResolvePossiblyDirectEval. |
3047 __ ldr(r1, MemOperand(sp, arg_count * kPointerSize + kPointerSize)); | 3061 __ ldr(r1, MemOperand(sp, arg_count * kPointerSize + kPointerSize)); |
3048 frame_->EmitPush(r1); | 3062 frame_->EmitPush(r1); |
3049 if (arg_count > 0) { | 3063 if (arg_count > 0) { |
(...skipping 123 matching lines...)
3173 leave.Jump(); | 3187 leave.Jump(); |
3174 | 3188 |
3175 // Objects with a non-function constructor have class 'Object'. | 3189 // Objects with a non-function constructor have class 'Object'. |
3176 non_function_constructor.Bind(); | 3190 non_function_constructor.Bind(); |
3177 __ mov(r0, Operand(Factory::Object_symbol())); | 3191 __ mov(r0, Operand(Factory::Object_symbol())); |
3178 frame_->EmitPush(r0); | 3192 frame_->EmitPush(r0); |
3179 leave.Jump(); | 3193 leave.Jump(); |
3180 | 3194 |
3181 // Non-JS objects have class null. | 3195 // Non-JS objects have class null. |
3182 null.Bind(); | 3196 null.Bind(); |
3183 __ mov(r0, Operand(Factory::null_value())); | 3197 __ LoadRoot(r0, Heap::kNullValueRootIndex); |
3184 frame_->EmitPush(r0); | 3198 frame_->EmitPush(r0); |
3185 | 3199 |
3186 // All done. | 3200 // All done. |
3187 leave.Bind(); | 3201 leave.Bind(); |
3188 } | 3202 } |
3189 | 3203 |
3190 | 3204 |
3191 void CodeGenerator::GenerateValueOf(ZoneList<Expression*>* args) { | 3205 void CodeGenerator::GenerateValueOf(ZoneList<Expression*>* args) { |
3192 VirtualFrame::SpilledScope spilled_scope; | 3206 VirtualFrame::SpilledScope spilled_scope; |
3193 ASSERT(args->length() == 1); | 3207 ASSERT(args->length() == 1); |
(...skipping 52 matching lines...)
3246 VirtualFrame::SpilledScope spilled_scope; | 3260 VirtualFrame::SpilledScope spilled_scope; |
3247 // See comment in CodeGenerator::GenerateLog in codegen-ia32.cc. | 3261 // See comment in CodeGenerator::GenerateLog in codegen-ia32.cc. |
3248 ASSERT_EQ(args->length(), 3); | 3262 ASSERT_EQ(args->length(), 3); |
3249 #ifdef ENABLE_LOGGING_AND_PROFILING | 3263 #ifdef ENABLE_LOGGING_AND_PROFILING |
3250 if (ShouldGenerateLog(args->at(0))) { | 3264 if (ShouldGenerateLog(args->at(0))) { |
3251 LoadAndSpill(args->at(1)); | 3265 LoadAndSpill(args->at(1)); |
3252 LoadAndSpill(args->at(2)); | 3266 LoadAndSpill(args->at(2)); |
3253 __ CallRuntime(Runtime::kLog, 2); | 3267 __ CallRuntime(Runtime::kLog, 2); |
3254 } | 3268 } |
3255 #endif | 3269 #endif |
3256 __ mov(r0, Operand(Factory::undefined_value())); | 3270 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex); |
3257 frame_->EmitPush(r0); | 3271 frame_->EmitPush(r0); |
3258 } | 3272 } |
3259 | 3273 |
3260 | 3274 |
3261 void CodeGenerator::GenerateIsNonNegativeSmi(ZoneList<Expression*>* args) { | 3275 void CodeGenerator::GenerateIsNonNegativeSmi(ZoneList<Expression*>* args) { |
3262 VirtualFrame::SpilledScope spilled_scope; | 3276 VirtualFrame::SpilledScope spilled_scope; |
3263 ASSERT(args->length() == 1); | 3277 ASSERT(args->length() == 1); |
3264 LoadAndSpill(args->at(0)); | 3278 LoadAndSpill(args->at(0)); |
3265 frame_->EmitPop(r0); | 3279 frame_->EmitPop(r0); |
3266 __ tst(r0, Operand(kSmiTagMask | 0x80000000u)); | 3280 __ tst(r0, Operand(kSmiTagMask | 0x80000000u)); |
3267 cc_reg_ = eq; | 3281 cc_reg_ = eq; |
3268 } | 3282 } |
3269 | 3283 |
3270 | 3284 |
3271 // This should generate code that performs a charCodeAt() call or returns | 3285 // This should generate code that performs a charCodeAt() call or returns |
3272 // undefined in order to trigger the slow case, Runtime_StringCharCodeAt. | 3286 // undefined in order to trigger the slow case, Runtime_StringCharCodeAt. |
3273 // It is not yet implemented on ARM, so it always goes to the slow case. | 3287 // It is not yet implemented on ARM, so it always goes to the slow case. |
3274 void CodeGenerator::GenerateFastCharCodeAt(ZoneList<Expression*>* args) { | 3288 void CodeGenerator::GenerateFastCharCodeAt(ZoneList<Expression*>* args) { |
3275 VirtualFrame::SpilledScope spilled_scope; | 3289 VirtualFrame::SpilledScope spilled_scope; |
3276 ASSERT(args->length() == 2); | 3290 ASSERT(args->length() == 2); |
3277 __ mov(r0, Operand(Factory::undefined_value())); | 3291 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex); |
3278 frame_->EmitPush(r0); | 3292 frame_->EmitPush(r0); |
3279 } | 3293 } |
3280 | 3294 |
3281 | 3295 |
3282 void CodeGenerator::GenerateIsArray(ZoneList<Expression*>* args) { | 3296 void CodeGenerator::GenerateIsArray(ZoneList<Expression*>* args) { |
3283 VirtualFrame::SpilledScope spilled_scope; | 3297 VirtualFrame::SpilledScope spilled_scope; |
3284 ASSERT(args->length() == 1); | 3298 ASSERT(args->length() == 1); |
3285 LoadAndSpill(args->at(0)); | 3299 LoadAndSpill(args->at(0)); |
3286 JumpTarget answer; | 3300 JumpTarget answer; |
3287 // We need the CC bits to come out as not_equal in the case where the | 3301 // We need the CC bits to come out as not_equal in the case where the |
(...skipping 199 matching lines...)
3487 frame_->EmitPush(r0); | 3501 frame_->EmitPush(r0); |
3488 __ mov(r0, Operand(variable->name())); | 3502 __ mov(r0, Operand(variable->name())); |
3489 frame_->EmitPush(r0); | 3503 frame_->EmitPush(r0); |
3490 Result arg_count(r0); | 3504 Result arg_count(r0); |
3491 __ mov(r0, Operand(1)); // not counting receiver | 3505 __ mov(r0, Operand(1)); // not counting receiver |
3492 frame_->InvokeBuiltin(Builtins::DELETE, CALL_JS, &arg_count, 2); | 3506 frame_->InvokeBuiltin(Builtins::DELETE, CALL_JS, &arg_count, 2); |
3493 | 3507 |
3494 } else { | 3508 } else { |
3495 // Default: Result of deleting non-global, not dynamically | 3509 // Default: Result of deleting non-global, not dynamically |
3496 // introduced variables is false. | 3510 // introduced variables is false. |
3497 __ mov(r0, Operand(Factory::false_value())); | 3511 __ LoadRoot(r0, Heap::kFalseValueRootIndex); |
3498 } | 3512 } |
3499 | 3513 |
3500 } else { | 3514 } else { |
3501 // Default: Result of deleting expressions is true. | 3515 // Default: Result of deleting expressions is true. |
3502 LoadAndSpill(node->expression()); // may have side-effects | 3516 LoadAndSpill(node->expression()); // may have side-effects |
3503 frame_->Drop(); | 3517 frame_->Drop(); |
3504 __ mov(r0, Operand(Factory::true_value())); | 3518 __ LoadRoot(r0, Heap::kTrueValueRootIndex); |
3505 } | 3519 } |
3506 frame_->EmitPush(r0); | 3520 frame_->EmitPush(r0); |
3507 | 3521 |
3508 } else if (op == Token::TYPEOF) { | 3522 } else if (op == Token::TYPEOF) { |
3509 // Special case for loading the typeof expression; see comment on | 3523 // Special case for loading the typeof expression; see comment on |
3510 // LoadTypeofExpression(). | 3524 // LoadTypeofExpression(). |
3511 LoadTypeofExpression(node->expression()); | 3525 LoadTypeofExpression(node->expression()); |
3512 frame_->CallRuntime(Runtime::kTypeof, 1); | 3526 frame_->CallRuntime(Runtime::kTypeof, 1); |
3513 frame_->EmitPush(r0); // r0 has result | 3527 frame_->EmitPush(r0); // r0 has result |
3514 | 3528 |
(...skipping 32 matching lines...)
3547 smi_label.Bind(); | 3561 smi_label.Bind(); |
3548 __ mvn(r0, Operand(r0)); | 3562 __ mvn(r0, Operand(r0)); |
3549 __ bic(r0, r0, Operand(kSmiTagMask)); // bit-clear inverted smi-tag | 3563 __ bic(r0, r0, Operand(kSmiTagMask)); // bit-clear inverted smi-tag |
3550 continue_label.Bind(); | 3564 continue_label.Bind(); |
3551 break; | 3565 break; |
3552 } | 3566 } |
3553 | 3567 |
3554 case Token::VOID: | 3568 case Token::VOID: |
3555 // since the stack top is cached in r0, popping and then | 3569 // since the stack top is cached in r0, popping and then |
3556 // pushing a value can be done by just writing to r0. | 3570 // pushing a value can be done by just writing to r0. |
3557 __ mov(r0, Operand(Factory::undefined_value())); | 3571 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex); |
3558 break; | 3572 break; |
3559 | 3573 |
3560 case Token::ADD: { | 3574 case Token::ADD: { |
3561 // Smi check. | 3575 // Smi check. |
3562 JumpTarget continue_label; | 3576 JumpTarget continue_label; |
3563 __ tst(r0, Operand(kSmiTagMask)); | 3577 __ tst(r0, Operand(kSmiTagMask)); |
3564 continue_label.Branch(eq); | 3578 continue_label.Branch(eq); |
3565 frame_->EmitPush(r0); | 3579 frame_->EmitPush(r0); |
3566 Result arg_count(r0); | 3580 Result arg_count(r0); |
3567 __ mov(r0, Operand(0)); // not counting receiver | 3581 __ mov(r0, Operand(0)); // not counting receiver |
(...skipping 305 matching lines...)
3873 // equality. | 3887 // equality. |
3874 if (op == Token::EQ || op == Token::EQ_STRICT) { | 3888 if (op == Token::EQ || op == Token::EQ_STRICT) { |
3875 bool left_is_null = | 3889 bool left_is_null = |
3876 left->AsLiteral() != NULL && left->AsLiteral()->IsNull(); | 3890 left->AsLiteral() != NULL && left->AsLiteral()->IsNull(); |
3877 bool right_is_null = | 3891 bool right_is_null = |
3878 right->AsLiteral() != NULL && right->AsLiteral()->IsNull(); | 3892 right->AsLiteral() != NULL && right->AsLiteral()->IsNull(); |
3879 // The 'null' value can only be equal to 'null' or 'undefined'. | 3893 // The 'null' value can only be equal to 'null' or 'undefined'. |
3880 if (left_is_null || right_is_null) { | 3894 if (left_is_null || right_is_null) { |
3881 LoadAndSpill(left_is_null ? right : left); | 3895 LoadAndSpill(left_is_null ? right : left); |
3882 frame_->EmitPop(r0); | 3896 frame_->EmitPop(r0); |
3883 __ cmp(r0, Operand(Factory::null_value())); | 3897 __ LoadRoot(ip, Heap::kNullValueRootIndex); |
| 3898 __ cmp(r0, ip); |
3884 | 3899 |
3885 // The 'null' value is only equal to 'undefined' if using non-strict | 3900 // The 'null' value is only equal to 'undefined' if using non-strict |
3886 // comparisons. | 3901 // comparisons. |
3887 if (op != Token::EQ_STRICT) { | 3902 if (op != Token::EQ_STRICT) { |
3888 true_target()->Branch(eq); | 3903 true_target()->Branch(eq); |
3889 | 3904 |
3890 __ cmp(r0, Operand(Factory::undefined_value())); | 3905 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex); |
| 3906 __ cmp(r0, Operand(ip)); |
3891 true_target()->Branch(eq); | 3907 true_target()->Branch(eq); |
3892 | 3908 |
3893 __ tst(r0, Operand(kSmiTagMask)); | 3909 __ tst(r0, Operand(kSmiTagMask)); |
3894 false_target()->Branch(eq); | 3910 false_target()->Branch(eq); |
3895 | 3911 |
3896 // It can be an undetectable object. | 3912 // It can be an undetectable object. |
3897 __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset)); | 3913 __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset)); |
3898 __ ldrb(r0, FieldMemOperand(r0, Map::kBitFieldOffset)); | 3914 __ ldrb(r0, FieldMemOperand(r0, Map::kBitFieldOffset)); |
3899 __ and_(r0, r0, Operand(1 << Map::kIsUndetectable)); | 3915 __ and_(r0, r0, Operand(1 << Map::kIsUndetectable)); |
3900 __ cmp(r0, Operand(1 << Map::kIsUndetectable)); | 3916 __ cmp(r0, Operand(1 << Map::kIsUndetectable)); |
(...skipping 16 matching lines...)
3917 Handle<String> check(String::cast(*right->AsLiteral()->handle())); | 3933 Handle<String> check(String::cast(*right->AsLiteral()->handle())); |
3918 | 3934 |
3919 // Load the operand, move it to register r1. | 3935 // Load the operand, move it to register r1. |
3920 LoadTypeofExpression(operation->expression()); | 3936 LoadTypeofExpression(operation->expression()); |
3921 frame_->EmitPop(r1); | 3937 frame_->EmitPop(r1); |
3922 | 3938 |
3923 if (check->Equals(Heap::number_symbol())) { | 3939 if (check->Equals(Heap::number_symbol())) { |
3924 __ tst(r1, Operand(kSmiTagMask)); | 3940 __ tst(r1, Operand(kSmiTagMask)); |
3925 true_target()->Branch(eq); | 3941 true_target()->Branch(eq); |
3926 __ ldr(r1, FieldMemOperand(r1, HeapObject::kMapOffset)); | 3942 __ ldr(r1, FieldMemOperand(r1, HeapObject::kMapOffset)); |
3927 __ cmp(r1, Operand(Factory::heap_number_map())); | 3943 __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex); |
| 3944 __ cmp(r1, ip); |
3928 cc_reg_ = eq; | 3945 cc_reg_ = eq; |
3929 | 3946 |
3930 } else if (check->Equals(Heap::string_symbol())) { | 3947 } else if (check->Equals(Heap::string_symbol())) { |
3931 __ tst(r1, Operand(kSmiTagMask)); | 3948 __ tst(r1, Operand(kSmiTagMask)); |
3932 false_target()->Branch(eq); | 3949 false_target()->Branch(eq); |
3933 | 3950 |
3934 __ ldr(r1, FieldMemOperand(r1, HeapObject::kMapOffset)); | 3951 __ ldr(r1, FieldMemOperand(r1, HeapObject::kMapOffset)); |
3935 | 3952 |
3936 // It can be an undetectable string object. | 3953 // It can be an undetectable string object. |
3937 __ ldrb(r2, FieldMemOperand(r1, Map::kBitFieldOffset)); | 3954 __ ldrb(r2, FieldMemOperand(r1, Map::kBitFieldOffset)); |
3938 __ and_(r2, r2, Operand(1 << Map::kIsUndetectable)); | 3955 __ and_(r2, r2, Operand(1 << Map::kIsUndetectable)); |
3939 __ cmp(r2, Operand(1 << Map::kIsUndetectable)); | 3956 __ cmp(r2, Operand(1 << Map::kIsUndetectable)); |
3940 false_target()->Branch(eq); | 3957 false_target()->Branch(eq); |
3941 | 3958 |
3942 __ ldrb(r2, FieldMemOperand(r1, Map::kInstanceTypeOffset)); | 3959 __ ldrb(r2, FieldMemOperand(r1, Map::kInstanceTypeOffset)); |
3943 __ cmp(r2, Operand(FIRST_NONSTRING_TYPE)); | 3960 __ cmp(r2, Operand(FIRST_NONSTRING_TYPE)); |
3944 cc_reg_ = lt; | 3961 cc_reg_ = lt; |
3945 | 3962 |
3946 } else if (check->Equals(Heap::boolean_symbol())) { | 3963 } else if (check->Equals(Heap::boolean_symbol())) { |
3947 __ cmp(r1, Operand(Factory::true_value())); | 3964 __ LoadRoot(ip, Heap::kTrueValueRootIndex); |
| 3965 __ cmp(r1, ip); |
3948 true_target()->Branch(eq); | 3966 true_target()->Branch(eq); |
3949 __ cmp(r1, Operand(Factory::false_value())); | 3967 __ LoadRoot(ip, Heap::kFalseValueRootIndex); |
| 3968 __ cmp(r1, ip); |
3950 cc_reg_ = eq; | 3969 cc_reg_ = eq; |
3951 | 3970 |
3952 } else if (check->Equals(Heap::undefined_symbol())) { | 3971 } else if (check->Equals(Heap::undefined_symbol())) { |
3953 __ cmp(r1, Operand(Factory::undefined_value())); | 3972 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex); |
| 3973 __ cmp(r1, ip); |
3954 true_target()->Branch(eq); | 3974 true_target()->Branch(eq); |
3955 | 3975 |
3956 __ tst(r1, Operand(kSmiTagMask)); | 3976 __ tst(r1, Operand(kSmiTagMask)); |
3957 false_target()->Branch(eq); | 3977 false_target()->Branch(eq); |
3958 | 3978 |
3959 // It can be an undetectable object. | 3979 // It can be an undetectable object. |
3960 __ ldr(r1, FieldMemOperand(r1, HeapObject::kMapOffset)); | 3980 __ ldr(r1, FieldMemOperand(r1, HeapObject::kMapOffset)); |
3961 __ ldrb(r2, FieldMemOperand(r1, Map::kBitFieldOffset)); | 3981 __ ldrb(r2, FieldMemOperand(r1, Map::kBitFieldOffset)); |
3962 __ and_(r2, r2, Operand(1 << Map::kIsUndetectable)); | 3982 __ and_(r2, r2, Operand(1 << Map::kIsUndetectable)); |
3963 __ cmp(r2, Operand(1 << Map::kIsUndetectable)); | 3983 __ cmp(r2, Operand(1 << Map::kIsUndetectable)); |
3964 | 3984 |
3965 cc_reg_ = eq; | 3985 cc_reg_ = eq; |
3966 | 3986 |
3967 } else if (check->Equals(Heap::function_symbol())) { | 3987 } else if (check->Equals(Heap::function_symbol())) { |
3968 __ tst(r1, Operand(kSmiTagMask)); | 3988 __ tst(r1, Operand(kSmiTagMask)); |
3969 false_target()->Branch(eq); | 3989 false_target()->Branch(eq); |
3970 __ CompareObjectType(r1, r1, r1, JS_FUNCTION_TYPE); | 3990 __ CompareObjectType(r1, r1, r1, JS_FUNCTION_TYPE); |
3971 cc_reg_ = eq; | 3991 cc_reg_ = eq; |
3972 | 3992 |
3973 } else if (check->Equals(Heap::object_symbol())) { | 3993 } else if (check->Equals(Heap::object_symbol())) { |
3974 __ tst(r1, Operand(kSmiTagMask)); | 3994 __ tst(r1, Operand(kSmiTagMask)); |
3975 false_target()->Branch(eq); | 3995 false_target()->Branch(eq); |
3976 | 3996 |
3977 __ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset)); | 3997 __ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset)); |
3978 __ cmp(r1, Operand(Factory::null_value())); | 3998 __ LoadRoot(ip, Heap::kNullValueRootIndex); |
| 3999 __ cmp(r1, ip); |
3979 true_target()->Branch(eq); | 4000 true_target()->Branch(eq); |
3980 | 4001 |
3981 // It can be an undetectable object. | 4002 // It can be an undetectable object. |
3982 __ ldrb(r1, FieldMemOperand(r2, Map::kBitFieldOffset)); | 4003 __ ldrb(r1, FieldMemOperand(r2, Map::kBitFieldOffset)); |
3983 __ and_(r1, r1, Operand(1 << Map::kIsUndetectable)); | 4004 __ and_(r1, r1, Operand(1 << Map::kIsUndetectable)); |
3984 __ cmp(r1, Operand(1 << Map::kIsUndetectable)); | 4005 __ cmp(r1, Operand(1 << Map::kIsUndetectable)); |
3985 false_target()->Branch(eq); | 4006 false_target()->Branch(eq); |
3986 | 4007 |
3987 __ ldrb(r2, FieldMemOperand(r2, Map::kInstanceTypeOffset)); | 4008 __ ldrb(r2, FieldMemOperand(r2, Map::kInstanceTypeOffset)); |
3988 __ cmp(r2, Operand(FIRST_JS_OBJECT_TYPE)); | 4009 __ cmp(r2, Operand(FIRST_JS_OBJECT_TYPE)); |
(...skipping 210 matching lines...)
4199 ASSERT(!slot->var()->is_dynamic()); | 4220 ASSERT(!slot->var()->is_dynamic()); |
4200 | 4221 |
4201 JumpTarget exit; | 4222 JumpTarget exit; |
4202 if (init_state == CONST_INIT) { | 4223 if (init_state == CONST_INIT) { |
4203 ASSERT(slot->var()->mode() == Variable::CONST); | 4224 ASSERT(slot->var()->mode() == Variable::CONST); |
4204 // Only the first const initialization must be executed (the slot | 4225 // Only the first const initialization must be executed (the slot |
4205 // still contains 'the hole' value). When the assignment is | 4226 // still contains 'the hole' value). When the assignment is |
4206 // executed, the code is identical to a normal store (see below). | 4227 // executed, the code is identical to a normal store (see below). |
4207 Comment cmnt(masm, "[ Init const"); | 4228 Comment cmnt(masm, "[ Init const"); |
4208 __ ldr(r2, cgen_->SlotOperand(slot, r2)); | 4229 __ ldr(r2, cgen_->SlotOperand(slot, r2)); |
4209 __ cmp(r2, Operand(Factory::the_hole_value())); | 4230 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); |
| 4231 __ cmp(r2, ip); |
4210 exit.Branch(ne); | 4232 exit.Branch(ne); |
4211 } | 4233 } |
4212 | 4234 |
4213 // We must execute the store. Storing a variable must keep the | 4235 // We must execute the store. Storing a variable must keep the |
4214 // (new) value on the stack. This is necessary for compiling | 4236 // (new) value on the stack. This is necessary for compiling |
4215 // assignment expressions. | 4237 // assignment expressions. |
4216 // | 4238 // |
4217 // Note: We will reach here even with slot->var()->mode() == | 4239 // Note: We will reach here even with slot->var()->mode() == |
4218 // Variable::CONST because of const declarations which will | 4240 // Variable::CONST because of const declarations which will |
4219 // initialize consts to 'the hole' value and by doing so, end up | 4241 // initialize consts to 'the hole' value and by doing so, end up |
(...skipping 712 matching lines...)
4932 __ add(result_reg, result_reg, Operand(HeapNumber::kSize)); | 4954 __ add(result_reg, result_reg, Operand(HeapNumber::kSize)); |
4933 // Compare new allocation top and limit. | 4955 // Compare new allocation top and limit. |
4934 __ cmp(result_reg, Operand(scratch2)); | 4956 __ cmp(result_reg, Operand(scratch2)); |
4935 // Branch if out of space in young generation. | 4957 // Branch if out of space in young generation. |
4936 __ b(hi, need_gc); | 4958 __ b(hi, need_gc); |
4937 // Store new allocation top. | 4959 // Store new allocation top. |
4938 __ str(result_reg, MemOperand(allocation_top_addr_reg)); // store new top | 4960 __ str(result_reg, MemOperand(allocation_top_addr_reg)); // store new top |
4939 // Tag and adjust back to start of new object. | 4961 // Tag and adjust back to start of new object. |
4940 __ sub(result_reg, result_reg, Operand(HeapNumber::kSize - kHeapObjectTag)); | 4962 __ sub(result_reg, result_reg, Operand(HeapNumber::kSize - kHeapObjectTag)); |
4941 // Get heap number map into scratch2. | 4963 // Get heap number map into scratch2. |
4942 __ mov(scratch2, Operand(Factory::heap_number_map())); | 4964 __ LoadRoot(scratch2, Heap::kHeapNumberMapRootIndex); |
4943 // Store heap number map in new object. | 4965 // Store heap number map in new object. |
4944 __ str(scratch2, FieldMemOperand(result_reg, HeapObject::kMapOffset)); | 4966 __ str(scratch2, FieldMemOperand(result_reg, HeapObject::kMapOffset)); |
4945 } | 4967 } |
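
Note: maps are reached the same way. When the freshly allocated HeapNumber above is initialized, its map used to be materialized via Factory::heap_number_map(); it is now fetched with __ LoadRoot(scratch2, Heap::kHeapNumberMapRootIndex) and then stored into the object's map slot, so object initialization follows the same root-list pattern as the value constants (undefined, true, false, the hole) elsewhere in the patch.
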
4946 | 4968 |
4947 | 4969 |
4948 // We fall into this code if the operands were Smis, but the result was | 4970 // We fall into this code if the operands were Smis, but the result was |
4949 // not (eg. overflow). We branch into this code (to the not_smi label) if | 4971 // not (eg. overflow). We branch into this code (to the not_smi label) if |
4950 // the operands were not both Smi. The operands are in r0 and r1. In order | 4972 // the operands were not both Smi. The operands are in r0 and r1. In order |
4951 // to call the C-implemented binary fp operation routines we need to end up | 4973 // to call the C-implemented binary fp operation routines we need to end up |
4952 // with the double precision floating point operands in r0 and r1 (for the | 4974 // with the double precision floating point operands in r0 and r1 (for the |
(...skipping 1125 matching lines...)
6078 __ b(gt, &slow); | 6100 __ b(gt, &slow); |
6079 | 6101 |
6080 // Register mapping: r3 is object map and r4 is function prototype. | 6102 // Register mapping: r3 is object map and r4 is function prototype. |
6081 // Get prototype of object into r2. | 6103 // Get prototype of object into r2. |
6082 __ ldr(r2, FieldMemOperand(r3, Map::kPrototypeOffset)); | 6104 __ ldr(r2, FieldMemOperand(r3, Map::kPrototypeOffset)); |
6083 | 6105 |
6084 // Loop through the prototype chain looking for the function prototype. | 6106 // Loop through the prototype chain looking for the function prototype. |
6085 __ bind(&loop); | 6107 __ bind(&loop); |
6086 __ cmp(r2, Operand(r4)); | 6108 __ cmp(r2, Operand(r4)); |
6087 __ b(eq, &is_instance); | 6109 __ b(eq, &is_instance); |
6088 __ cmp(r2, Operand(Factory::null_value())); | 6110 __ LoadRoot(ip, Heap::kNullValueRootIndex); |
| 6111 __ cmp(r2, ip); |
6089 __ b(eq, &is_not_instance); | 6112 __ b(eq, &is_not_instance); |
6090 __ ldr(r2, FieldMemOperand(r2, HeapObject::kMapOffset)); | 6113 __ ldr(r2, FieldMemOperand(r2, HeapObject::kMapOffset)); |
6091 __ ldr(r2, FieldMemOperand(r2, Map::kPrototypeOffset)); | 6114 __ ldr(r2, FieldMemOperand(r2, Map::kPrototypeOffset)); |
6092 __ jmp(&loop); | 6115 __ jmp(&loop); |
6093 | 6116 |
6094 __ bind(&is_instance); | 6117 __ bind(&is_instance); |
6095 __ mov(r0, Operand(Smi::FromInt(0))); | 6118 __ mov(r0, Operand(Smi::FromInt(0))); |
6096 __ pop(); | 6119 __ pop(); |
6097 __ pop(); | 6120 __ pop(); |
6098 __ mov(pc, Operand(lr)); // Return. | 6121 __ mov(pc, Operand(lr)); // Return. |
(...skipping 134 matching lines...)
6233 int CompareStub::MinorKey() { | 6256 int CompareStub::MinorKey() { |
6234 // Encode the two parameters in a unique 16 bit value. | 6257 // Encode the two parameters in a unique 16 bit value. |
6235 ASSERT(static_cast<unsigned>(cc_) >> 28 < (1 << 15)); | 6258 ASSERT(static_cast<unsigned>(cc_) >> 28 < (1 << 15)); |
6236 return (static_cast<unsigned>(cc_) >> 27) | (strict_ ? 1 : 0); | 6259 return (static_cast<unsigned>(cc_) >> 27) | (strict_ ? 1 : 0); |
6237 } | 6260 } |
6238 | 6261 |
6239 | 6262 |
6240 #undef __ | 6263 #undef __ |
6241 | 6264 |
6242 } } // namespace v8::internal | 6265 } } // namespace v8::internal |