OLD | NEW |
1 // Copyright 2006-2009 the V8 project authors. All rights reserved. | 1 // Copyright 2006-2009 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 287 matching lines...)
298 } | 298 } |
299 } | 299 } |
300 | 300 |
301 // Generate the return sequence if necessary. | 301 // Generate the return sequence if necessary. |
302 if (frame_ != NULL || function_return_.is_linked()) { | 302 if (frame_ != NULL || function_return_.is_linked()) { |
303 // exit | 303 // exit |
304 // r0: result | 304 // r0: result |
305 // sp: stack pointer | 305 // sp: stack pointer |
306 // fp: frame pointer | 306 // fp: frame pointer |
307 // cp: callee's context | 307 // cp: callee's context |
308 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex); | 308 __ mov(r0, Operand(Factory::undefined_value())); |
309 | 309 |
310 function_return_.Bind(); | 310 function_return_.Bind(); |
311 if (FLAG_trace) { | 311 if (FLAG_trace) { |
312 // Push the return value on the stack as the parameter. | 312 // Push the return value on the stack as the parameter. |
313 // Runtime::TraceExit returns the parameter as it is. | 313 // Runtime::TraceExit returns the parameter as it is. |
314 frame_->EmitPush(r0); | 314 frame_->EmitPush(r0); |
315 frame_->CallRuntime(Runtime::kTraceExit, 1); | 315 frame_->CallRuntime(Runtime::kTraceExit, 1); |
316 } | 316 } |
317 | 317 |
318 // Tear down the frame which will restore the caller's frame pointer and | 318 // Tear down the frame which will restore the caller's frame pointer and |
(...skipping 152 matching lines...)
471 #endif | 471 #endif |
472 JumpTarget true_target; | 472 JumpTarget true_target; |
473 JumpTarget false_target; | 473 JumpTarget false_target; |
474 LoadCondition(x, typeof_state, &true_target, &false_target, false); | 474 LoadCondition(x, typeof_state, &true_target, &false_target, false); |
475 | 475 |
476 if (has_cc()) { | 476 if (has_cc()) { |
477 // Convert cc_reg_ into a boolean value. | 477 // Convert cc_reg_ into a boolean value. |
478 JumpTarget loaded; | 478 JumpTarget loaded; |
479 JumpTarget materialize_true; | 479 JumpTarget materialize_true; |
480 materialize_true.Branch(cc_reg_); | 480 materialize_true.Branch(cc_reg_); |
481 __ LoadRoot(r0, Heap::kFalseValueRootIndex); | 481 __ mov(r0, Operand(Factory::false_value())); |
482 frame_->EmitPush(r0); | 482 frame_->EmitPush(r0); |
483 loaded.Jump(); | 483 loaded.Jump(); |
484 materialize_true.Bind(); | 484 materialize_true.Bind(); |
485 __ LoadRoot(r0, Heap::kTrueValueRootIndex); | 485 __ mov(r0, Operand(Factory::true_value())); |
486 frame_->EmitPush(r0); | 486 frame_->EmitPush(r0); |
487 loaded.Bind(); | 487 loaded.Bind(); |
488 cc_reg_ = al; | 488 cc_reg_ = al; |
489 } | 489 } |
490 | 490 |
491 if (true_target.is_linked() || false_target.is_linked()) { | 491 if (true_target.is_linked() || false_target.is_linked()) { |
492 // We have at least one condition value that has been "translated" | 492 // We have at least one condition value that has been "translated" |
493 // into a branch, thus it needs to be loaded explicitly. | 493 // into a branch, thus it needs to be loaded explicitly. |
494 JumpTarget loaded; | 494 JumpTarget loaded; |
495 if (frame_ != NULL) { | 495 if (frame_ != NULL) { |
496 loaded.Jump(); // Don't lose the current TOS. | 496 loaded.Jump(); // Don't lose the current TOS. |
497 } | 497 } |
498 bool both = true_target.is_linked() && false_target.is_linked(); | 498 bool both = true_target.is_linked() && false_target.is_linked(); |
499 // Load "true" if necessary. | 499 // Load "true" if necessary. |
500 if (true_target.is_linked()) { | 500 if (true_target.is_linked()) { |
501 true_target.Bind(); | 501 true_target.Bind(); |
502 __ LoadRoot(r0, Heap::kTrueValueRootIndex); | 502 __ mov(r0, Operand(Factory::true_value())); |
503 frame_->EmitPush(r0); | 503 frame_->EmitPush(r0); |
504 } | 504 } |
505 // If both "true" and "false" need to be loaded jump across the code for | 505 // If both "true" and "false" need to be loaded jump across the code for |
506 // "false". | 506 // "false". |
507 if (both) { | 507 if (both) { |
508 loaded.Jump(); | 508 loaded.Jump(); |
509 } | 509 } |
510 // Load "false" if necessary. | 510 // Load "false" if necessary. |
511 if (false_target.is_linked()) { | 511 if (false_target.is_linked()) { |
512 false_target.Bind(); | 512 false_target.Bind(); |
513 __ LoadRoot(r0, Heap::kFalseValueRootIndex); | 513 __ mov(r0, Operand(Factory::false_value())); |
514 frame_->EmitPush(r0); | 514 frame_->EmitPush(r0); |
515 } | 515 } |
516 // A value is loaded on all paths reaching this point. | 516 // A value is loaded on all paths reaching this point. |
517 loaded.Bind(); | 517 loaded.Bind(); |
518 } | 518 } |
519 ASSERT(has_valid_frame()); | 519 ASSERT(has_valid_frame()); |
520 ASSERT(!has_cc()); | 520 ASSERT(!has_cc()); |
521 ASSERT(frame_->height() == original_height + 1); | 521 ASSERT(frame_->height() == original_height + 1); |
522 } | 522 } |
523 | 523 |
(...skipping 109 matching lines...)
633 void CodeGenerator::ToBoolean(JumpTarget* true_target, | 633 void CodeGenerator::ToBoolean(JumpTarget* true_target, |
634 JumpTarget* false_target) { | 634 JumpTarget* false_target) { |
635 VirtualFrame::SpilledScope spilled_scope; | 635 VirtualFrame::SpilledScope spilled_scope; |
636 // Note: The generated code snippet does not change stack variables. | 636 // Note: The generated code snippet does not change stack variables. |
637 // Only the condition code should be set. | 637 // Only the condition code should be set. |
638 frame_->EmitPop(r0); | 638 frame_->EmitPop(r0); |
639 | 639 |
640 // Fast case checks | 640 // Fast case checks |
641 | 641 |
642 // Check if the value is 'false'. | 642 // Check if the value is 'false'. |
643 __ LoadRoot(ip, Heap::kFalseValueRootIndex); | 643 __ cmp(r0, Operand(Factory::false_value())); |
644 __ cmp(r0, ip); | |
645 false_target->Branch(eq); | 644 false_target->Branch(eq); |
646 | 645 |
647 // Check if the value is 'true'. | 646 // Check if the value is 'true'. |
648 __ LoadRoot(ip, Heap::kTrueValueRootIndex); | 647 __ cmp(r0, Operand(Factory::true_value())); |
649 __ cmp(r0, ip); | |
650 true_target->Branch(eq); | 648 true_target->Branch(eq); |
651 | 649 |
652 // Check if the value is 'undefined'. | 650 // Check if the value is 'undefined'. |
653 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex); | 651 __ cmp(r0, Operand(Factory::undefined_value())); |
654 __ cmp(r0, ip); | |
655 false_target->Branch(eq); | 652 false_target->Branch(eq); |
656 | 653 |
657 // Check if the value is a smi. | 654 // Check if the value is a smi. |
658 __ cmp(r0, Operand(Smi::FromInt(0))); | 655 __ cmp(r0, Operand(Smi::FromInt(0))); |
659 false_target->Branch(eq); | 656 false_target->Branch(eq); |
660 __ tst(r0, Operand(kSmiTagMask)); | 657 __ tst(r0, Operand(kSmiTagMask)); |
661 true_target->Branch(eq); | 658 true_target->Branch(eq); |
662 | 659 |
663 // Slow case: call the runtime. | 660 // Slow case: call the runtime. |
664 frame_->EmitPush(r0); | 661 frame_->EmitPush(r0); |
665 frame_->CallRuntime(Runtime::kToBool, 1); | 662 frame_->CallRuntime(Runtime::kToBool, 1); |
666 // Convert the result (r0) to a condition code. | 663 // Convert the result (r0) to a condition code. |
667 __ LoadRoot(ip, Heap::kFalseValueRootIndex); | 664 __ cmp(r0, Operand(Factory::false_value())); |
668 __ cmp(r0, ip); | |
669 | 665 |
670 cc_reg_ = ne; | 666 cc_reg_ = ne; |
671 } | 667 } |
672 | 668 |
673 | 669 |
674 void CodeGenerator::GenericBinaryOperation(Token::Value op, | 670 void CodeGenerator::GenericBinaryOperation(Token::Value op, |
675 OverwriteMode overwrite_mode, | 671 OverwriteMode overwrite_mode, |
676 int constant_rhs) { | 672 int constant_rhs) { |
677 VirtualFrame::SpilledScope spilled_scope; | 673 VirtualFrame::SpilledScope spilled_scope; |
678 // sp[0] : y | 674 // sp[0] : y |
(...skipping 503 matching lines...)
1182 // Declaration nodes are always declared in only two modes. | 1178 // Declaration nodes are always declared in only two modes. |
1183 ASSERT(node->mode() == Variable::VAR || node->mode() == Variable::CONST); | 1179 ASSERT(node->mode() == Variable::VAR || node->mode() == Variable::CONST); |
1184 PropertyAttributes attr = node->mode() == Variable::VAR ? NONE : READ_ONLY; | 1180 PropertyAttributes attr = node->mode() == Variable::VAR ? NONE : READ_ONLY; |
1185 __ mov(r0, Operand(Smi::FromInt(attr))); | 1181 __ mov(r0, Operand(Smi::FromInt(attr))); |
1186 frame_->EmitPush(r0); | 1182 frame_->EmitPush(r0); |
1187 // Push initial value, if any. | 1183 // Push initial value, if any. |
1188 // Note: For variables we must not push an initial value (such as | 1184 // Note: For variables we must not push an initial value (such as |
1189 // 'undefined') because we may have a (legal) redeclaration and we | 1185 // 'undefined') because we may have a (legal) redeclaration and we |
1190 // must not destroy the current value. | 1186 // must not destroy the current value. |
1191 if (node->mode() == Variable::CONST) { | 1187 if (node->mode() == Variable::CONST) { |
1192 __ LoadRoot(r0, Heap::kTheHoleValueRootIndex); | 1188 __ mov(r0, Operand(Factory::the_hole_value())); |
1193 frame_->EmitPush(r0); | 1189 frame_->EmitPush(r0); |
1194 } else if (node->fun() != NULL) { | 1190 } else if (node->fun() != NULL) { |
1195 LoadAndSpill(node->fun()); | 1191 LoadAndSpill(node->fun()); |
1196 } else { | 1192 } else { |
1197 __ mov(r0, Operand(0)); // no initial value! | 1193 __ mov(r0, Operand(0)); // no initial value! |
1198 frame_->EmitPush(r0); | 1194 frame_->EmitPush(r0); |
1199 } | 1195 } |
1200 frame_->CallRuntime(Runtime::kDeclareContextSlot, 4); | 1196 frame_->CallRuntime(Runtime::kDeclareContextSlot, 4); |
1201 // Ignore the return value (declarations are statements). | 1197 // Ignore the return value (declarations are statements). |
1202 ASSERT(frame_->height() == original_height); | 1198 ASSERT(frame_->height() == original_height); |
(...skipping 519 matching lines...)
1722 JumpTarget entry(JumpTarget::BIDIRECTIONAL); | 1718 JumpTarget entry(JumpTarget::BIDIRECTIONAL); |
1723 JumpTarget end_del_check; | 1719 JumpTarget end_del_check; |
1724 JumpTarget exit; | 1720 JumpTarget exit; |
1725 | 1721 |
1726 // Get the object to enumerate over (converted to JSObject). | 1722 // Get the object to enumerate over (converted to JSObject). |
1727 LoadAndSpill(node->enumerable()); | 1723 LoadAndSpill(node->enumerable()); |
1728 | 1724 |
1729 // Both SpiderMonkey and kjs ignore null and undefined in contrast | 1725 // Both SpiderMonkey and kjs ignore null and undefined in contrast |
1730 // to the specification. 12.6.4 mandates a call to ToObject. | 1726 // to the specification. 12.6.4 mandates a call to ToObject. |
1731 frame_->EmitPop(r0); | 1727 frame_->EmitPop(r0); |
1732 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex); | 1728 __ cmp(r0, Operand(Factory::undefined_value())); |
1733 __ cmp(r0, ip); | |
1734 exit.Branch(eq); | 1729 exit.Branch(eq); |
1735 __ LoadRoot(ip, Heap::kNullValueRootIndex); | 1730 __ cmp(r0, Operand(Factory::null_value())); |
1736 __ cmp(r0, ip); | |
1737 exit.Branch(eq); | 1731 exit.Branch(eq); |
1738 | 1732 |
1739 // Stack layout in body: | 1733 // Stack layout in body: |
1740 // [iteration counter (Smi)] | 1734 // [iteration counter (Smi)] |
1741 // [length of array] | 1735 // [length of array] |
1742 // [FixedArray] | 1736 // [FixedArray] |
1743 // [Map or 0] | 1737 // [Map or 0] |
1744 // [Object] | 1738 // [Object] |
1745 | 1739 |
1746 // Check if enumerable is already a JSObject | 1740 // Check if enumerable is already a JSObject |
(...skipping 11 matching lines...)
1758 jsobject.Bind(); | 1752 jsobject.Bind(); |
1759 // Get the set of properties (as a FixedArray or Map). | 1753 // Get the set of properties (as a FixedArray or Map). |
1760 frame_->EmitPush(r0); // duplicate the object being enumerated | 1754 frame_->EmitPush(r0); // duplicate the object being enumerated |
1761 frame_->EmitPush(r0); | 1755 frame_->EmitPush(r0); |
1762 frame_->CallRuntime(Runtime::kGetPropertyNamesFast, 1); | 1756 frame_->CallRuntime(Runtime::kGetPropertyNamesFast, 1); |
1763 | 1757 |
1764 // If we got a Map, we can do a fast modification check. | 1758 // If we got a Map, we can do a fast modification check. |
1765 // Otherwise, we got a FixedArray, and we have to do a slow check. | 1759 // Otherwise, we got a FixedArray, and we have to do a slow check. |
1766 __ mov(r2, Operand(r0)); | 1760 __ mov(r2, Operand(r0)); |
1767 __ ldr(r1, FieldMemOperand(r2, HeapObject::kMapOffset)); | 1761 __ ldr(r1, FieldMemOperand(r2, HeapObject::kMapOffset)); |
1768 __ LoadRoot(ip, Heap::kMetaMapRootIndex); | 1762 __ cmp(r1, Operand(Factory::meta_map())); |
1769 __ cmp(r1, ip); | |
1770 fixed_array.Branch(ne); | 1763 fixed_array.Branch(ne); |
1771 | 1764 |
1772 // Get enum cache | 1765 // Get enum cache |
1773 __ mov(r1, Operand(r0)); | 1766 __ mov(r1, Operand(r0)); |
1774 __ ldr(r1, FieldMemOperand(r1, Map::kInstanceDescriptorsOffset)); | 1767 __ ldr(r1, FieldMemOperand(r1, Map::kInstanceDescriptorsOffset)); |
1775 __ ldr(r1, FieldMemOperand(r1, DescriptorArray::kEnumerationIndexOffset)); | 1768 __ ldr(r1, FieldMemOperand(r1, DescriptorArray::kEnumerationIndexOffset)); |
1776 __ ldr(r2, | 1769 __ ldr(r2, |
1777 FieldMemOperand(r1, DescriptorArray::kEnumCacheBridgeCacheOffset)); | 1770 FieldMemOperand(r1, DescriptorArray::kEnumCacheBridgeCacheOffset)); |
1778 | 1771 |
1779 frame_->EmitPush(r0); // map | 1772 frame_->EmitPush(r0); // map |
(...skipping 53 matching lines...)
1833 // Convert the entry to a string (or null if it isn't a property anymore). | 1826 // Convert the entry to a string (or null if it isn't a property anymore). |
1834 __ ldr(r0, frame_->ElementAt(4)); // push enumerable | 1827 __ ldr(r0, frame_->ElementAt(4)); // push enumerable |
1835 frame_->EmitPush(r0); | 1828 frame_->EmitPush(r0); |
1836 frame_->EmitPush(r3); // push entry | 1829 frame_->EmitPush(r3); // push entry |
1837 Result arg_count_reg(r0); | 1830 Result arg_count_reg(r0); |
1838 __ mov(r0, Operand(1)); | 1831 __ mov(r0, Operand(1)); |
1839 frame_->InvokeBuiltin(Builtins::FILTER_KEY, CALL_JS, &arg_count_reg, 2); | 1832 frame_->InvokeBuiltin(Builtins::FILTER_KEY, CALL_JS, &arg_count_reg, 2); |
1840 __ mov(r3, Operand(r0)); | 1833 __ mov(r3, Operand(r0)); |
1841 | 1834 |
1842 // If the property has been removed while iterating, we just skip it. | 1835 // If the property has been removed while iterating, we just skip it. |
1843 __ LoadRoot(ip, Heap::kNullValueRootIndex); | 1836 __ cmp(r3, Operand(Factory::null_value())); |
1844 __ cmp(r3, ip); | |
1845 node->continue_target()->Branch(eq); | 1837 node->continue_target()->Branch(eq); |
1846 | 1838 |
1847 end_del_check.Bind(); | 1839 end_del_check.Bind(); |
1848 // Store the entry in the 'each' expression and take another spin in the | 1840 // Store the entry in the 'each' expression and take another spin in the |
1849 // loop. r3: i'th entry of the enum cache (or string thereof) | 1841 // loop. r3: i'th entry of the enum cache (or string thereof) |
1850 frame_->EmitPush(r3); // push entry | 1842 frame_->EmitPush(r3); // push entry |
1851 { Reference each(this, node->each()); | 1843 { Reference each(this, node->each()); |
1852 if (!each.is_illegal()) { | 1844 if (!each.is_illegal()) { |
1853 if (each.size() > 0) { | 1845 if (each.size() > 0) { |
1854 __ ldr(r0, frame_->ElementAt(each.size())); | 1846 __ ldr(r0, frame_->ElementAt(each.size())); |
(...skipping 239 matching lines...)
2094 if (has_valid_frame()) { | 2086 if (has_valid_frame()) { |
2095 // The next handler address is on top of the frame. | 2087 // The next handler address is on top of the frame. |
2096 ASSERT(StackHandlerConstants::kNextOffset == 0); | 2088 ASSERT(StackHandlerConstants::kNextOffset == 0); |
2097 frame_->EmitPop(r1); | 2089 frame_->EmitPop(r1); |
2098 __ mov(r3, Operand(handler_address)); | 2090 __ mov(r3, Operand(handler_address)); |
2099 __ str(r1, MemOperand(r3)); | 2091 __ str(r1, MemOperand(r3)); |
2100 frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1); | 2092 frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1); |
2101 | 2093 |
2102 // Fake a top of stack value (unneeded when FALLING) and set the | 2094 // Fake a top of stack value (unneeded when FALLING) and set the |
2103 // state in r2, then jump around the unlink blocks if any. | 2095 // state in r2, then jump around the unlink blocks if any. |
2104 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex); | 2096 __ mov(r0, Operand(Factory::undefined_value())); |
2105 frame_->EmitPush(r0); | 2097 frame_->EmitPush(r0); |
2106 __ mov(r2, Operand(Smi::FromInt(FALLING))); | 2098 __ mov(r2, Operand(Smi::FromInt(FALLING))); |
2107 if (nof_unlinks > 0) { | 2099 if (nof_unlinks > 0) { |
2108 finally_block.Jump(); | 2100 finally_block.Jump(); |
2109 } | 2101 } |
2110 } | 2102 } |
2111 | 2103 |
2112 // Generate code to unlink and set the state for the (formerly) | 2104 // Generate code to unlink and set the state for the (formerly) |
2113 // shadowing targets that have been jumped to. | 2105 // shadowing targets that have been jumped to. |
2114 for (int i = 0; i < shadows.length(); i++) { | 2106 for (int i = 0; i < shadows.length(); i++) { |
(...skipping 21 matching lines...)
2136 frame_->EmitPop(r1); | 2128 frame_->EmitPop(r1); |
2137 __ str(r1, MemOperand(r3)); | 2129 __ str(r1, MemOperand(r3)); |
2138 frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1); | 2130 frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1); |
2139 | 2131 |
2140 if (i == kReturnShadowIndex) { | 2132 if (i == kReturnShadowIndex) { |
2141 // If this label shadowed the function return, materialize the | 2133 // If this label shadowed the function return, materialize the |
2142 // return value on the stack. | 2134 // return value on the stack. |
2143 frame_->EmitPush(r0); | 2135 frame_->EmitPush(r0); |
2144 } else { | 2136 } else { |
2145 // Fake TOS for targets that shadowed breaks and continues. | 2137 // Fake TOS for targets that shadowed breaks and continues. |
2146 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex); | 2138 __ mov(r0, Operand(Factory::undefined_value())); |
2147 frame_->EmitPush(r0); | 2139 frame_->EmitPush(r0); |
2148 } | 2140 } |
2149 __ mov(r2, Operand(Smi::FromInt(JUMPING + i))); | 2141 __ mov(r2, Operand(Smi::FromInt(JUMPING + i))); |
2150 if (--nof_unlinks > 0) { | 2142 if (--nof_unlinks > 0) { |
2151 // If this is not the last unlink block, jump around the next. | 2143 // If this is not the last unlink block, jump around the next. |
2152 finally_block.Jump(); | 2144 finally_block.Jump(); |
2153 } | 2145 } |
2154 } | 2146 } |
2155 } | 2147 } |
2156 | 2148 |
(...skipping 166 matching lines...)
2323 Slot* potential_slot = slot->var()->local_if_not_shadowed()->slot(); | 2315 Slot* potential_slot = slot->var()->local_if_not_shadowed()->slot(); |
2324 // Only generate the fast case for locals that rewrite to slots. | 2316 // Only generate the fast case for locals that rewrite to slots. |
2325 // This rules out argument loads. | 2317 // This rules out argument loads. |
2326 if (potential_slot != NULL) { | 2318 if (potential_slot != NULL) { |
2327 __ ldr(r0, | 2319 __ ldr(r0, |
2328 ContextSlotOperandCheckExtensions(potential_slot, | 2320 ContextSlotOperandCheckExtensions(potential_slot, |
2329 r1, | 2321 r1, |
2330 r2, | 2322 r2, |
2331 &slow)); | 2323 &slow)); |
2332 if (potential_slot->var()->mode() == Variable::CONST) { | 2324 if (potential_slot->var()->mode() == Variable::CONST) { |
2333 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); | 2325 __ cmp(r0, Operand(Factory::the_hole_value())); |
2334 __ cmp(r0, ip); | 2326 __ mov(r0, Operand(Factory::undefined_value()), LeaveCC, eq); |
2335 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq); | |
2336 } | 2327 } |
2337 // There is always control flow to slow from | 2328 // There is always control flow to slow from |
2338 // ContextSlotOperandCheckExtensions so we have to jump around | 2329 // ContextSlotOperandCheckExtensions so we have to jump around |
2339 // it. | 2330 // it. |
2340 done.Jump(); | 2331 done.Jump(); |
2341 } | 2332 } |
2342 } | 2333 } |
2343 | 2334 |
2344 slow.Bind(); | 2335 slow.Bind(); |
2345 frame_->EmitPush(cp); | 2336 frame_->EmitPush(cp); |
(...skipping 16 matching lines...)
2362 | 2353 |
2363 // Special handling for locals allocated in registers. | 2354 // Special handling for locals allocated in registers. |
2364 __ ldr(r0, SlotOperand(slot, r2)); | 2355 __ ldr(r0, SlotOperand(slot, r2)); |
2365 frame_->EmitPush(r0); | 2356 frame_->EmitPush(r0); |
2366 if (slot->var()->mode() == Variable::CONST) { | 2357 if (slot->var()->mode() == Variable::CONST) { |
2367 // Const slots may contain 'the hole' value (the constant hasn't been | 2358 // Const slots may contain 'the hole' value (the constant hasn't been |
2368 // initialized yet) which needs to be converted into the 'undefined' | 2359 // initialized yet) which needs to be converted into the 'undefined' |
2369 // value. | 2360 // value. |
2370 Comment cmnt(masm_, "[ Unhole const"); | 2361 Comment cmnt(masm_, "[ Unhole const"); |
2371 frame_->EmitPop(r0); | 2362 frame_->EmitPop(r0); |
2372 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); | 2363 __ cmp(r0, Operand(Factory::the_hole_value())); |
2373 __ cmp(r0, ip); | 2364 __ mov(r0, Operand(Factory::undefined_value()), LeaveCC, eq); |
2374 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq); | |
2375 frame_->EmitPush(r0); | 2365 frame_->EmitPush(r0); |
2376 } | 2366 } |
2377 } | 2367 } |
2378 } | 2368 } |
2379 | 2369 |
2380 | 2370 |
2381 void CodeGenerator::LoadFromGlobalSlotCheckExtensions(Slot* slot, | 2371 void CodeGenerator::LoadFromGlobalSlotCheckExtensions(Slot* slot, |
2382 TypeofState typeof_state, | 2372 TypeofState typeof_state, |
2383 Register tmp, | 2373 Register tmp, |
2384 Register tmp2, | 2374 Register tmp2, |
(...skipping 22 matching lines...)
2407 } | 2397 } |
2408 | 2398 |
2409 if (s->is_eval_scope()) { | 2399 if (s->is_eval_scope()) { |
2410 Label next, fast; | 2400 Label next, fast; |
2411 if (!context.is(tmp)) { | 2401 if (!context.is(tmp)) { |
2412 __ mov(tmp, Operand(context)); | 2402 __ mov(tmp, Operand(context)); |
2413 } | 2403 } |
2414 __ bind(&next); | 2404 __ bind(&next); |
2415 // Terminate at global context. | 2405 // Terminate at global context. |
2416 __ ldr(tmp2, FieldMemOperand(tmp, HeapObject::kMapOffset)); | 2406 __ ldr(tmp2, FieldMemOperand(tmp, HeapObject::kMapOffset)); |
2417 __ LoadRoot(ip, Heap::kGlobalContextMapRootIndex); | 2407 __ cmp(tmp2, Operand(Factory::global_context_map())); |
2418 __ cmp(tmp2, ip); | |
2419 __ b(eq, &fast); | 2408 __ b(eq, &fast); |
2420 // Check that extension is NULL. | 2409 // Check that extension is NULL. |
2421 __ ldr(tmp2, ContextOperand(tmp, Context::EXTENSION_INDEX)); | 2410 __ ldr(tmp2, ContextOperand(tmp, Context::EXTENSION_INDEX)); |
2422 __ tst(tmp2, tmp2); | 2411 __ tst(tmp2, tmp2); |
2423 slow->Branch(ne); | 2412 slow->Branch(ne); |
2424 // Load next context in chain. | 2413 // Load next context in chain. |
2425 __ ldr(tmp, ContextOperand(tmp, Context::CLOSURE_INDEX)); | 2414 __ ldr(tmp, ContextOperand(tmp, Context::CLOSURE_INDEX)); |
2426 __ ldr(tmp, FieldMemOperand(tmp, JSFunction::kContextOffset)); | 2415 __ ldr(tmp, FieldMemOperand(tmp, JSFunction::kContextOffset)); |
2427 __ b(&next); | 2416 __ b(&next); |
2428 __ bind(&fast); | 2417 __ bind(&fast); |
(...skipping 76 matching lines...)
2505 | 2494 |
2506 // Load the literals array of the function. | 2495 // Load the literals array of the function. |
2507 __ ldr(r1, FieldMemOperand(r1, JSFunction::kLiteralsOffset)); | 2496 __ ldr(r1, FieldMemOperand(r1, JSFunction::kLiteralsOffset)); |
2508 | 2497 |
2509 // Load the literal at the ast saved index. | 2498 // Load the literal at the ast saved index. |
2510 int literal_offset = | 2499 int literal_offset = |
2511 FixedArray::kHeaderSize + node->literal_index() * kPointerSize; | 2500 FixedArray::kHeaderSize + node->literal_index() * kPointerSize; |
2512 __ ldr(r2, FieldMemOperand(r1, literal_offset)); | 2501 __ ldr(r2, FieldMemOperand(r1, literal_offset)); |
2513 | 2502 |
2514 JumpTarget done; | 2503 JumpTarget done; |
2515 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex); | 2504 __ cmp(r2, Operand(Factory::undefined_value())); |
2516 __ cmp(r2, ip); | |
2517 done.Branch(ne); | 2505 done.Branch(ne); |
2518 | 2506 |
2519 // If the entry is undefined we call the runtime system to compute | 2507 // If the entry is undefined we call the runtime system to compute |
2520 // the literal. | 2508 // the literal. |
2521 frame_->EmitPush(r1); // literal array (0) | 2509 frame_->EmitPush(r1); // literal array (0) |
2522 __ mov(r0, Operand(Smi::FromInt(node->literal_index()))); | 2510 __ mov(r0, Operand(Smi::FromInt(node->literal_index()))); |
2523 frame_->EmitPush(r0); // literal index (1) | 2511 frame_->EmitPush(r0); // literal index (1) |
2524 __ mov(r0, Operand(node->pattern())); // RegExp pattern (2) | 2512 __ mov(r0, Operand(node->pattern())); // RegExp pattern (2) |
2525 frame_->EmitPush(r0); | 2513 frame_->EmitPush(r0); |
2526 __ mov(r0, Operand(node->flags())); // RegExp flags (3) | 2514 __ mov(r0, Operand(node->flags())); // RegExp flags (3) |
(...skipping 61 matching lines...)
2588 // Load the literals array of the function. | 2576 // Load the literals array of the function. |
2589 __ ldr(r1, FieldMemOperand(r1, JSFunction::kLiteralsOffset)); | 2577 __ ldr(r1, FieldMemOperand(r1, JSFunction::kLiteralsOffset)); |
2590 | 2578 |
2591 // Load the literal at the ast saved index. | 2579 // Load the literal at the ast saved index. |
2592 int literal_offset = | 2580 int literal_offset = |
2593 FixedArray::kHeaderSize + node->literal_index() * kPointerSize; | 2581 FixedArray::kHeaderSize + node->literal_index() * kPointerSize; |
2594 __ ldr(r2, FieldMemOperand(r1, literal_offset)); | 2582 __ ldr(r2, FieldMemOperand(r1, literal_offset)); |
2595 | 2583 |
2596 // Check whether we need to materialize the object literal boilerplate. | 2584 // Check whether we need to materialize the object literal boilerplate. |
2597 // If so, jump to the deferred code. | 2585 // If so, jump to the deferred code. |
2598 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex); | 2586 __ cmp(r2, Operand(Factory::undefined_value())); |
2599 __ cmp(r2, Operand(ip)); | |
2600 deferred->Branch(eq); | 2587 deferred->Branch(eq); |
2601 deferred->BindExit(); | 2588 deferred->BindExit(); |
2602 | 2589 |
2603 // Push the object literal boilerplate. | 2590 // Push the object literal boilerplate. |
2604 frame_->EmitPush(r2); | 2591 frame_->EmitPush(r2); |
2605 | 2592 |
2606 // Clone the boilerplate object. | 2593 // Clone the boilerplate object. |
2607 Runtime::FunctionId clone_function_id = Runtime::kCloneLiteralBoilerplate; | 2594 Runtime::FunctionId clone_function_id = Runtime::kCloneLiteralBoilerplate; |
2608 if (node->depth() == 1) { | 2595 if (node->depth() == 1) { |
2609 clone_function_id = Runtime::kCloneShallowLiteralBoilerplate; | 2596 clone_function_id = Runtime::kCloneShallowLiteralBoilerplate; |
(...skipping 101 matching lines...)
2711 // Load the literals array of the function. | 2698 // Load the literals array of the function. |
2712 __ ldr(r1, FieldMemOperand(r1, JSFunction::kLiteralsOffset)); | 2699 __ ldr(r1, FieldMemOperand(r1, JSFunction::kLiteralsOffset)); |
2713 | 2700 |
2714 // Load the literal at the ast saved index. | 2701 // Load the literal at the ast saved index. |
2715 int literal_offset = | 2702 int literal_offset = |
2716 FixedArray::kHeaderSize + node->literal_index() * kPointerSize; | 2703 FixedArray::kHeaderSize + node->literal_index() * kPointerSize; |
2717 __ ldr(r2, FieldMemOperand(r1, literal_offset)); | 2704 __ ldr(r2, FieldMemOperand(r1, literal_offset)); |
2718 | 2705 |
2719 // Check whether we need to materialize the object literal boilerplate. | 2706 // Check whether we need to materialize the object literal boilerplate. |
2720 // If so, jump to the deferred code. | 2707 // If so, jump to the deferred code. |
2721 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex); | 2708 __ cmp(r2, Operand(Factory::undefined_value())); |
2722 __ cmp(r2, Operand(ip)); | |
2723 deferred->Branch(eq); | 2709 deferred->Branch(eq); |
2724 deferred->BindExit(); | 2710 deferred->BindExit(); |
2725 | 2711 |
2726 // Push the object literal boilerplate. | 2712 // Push the object literal boilerplate. |
2727 frame_->EmitPush(r2); | 2713 frame_->EmitPush(r2); |
2728 | 2714 |
2729 // Clone the boilerplate object. | 2715 // Clone the boilerplate object. |
2730 Runtime::FunctionId clone_function_id = Runtime::kCloneLiteralBoilerplate; | 2716 Runtime::FunctionId clone_function_id = Runtime::kCloneLiteralBoilerplate; |
2731 if (node->depth() == 1) { | 2717 if (node->depth() == 1) { |
2732 clone_function_id = Runtime::kCloneShallowLiteralBoilerplate; | 2718 clone_function_id = Runtime::kCloneShallowLiteralBoilerplate; |
(...skipping 310 matching lines...)
3043 // the function we need to call and the receiver of the call. | 3029 // the function we need to call and the receiver of the call. |
3044 // Then we call the resolved function using the given arguments. | 3030 // Then we call the resolved function using the given arguments. |
3045 | 3031 |
3046 ZoneList<Expression*>* args = node->arguments(); | 3032 ZoneList<Expression*>* args = node->arguments(); |
3047 Expression* function = node->expression(); | 3033 Expression* function = node->expression(); |
3048 | 3034 |
3049 CodeForStatementPosition(node); | 3035 CodeForStatementPosition(node); |
3050 | 3036 |
3051 // Prepare stack for call to resolved function. | 3037 // Prepare stack for call to resolved function. |
3052 LoadAndSpill(function); | 3038 LoadAndSpill(function); |
3053 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex); | 3039 __ mov(r2, Operand(Factory::undefined_value())); |
3054 frame_->EmitPush(r2); // Slot for receiver | 3040 frame_->EmitPush(r2); // Slot for receiver |
3055 int arg_count = args->length(); | 3041 int arg_count = args->length(); |
3056 for (int i = 0; i < arg_count; i++) { | 3042 for (int i = 0; i < arg_count; i++) { |
3057 LoadAndSpill(args->at(i)); | 3043 LoadAndSpill(args->at(i)); |
3058 } | 3044 } |
3059 | 3045 |
3060 // Prepare stack for call to ResolvePossiblyDirectEval. | 3046 // Prepare stack for call to ResolvePossiblyDirectEval. |
3061 __ ldr(r1, MemOperand(sp, arg_count * kPointerSize + kPointerSize)); | 3047 __ ldr(r1, MemOperand(sp, arg_count * kPointerSize + kPointerSize)); |
3062 frame_->EmitPush(r1); | 3048 frame_->EmitPush(r1); |
3063 if (arg_count > 0) { | 3049 if (arg_count > 0) { |
(...skipping 123 matching lines...)
3187 leave.Jump(); | 3173 leave.Jump(); |
3188 | 3174 |
3189 // Objects with a non-function constructor have class 'Object'. | 3175 // Objects with a non-function constructor have class 'Object'. |
3190 non_function_constructor.Bind(); | 3176 non_function_constructor.Bind(); |
3191 __ mov(r0, Operand(Factory::Object_symbol())); | 3177 __ mov(r0, Operand(Factory::Object_symbol())); |
3192 frame_->EmitPush(r0); | 3178 frame_->EmitPush(r0); |
3193 leave.Jump(); | 3179 leave.Jump(); |
3194 | 3180 |
3195 // Non-JS objects have class null. | 3181 // Non-JS objects have class null. |
3196 null.Bind(); | 3182 null.Bind(); |
3197 __ LoadRoot(r0, Heap::kNullValueRootIndex); | 3183 __ mov(r0, Operand(Factory::null_value())); |
3198 frame_->EmitPush(r0); | 3184 frame_->EmitPush(r0); |
3199 | 3185 |
3200 // All done. | 3186 // All done. |
3201 leave.Bind(); | 3187 leave.Bind(); |
3202 } | 3188 } |
3203 | 3189 |
3204 | 3190 |
3205 void CodeGenerator::GenerateValueOf(ZoneList<Expression*>* args) { | 3191 void CodeGenerator::GenerateValueOf(ZoneList<Expression*>* args) { |
3206 VirtualFrame::SpilledScope spilled_scope; | 3192 VirtualFrame::SpilledScope spilled_scope; |
3207 ASSERT(args->length() == 1); | 3193 ASSERT(args->length() == 1); |
(...skipping 52 matching lines...)
3260 VirtualFrame::SpilledScope spilled_scope; | 3246 VirtualFrame::SpilledScope spilled_scope; |
3261 // See comment in CodeGenerator::GenerateLog in codegen-ia32.cc. | 3247 // See comment in CodeGenerator::GenerateLog in codegen-ia32.cc. |
3262 ASSERT_EQ(args->length(), 3); | 3248 ASSERT_EQ(args->length(), 3); |
3263 #ifdef ENABLE_LOGGING_AND_PROFILING | 3249 #ifdef ENABLE_LOGGING_AND_PROFILING |
3264 if (ShouldGenerateLog(args->at(0))) { | 3250 if (ShouldGenerateLog(args->at(0))) { |
3265 LoadAndSpill(args->at(1)); | 3251 LoadAndSpill(args->at(1)); |
3266 LoadAndSpill(args->at(2)); | 3252 LoadAndSpill(args->at(2)); |
3267 __ CallRuntime(Runtime::kLog, 2); | 3253 __ CallRuntime(Runtime::kLog, 2); |
3268 } | 3254 } |
3269 #endif | 3255 #endif |
3270 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex); | 3256 __ mov(r0, Operand(Factory::undefined_value())); |
3271 frame_->EmitPush(r0); | 3257 frame_->EmitPush(r0); |
3272 } | 3258 } |
3273 | 3259 |
3274 | 3260 |
3275 void CodeGenerator::GenerateIsNonNegativeSmi(ZoneList<Expression*>* args) { | 3261 void CodeGenerator::GenerateIsNonNegativeSmi(ZoneList<Expression*>* args) { |
3276 VirtualFrame::SpilledScope spilled_scope; | 3262 VirtualFrame::SpilledScope spilled_scope; |
3277 ASSERT(args->length() == 1); | 3263 ASSERT(args->length() == 1); |
3278 LoadAndSpill(args->at(0)); | 3264 LoadAndSpill(args->at(0)); |
3279 frame_->EmitPop(r0); | 3265 frame_->EmitPop(r0); |
3280 __ tst(r0, Operand(kSmiTagMask | 0x80000000u)); | 3266 __ tst(r0, Operand(kSmiTagMask | 0x80000000u)); |
3281 cc_reg_ = eq; | 3267 cc_reg_ = eq; |
3282 } | 3268 } |
3283 | 3269 |
3284 | 3270 |
3285 // This should generate code that performs a charCodeAt() call or returns | 3271 // This should generate code that performs a charCodeAt() call or returns |
3286 // undefined in order to trigger the slow case, Runtime_StringCharCodeAt. | 3272 // undefined in order to trigger the slow case, Runtime_StringCharCodeAt. |
3287 // It is not yet implemented on ARM, so it always goes to the slow case. | 3273 // It is not yet implemented on ARM, so it always goes to the slow case. |
3288 void CodeGenerator::GenerateFastCharCodeAt(ZoneList<Expression*>* args) { | 3274 void CodeGenerator::GenerateFastCharCodeAt(ZoneList<Expression*>* args) { |
3289 VirtualFrame::SpilledScope spilled_scope; | 3275 VirtualFrame::SpilledScope spilled_scope; |
3290 ASSERT(args->length() == 2); | 3276 ASSERT(args->length() == 2); |
3291 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex); | 3277 __ mov(r0, Operand(Factory::undefined_value())); |
3292 frame_->EmitPush(r0); | 3278 frame_->EmitPush(r0); |
3293 } | 3279 } |
3294 | 3280 |
3295 | 3281 |
3296 void CodeGenerator::GenerateIsArray(ZoneList<Expression*>* args) { | 3282 void CodeGenerator::GenerateIsArray(ZoneList<Expression*>* args) { |
3297 VirtualFrame::SpilledScope spilled_scope; | 3283 VirtualFrame::SpilledScope spilled_scope; |
3298 ASSERT(args->length() == 1); | 3284 ASSERT(args->length() == 1); |
3299 LoadAndSpill(args->at(0)); | 3285 LoadAndSpill(args->at(0)); |
3300 JumpTarget answer; | 3286 JumpTarget answer; |
3301 // We need the CC bits to come out as not_equal in the case where the | 3287 // We need the CC bits to come out as not_equal in the case where the |
(...skipping 199 matching lines...)
3501 frame_->EmitPush(r0); | 3487 frame_->EmitPush(r0); |
3502 __ mov(r0, Operand(variable->name())); | 3488 __ mov(r0, Operand(variable->name())); |
3503 frame_->EmitPush(r0); | 3489 frame_->EmitPush(r0); |
3504 Result arg_count(r0); | 3490 Result arg_count(r0); |
3505 __ mov(r0, Operand(1)); // not counting receiver | 3491 __ mov(r0, Operand(1)); // not counting receiver |
3506 frame_->InvokeBuiltin(Builtins::DELETE, CALL_JS, &arg_count, 2); | 3492 frame_->InvokeBuiltin(Builtins::DELETE, CALL_JS, &arg_count, 2); |
3507 | 3493 |
3508 } else { | 3494 } else { |
3509 // Default: Result of deleting non-global, not dynamically | 3495 // Default: Result of deleting non-global, not dynamically |
3510 // introduced variables is false. | 3496 // introduced variables is false. |
3511 __ LoadRoot(r0, Heap::kFalseValueRootIndex); | 3497 __ mov(r0, Operand(Factory::false_value())); |
3512 } | 3498 } |
3513 | 3499 |
3514 } else { | 3500 } else { |
3515 // Default: Result of deleting expressions is true. | 3501 // Default: Result of deleting expressions is true. |
3516 LoadAndSpill(node->expression()); // may have side-effects | 3502 LoadAndSpill(node->expression()); // may have side-effects |
3517 frame_->Drop(); | 3503 frame_->Drop(); |
3518 __ LoadRoot(r0, Heap::kTrueValueRootIndex); | 3504 __ mov(r0, Operand(Factory::true_value())); |
3519 } | 3505 } |
3520 frame_->EmitPush(r0); | 3506 frame_->EmitPush(r0); |
3521 | 3507 |
3522 } else if (op == Token::TYPEOF) { | 3508 } else if (op == Token::TYPEOF) { |
3523 // Special case for loading the typeof expression; see comment on | 3509 // Special case for loading the typeof expression; see comment on |
3524 // LoadTypeofExpression(). | 3510 // LoadTypeofExpression(). |
3525 LoadTypeofExpression(node->expression()); | 3511 LoadTypeofExpression(node->expression()); |
3526 frame_->CallRuntime(Runtime::kTypeof, 1); | 3512 frame_->CallRuntime(Runtime::kTypeof, 1); |
3527 frame_->EmitPush(r0); // r0 has result | 3513 frame_->EmitPush(r0); // r0 has result |
3528 | 3514 |
(...skipping 32 matching lines...)
3561 smi_label.Bind(); | 3547 smi_label.Bind(); |
3562 __ mvn(r0, Operand(r0)); | 3548 __ mvn(r0, Operand(r0)); |
3563 __ bic(r0, r0, Operand(kSmiTagMask)); // bit-clear inverted smi-tag | 3549 __ bic(r0, r0, Operand(kSmiTagMask)); // bit-clear inverted smi-tag |
3564 continue_label.Bind(); | 3550 continue_label.Bind(); |
3565 break; | 3551 break; |
3566 } | 3552 } |
3567 | 3553 |
3568 case Token::VOID: | 3554 case Token::VOID: |
3569 // since the stack top is cached in r0, popping and then | 3555 // since the stack top is cached in r0, popping and then |
3570 // pushing a value can be done by just writing to r0. | 3556 // pushing a value can be done by just writing to r0. |
3571 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex); | 3557 __ mov(r0, Operand(Factory::undefined_value())); |
3572 break; | 3558 break; |
3573 | 3559 |
3574 case Token::ADD: { | 3560 case Token::ADD: { |
3575 // Smi check. | 3561 // Smi check. |
3576 JumpTarget continue_label; | 3562 JumpTarget continue_label; |
3577 __ tst(r0, Operand(kSmiTagMask)); | 3563 __ tst(r0, Operand(kSmiTagMask)); |
3578 continue_label.Branch(eq); | 3564 continue_label.Branch(eq); |
3579 frame_->EmitPush(r0); | 3565 frame_->EmitPush(r0); |
3580 Result arg_count(r0); | 3566 Result arg_count(r0); |
3581 __ mov(r0, Operand(0)); // not counting receiver | 3567 __ mov(r0, Operand(0)); // not counting receiver |
(...skipping 305 matching lines...)
3887 // equality. | 3873 // equality. |
3888 if (op == Token::EQ || op == Token::EQ_STRICT) { | 3874 if (op == Token::EQ || op == Token::EQ_STRICT) { |
3889 bool left_is_null = | 3875 bool left_is_null = |
3890 left->AsLiteral() != NULL && left->AsLiteral()->IsNull(); | 3876 left->AsLiteral() != NULL && left->AsLiteral()->IsNull(); |
3891 bool right_is_null = | 3877 bool right_is_null = |
3892 right->AsLiteral() != NULL && right->AsLiteral()->IsNull(); | 3878 right->AsLiteral() != NULL && right->AsLiteral()->IsNull(); |
3893 // The 'null' value can only be equal to 'null' or 'undefined'. | 3879 // The 'null' value can only be equal to 'null' or 'undefined'. |
3894 if (left_is_null || right_is_null) { | 3880 if (left_is_null || right_is_null) { |
3895 LoadAndSpill(left_is_null ? right : left); | 3881 LoadAndSpill(left_is_null ? right : left); |
3896 frame_->EmitPop(r0); | 3882 frame_->EmitPop(r0); |
3897 __ LoadRoot(ip, Heap::kNullValueRootIndex); | 3883 __ cmp(r0, Operand(Factory::null_value())); |
3898 __ cmp(r0, ip); | |
3899 | 3884 |
3900 // The 'null' value is only equal to 'undefined' if using non-strict | 3885 // The 'null' value is only equal to 'undefined' if using non-strict |
3901 // comparisons. | 3886 // comparisons. |
3902 if (op != Token::EQ_STRICT) { | 3887 if (op != Token::EQ_STRICT) { |
3903 true_target()->Branch(eq); | 3888 true_target()->Branch(eq); |
3904 | 3889 |
3905 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex); | 3890 __ cmp(r0, Operand(Factory::undefined_value())); |
3906 __ cmp(r0, Operand(ip)); | |
3907 true_target()->Branch(eq); | 3891 true_target()->Branch(eq); |
3908 | 3892 |
3909 __ tst(r0, Operand(kSmiTagMask)); | 3893 __ tst(r0, Operand(kSmiTagMask)); |
3910 false_target()->Branch(eq); | 3894 false_target()->Branch(eq); |
3911 | 3895 |
3912 // It can be an undetectable object. | 3896 // It can be an undetectable object. |
3913 __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset)); | 3897 __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset)); |
3914 __ ldrb(r0, FieldMemOperand(r0, Map::kBitFieldOffset)); | 3898 __ ldrb(r0, FieldMemOperand(r0, Map::kBitFieldOffset)); |
3915 __ and_(r0, r0, Operand(1 << Map::kIsUndetectable)); | 3899 __ and_(r0, r0, Operand(1 << Map::kIsUndetectable)); |
3916 __ cmp(r0, Operand(1 << Map::kIsUndetectable)); | 3900 __ cmp(r0, Operand(1 << Map::kIsUndetectable)); |
(...skipping 16 matching lines...)
3933 Handle<String> check(String::cast(*right->AsLiteral()->handle())); | 3917 Handle<String> check(String::cast(*right->AsLiteral()->handle())); |
3934 | 3918 |
3935 // Load the operand, move it to register r1. | 3919 // Load the operand, move it to register r1. |
3936 LoadTypeofExpression(operation->expression()); | 3920 LoadTypeofExpression(operation->expression()); |
3937 frame_->EmitPop(r1); | 3921 frame_->EmitPop(r1); |
3938 | 3922 |
3939 if (check->Equals(Heap::number_symbol())) { | 3923 if (check->Equals(Heap::number_symbol())) { |
3940 __ tst(r1, Operand(kSmiTagMask)); | 3924 __ tst(r1, Operand(kSmiTagMask)); |
3941 true_target()->Branch(eq); | 3925 true_target()->Branch(eq); |
3942 __ ldr(r1, FieldMemOperand(r1, HeapObject::kMapOffset)); | 3926 __ ldr(r1, FieldMemOperand(r1, HeapObject::kMapOffset)); |
3943 __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex); | 3927 __ cmp(r1, Operand(Factory::heap_number_map())); |
3944 __ cmp(r1, ip); | |
3945 cc_reg_ = eq; | 3928 cc_reg_ = eq; |
3946 | 3929 |
3947 } else if (check->Equals(Heap::string_symbol())) { | 3930 } else if (check->Equals(Heap::string_symbol())) { |
3948 __ tst(r1, Operand(kSmiTagMask)); | 3931 __ tst(r1, Operand(kSmiTagMask)); |
3949 false_target()->Branch(eq); | 3932 false_target()->Branch(eq); |
3950 | 3933 |
3951 __ ldr(r1, FieldMemOperand(r1, HeapObject::kMapOffset)); | 3934 __ ldr(r1, FieldMemOperand(r1, HeapObject::kMapOffset)); |
3952 | 3935 |
3953 // It can be an undetectable string object. | 3936 // It can be an undetectable string object. |
3954 __ ldrb(r2, FieldMemOperand(r1, Map::kBitFieldOffset)); | 3937 __ ldrb(r2, FieldMemOperand(r1, Map::kBitFieldOffset)); |
3955 __ and_(r2, r2, Operand(1 << Map::kIsUndetectable)); | 3938 __ and_(r2, r2, Operand(1 << Map::kIsUndetectable)); |
3956 __ cmp(r2, Operand(1 << Map::kIsUndetectable)); | 3939 __ cmp(r2, Operand(1 << Map::kIsUndetectable)); |
3957 false_target()->Branch(eq); | 3940 false_target()->Branch(eq); |
3958 | 3941 |
3959 __ ldrb(r2, FieldMemOperand(r1, Map::kInstanceTypeOffset)); | 3942 __ ldrb(r2, FieldMemOperand(r1, Map::kInstanceTypeOffset)); |
3960 __ cmp(r2, Operand(FIRST_NONSTRING_TYPE)); | 3943 __ cmp(r2, Operand(FIRST_NONSTRING_TYPE)); |
3961 cc_reg_ = lt; | 3944 cc_reg_ = lt; |
3962 | 3945 |
3963 } else if (check->Equals(Heap::boolean_symbol())) { | 3946 } else if (check->Equals(Heap::boolean_symbol())) { |
3964 __ LoadRoot(ip, Heap::kTrueValueRootIndex); | 3947 __ cmp(r1, Operand(Factory::true_value())); |
3965 __ cmp(r1, ip); | |
3966 true_target()->Branch(eq); | 3948 true_target()->Branch(eq); |
3967 __ LoadRoot(ip, Heap::kFalseValueRootIndex); | 3949 __ cmp(r1, Operand(Factory::false_value())); |
3968 __ cmp(r1, ip); | |
3969 cc_reg_ = eq; | 3950 cc_reg_ = eq; |
3970 | 3951 |
3971 } else if (check->Equals(Heap::undefined_symbol())) { | 3952 } else if (check->Equals(Heap::undefined_symbol())) { |
3972 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex); | 3953 __ cmp(r1, Operand(Factory::undefined_value())); |
3973 __ cmp(r1, ip); | |
3974 true_target()->Branch(eq); | 3954 true_target()->Branch(eq); |
3975 | 3955 |
3976 __ tst(r1, Operand(kSmiTagMask)); | 3956 __ tst(r1, Operand(kSmiTagMask)); |
3977 false_target()->Branch(eq); | 3957 false_target()->Branch(eq); |
3978 | 3958 |
3979 // It can be an undetectable object. | 3959 // It can be an undetectable object. |
3980 __ ldr(r1, FieldMemOperand(r1, HeapObject::kMapOffset)); | 3960 __ ldr(r1, FieldMemOperand(r1, HeapObject::kMapOffset)); |
3981 __ ldrb(r2, FieldMemOperand(r1, Map::kBitFieldOffset)); | 3961 __ ldrb(r2, FieldMemOperand(r1, Map::kBitFieldOffset)); |
3982 __ and_(r2, r2, Operand(1 << Map::kIsUndetectable)); | 3962 __ and_(r2, r2, Operand(1 << Map::kIsUndetectable)); |
3983 __ cmp(r2, Operand(1 << Map::kIsUndetectable)); | 3963 __ cmp(r2, Operand(1 << Map::kIsUndetectable)); |
3984 | 3964 |
3985 cc_reg_ = eq; | 3965 cc_reg_ = eq; |
3986 | 3966 |
3987 } else if (check->Equals(Heap::function_symbol())) { | 3967 } else if (check->Equals(Heap::function_symbol())) { |
3988 __ tst(r1, Operand(kSmiTagMask)); | 3968 __ tst(r1, Operand(kSmiTagMask)); |
3989 false_target()->Branch(eq); | 3969 false_target()->Branch(eq); |
3990 __ CompareObjectType(r1, r1, r1, JS_FUNCTION_TYPE); | 3970 __ CompareObjectType(r1, r1, r1, JS_FUNCTION_TYPE); |
3991 cc_reg_ = eq; | 3971 cc_reg_ = eq; |
3992 | 3972 |
3993 } else if (check->Equals(Heap::object_symbol())) { | 3973 } else if (check->Equals(Heap::object_symbol())) { |
3994 __ tst(r1, Operand(kSmiTagMask)); | 3974 __ tst(r1, Operand(kSmiTagMask)); |
3995 false_target()->Branch(eq); | 3975 false_target()->Branch(eq); |
3996 | 3976 |
3997 __ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset)); | 3977 __ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset)); |
3998 __ LoadRoot(ip, Heap::kNullValueRootIndex); | 3978 __ cmp(r1, Operand(Factory::null_value())); |
3999 __ cmp(r1, ip); | |
4000 true_target()->Branch(eq); | 3979 true_target()->Branch(eq); |
4001 | 3980 |
4002 // It can be an undetectable object. | 3981 // It can be an undetectable object. |
4003 __ ldrb(r1, FieldMemOperand(r2, Map::kBitFieldOffset)); | 3982 __ ldrb(r1, FieldMemOperand(r2, Map::kBitFieldOffset)); |
4004 __ and_(r1, r1, Operand(1 << Map::kIsUndetectable)); | 3983 __ and_(r1, r1, Operand(1 << Map::kIsUndetectable)); |
4005 __ cmp(r1, Operand(1 << Map::kIsUndetectable)); | 3984 __ cmp(r1, Operand(1 << Map::kIsUndetectable)); |
4006 false_target()->Branch(eq); | 3985 false_target()->Branch(eq); |
4007 | 3986 |
4008 __ ldrb(r2, FieldMemOperand(r2, Map::kInstanceTypeOffset)); | 3987 __ ldrb(r2, FieldMemOperand(r2, Map::kInstanceTypeOffset)); |
4009 __ cmp(r2, Operand(FIRST_JS_OBJECT_TYPE)); | 3988 __ cmp(r2, Operand(FIRST_JS_OBJECT_TYPE)); |
(...skipping 210 matching lines...)
4220 ASSERT(!slot->var()->is_dynamic()); | 4199 ASSERT(!slot->var()->is_dynamic()); |
4221 | 4200 |
4222 JumpTarget exit; | 4201 JumpTarget exit; |
4223 if (init_state == CONST_INIT) { | 4202 if (init_state == CONST_INIT) { |
4224 ASSERT(slot->var()->mode() == Variable::CONST); | 4203 ASSERT(slot->var()->mode() == Variable::CONST); |
4225 // Only the first const initialization must be executed (the slot | 4204 // Only the first const initialization must be executed (the slot |
4226 // still contains 'the hole' value). When the assignment is | 4205 // still contains 'the hole' value). When the assignment is |
4227 // executed, the code is identical to a normal store (see below). | 4206 // executed, the code is identical to a normal store (see below). |
4228 Comment cmnt(masm, "[ Init const"); | 4207 Comment cmnt(masm, "[ Init const"); |
4229 __ ldr(r2, cgen_->SlotOperand(slot, r2)); | 4208 __ ldr(r2, cgen_->SlotOperand(slot, r2)); |
4230 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); | 4209 __ cmp(r2, Operand(Factory::the_hole_value())); |
4231 __ cmp(r2, ip); | |
4232 exit.Branch(ne); | 4210 exit.Branch(ne); |
4233 } | 4211 } |
4234 | 4212 |
4235 // We must execute the store. Storing a variable must keep the | 4213 // We must execute the store. Storing a variable must keep the |
4236 // (new) value on the stack. This is necessary for compiling | 4214 // (new) value on the stack. This is necessary for compiling |
4237 // assignment expressions. | 4215 // assignment expressions. |
4238 // | 4216 // |
4239 // Note: We will reach here even with slot->var()->mode() == | 4217 // Note: We will reach here even with slot->var()->mode() == |
4240 // Variable::CONST because of const declarations which will | 4218 // Variable::CONST because of const declarations which will |
4241 // initialize consts to 'the hole' value and by doing so, end up | 4219 // initialize consts to 'the hole' value and by doing so, end up |
(...skipping 712 matching lines...)
4954 __ add(result_reg, result_reg, Operand(HeapNumber::kSize)); | 4932 __ add(result_reg, result_reg, Operand(HeapNumber::kSize)); |
4955 // Compare new allocation top and limit. | 4933 // Compare new allocation top and limit. |
4956 __ cmp(result_reg, Operand(scratch2)); | 4934 __ cmp(result_reg, Operand(scratch2)); |
4957 // Branch if out of space in young generation. | 4935 // Branch if out of space in young generation. |
4958 __ b(hi, need_gc); | 4936 __ b(hi, need_gc); |
4959 // Store new allocation top. | 4937 // Store new allocation top. |
4960 __ str(result_reg, MemOperand(allocation_top_addr_reg)); // store new top | 4938 __ str(result_reg, MemOperand(allocation_top_addr_reg)); // store new top |
4961 // Tag and adjust back to start of new object. | 4939 // Tag and adjust back to start of new object. |
4962 __ sub(result_reg, result_reg, Operand(HeapNumber::kSize - kHeapObjectTag)); | 4940 __ sub(result_reg, result_reg, Operand(HeapNumber::kSize - kHeapObjectTag)); |
4963 // Get heap number map into scratch2. | 4941 // Get heap number map into scratch2. |
4964 __ LoadRoot(scratch2, Heap::kHeapNumberMapRootIndex); | 4942 __ mov(scratch2, Operand(Factory::heap_number_map())); |
4965 // Store heap number map in new object. | 4943 // Store heap number map in new object. |
4966 __ str(scratch2, FieldMemOperand(result_reg, HeapObject::kMapOffset)); | 4944 __ str(scratch2, FieldMemOperand(result_reg, HeapObject::kMapOffset)); |
4967 } | 4945 } |
4968 | 4946 |
4969 | 4947 |
4970 // We fall into this code if the operands were Smis, but the result was | 4948 // We fall into this code if the operands were Smis, but the result was |
4971 // not (e.g. overflow). We branch into this code (to the not_smi label) if | 4949 // not (e.g. overflow). We branch into this code (to the not_smi label) if |
4972 // the operands were not both Smi. The operands are in r0 and r1. In order | 4950 // the operands were not both Smi. The operands are in r0 and r1. In order |
4973 // to call the C-implemented binary fp operation routines we need to end up | 4951 // to call the C-implemented binary fp operation routines we need to end up |
4974 // with the double precision floating point operands in r0 and r1 (for the | 4952 // with the double precision floating point operands in r0 and r1 (for the |
(...skipping 1125 matching lines...)
6100 __ b(gt, &slow); | 6078 __ b(gt, &slow); |
6101 | 6079 |
6102 // Register mapping: r3 is object map and r4 is function prototype. | 6080 // Register mapping: r3 is object map and r4 is function prototype. |
6103 // Get prototype of object into r2. | 6081 // Get prototype of object into r2. |
6104 __ ldr(r2, FieldMemOperand(r3, Map::kPrototypeOffset)); | 6082 __ ldr(r2, FieldMemOperand(r3, Map::kPrototypeOffset)); |
6105 | 6083 |
6106 // Loop through the prototype chain looking for the function prototype. | 6084 // Loop through the prototype chain looking for the function prototype. |
6107 __ bind(&loop); | 6085 __ bind(&loop); |
6108 __ cmp(r2, Operand(r4)); | 6086 __ cmp(r2, Operand(r4)); |
6109 __ b(eq, &is_instance); | 6087 __ b(eq, &is_instance); |
6110 __ LoadRoot(ip, Heap::kNullValueRootIndex); | 6088 __ cmp(r2, Operand(Factory::null_value())); |
6111 __ cmp(r2, ip); | |
6112 __ b(eq, &is_not_instance); | 6089 __ b(eq, &is_not_instance); |
6113 __ ldr(r2, FieldMemOperand(r2, HeapObject::kMapOffset)); | 6090 __ ldr(r2, FieldMemOperand(r2, HeapObject::kMapOffset)); |
6114 __ ldr(r2, FieldMemOperand(r2, Map::kPrototypeOffset)); | 6091 __ ldr(r2, FieldMemOperand(r2, Map::kPrototypeOffset)); |
6115 __ jmp(&loop); | 6092 __ jmp(&loop); |
6116 | 6093 |
6117 __ bind(&is_instance); | 6094 __ bind(&is_instance); |
6118 __ mov(r0, Operand(Smi::FromInt(0))); | 6095 __ mov(r0, Operand(Smi::FromInt(0))); |
6119 __ pop(); | 6096 __ pop(); |
6120 __ pop(); | 6097 __ pop(); |
6121 __ mov(pc, Operand(lr)); // Return. | 6098 __ mov(pc, Operand(lr)); // Return. |
(...skipping 134 matching lines...)
6256 int CompareStub::MinorKey() { | 6233 int CompareStub::MinorKey() { |
6257 // Encode the two parameters in a unique 16 bit value. | 6234 // Encode the two parameters in a unique 16 bit value. |
6258 ASSERT(static_cast<unsigned>(cc_) >> 28 < (1 << 15)); | 6235 ASSERT(static_cast<unsigned>(cc_) >> 28 < (1 << 15)); |
6259 return (static_cast<unsigned>(cc_) >> 27) | (strict_ ? 1 : 0); | 6236 return (static_cast<unsigned>(cc_) >> 27) | (strict_ ? 1 : 0); |
6260 } | 6237 } |
6261 | 6238 |
6262 | 6239 |
6263 #undef __ | 6240 #undef __ |
6264 | 6241 |
6265 } } // namespace v8::internal | 6242 } } // namespace v8::internal |
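The edit repeated throughout this patch toggles how canonical heap values (undefined, true, false, null, the hole, and a few well-known maps) are materialized in the ARM code generator: the old column loads them from the root list via LoadRoot, using ip as a scratch register for comparisons, while the new column embeds Factory handles directly as immediate operands. Below is a minimal sketch of the two equivalent shapes for 'undefined', using only identifiers that already appear in this diff and assuming the surrounding CodeGenerator/MacroAssembler context (the __ macro, r0, and ip):

  // Root-list form (old column): load the value through the roots array;
  // comparisons go through the ip scratch register.
  __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
  __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
  __ cmp(r0, ip);

  // Factory-handle form (new column): the handle is encoded as an Operand
  // in the mov/cmp instruction itself, so no explicit scratch load is written.
  __ mov(r0, Operand(Factory::undefined_value()));
  __ cmp(r0, Operand(Factory::undefined_value()));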