Chromium Code Reviews

Unified Diff: src/x64/codegen-x64.cc

Issue 174639: Use a root array register for constant loads and stack guards on x64... (Closed)
Base URL: http://v8.googlecode.com/svn/branches/bleeding_edge/
Patch Set: Created 11 years, 3 months ago
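
For readers outside the x64 port: the point of the change is to dedicate one register to the heap's root list, so that frequently used constants (undefined, null, the hole, common maps) and the stack limit can be loaded or compared with a single root-relative memory operand instead of a movq of a 64-bit embedded pointer plus a RelocInfo::EMBEDDED_OBJECT entry. The r13 term dropped from the register-availability assertion in the first hunk suggests r13 is the reserved register. Below is a minimal sketch of what the CompareRoot/LoadRoot macros used throughout this diff might expand to; the register choice and the offset arithmetic are assumptions, not taken from this patch:

// Sketch only: assumes r13 is reserved as the root array register and that
// Heap::RootListIndex enumerates pointer-sized slots in the roots array.
static const Register kRootRegister = r13;

void MacroAssembler::LoadRoot(Register destination, Heap::RootListIndex index) {
  // One root-relative load; no embedded 64-bit pointer, no relocation entry.
  movq(destination, Operand(kRootRegister, index << kPointerSizeLog2));
}

void MacroAssembler::CompareRoot(Register with, Heap::RootListIndex index) {
  // Compare directly against the root-list slot; kScratchRegister stays free.
  cmpq(with, Operand(kRootRegister, index << kPointerSizeLog2));
}

Under that reading, the stack-guard hunk below works because the current stack limit is mirrored into a root slot (Heap::kStackLimitRootIndex), so the three-instruction guard check collapses into a single cmpq against memory.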
 // Copyright 2009 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 519 matching lines...)
   return (allocator()->count(rax) == (frame()->is_used(rax) ? 1 : 0))
       && (allocator()->count(rbx) == (frame()->is_used(rbx) ? 1 : 0))
       && (allocator()->count(rcx) == (frame()->is_used(rcx) ? 1 : 0))
       && (allocator()->count(rdx) == (frame()->is_used(rdx) ? 1 : 0))
       && (allocator()->count(rdi) == (frame()->is_used(rdi) ? 1 : 0))
       && (allocator()->count(r8) == (frame()->is_used(r8) ? 1 : 0))
       && (allocator()->count(r9) == (frame()->is_used(r9) ? 1 : 0))
       && (allocator()->count(r11) == (frame()->is_used(r11) ? 1 : 0))
       && (allocator()->count(r14) == (frame()->is_used(r14) ? 1 : 0))
       && (allocator()->count(r15) == (frame()->is_used(r15) ? 1 : 0))
-      && (allocator()->count(r13) == (frame()->is_used(r13) ? 1 : 0))
       && (allocator()->count(r12) == (frame()->is_used(r12) ? 1 : 0));
 }
 #endif
 
 
 class DeferredReferenceGetKeyedValue: public DeferredCode {
  public:
   explicit DeferredReferenceGetKeyedValue(Register dst,
                                           Register receiver,
                                           Register key,
(...skipping 300 matching lines...)
 
 void DeferredStackCheck::Generate() {
   StackCheckStub stub;
   __ CallStub(&stub);
 }
 
 
 void CodeGenerator::CheckStack() {
   if (FLAG_check_stack) {
     DeferredStackCheck* deferred = new DeferredStackCheck;
-    ExternalReference stack_guard_limit =
-        ExternalReference::address_of_stack_guard_limit();
-    __ movq(kScratchRegister, stack_guard_limit);
-    __ cmpq(rsp, Operand(kScratchRegister, 0));
+    __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
     deferred->Branch(below);
     deferred->BindExit();
   }
 }
 
 
 void CodeGenerator::VisitAndSpill(Statement* statement) {
   // TODO(X64): No architecture specific code. Move to shared location.
   ASSERT(in_spilled_code());
   set_in_spilled_code(false);
(...skipping 59 matching lines...)
     frame_->EmitPush(kScratchRegister);
     // Declaration nodes are always introduced in one of two modes.
     ASSERT(node->mode() == Variable::VAR || node->mode() == Variable::CONST);
     PropertyAttributes attr = node->mode() == Variable::VAR ? NONE : READ_ONLY;
     frame_->EmitPush(Immediate(Smi::FromInt(attr)));
     // Push initial value, if any.
     // Note: For variables we must not push an initial value (such as
     // 'undefined') because we may have a (legal) redeclaration and we
     // must not destroy the current value.
     if (node->mode() == Variable::CONST) {
-      __ movq(kScratchRegister, Factory::the_hole_value(),
-              RelocInfo::EMBEDDED_OBJECT);
-      frame_->EmitPush(kScratchRegister);
+      frame_->EmitPush(Heap::kTheHoleValueRootIndex);
     } else if (node->fun() != NULL) {
       Load(node->fun());
     } else {
       frame_->EmitPush(Immediate(Smi::FromInt(0)));  // no initial value!
     }
     Result ignored = frame_->CallRuntime(Runtime::kDeclareContextSlot, 4);
     // Ignore the return value (declarations are statements).
     return;
   }
 
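The frame_->EmitPush(Heap::kTheHoleValueRootIndex) call above (and the similar pushes of the undefined value later in the diff) implies a virtual-frame overload that pushes a root-list slot directly. A plausible sketch under the same assumptions as before — the helper name and body are guesses, not part of this patch:

// Hypothetical push path: push the root slot straight from memory instead of
// materializing the constant into kScratchRegister first.
void MacroAssembler::PushRoot(Heap::RootListIndex index) {
  push(Operand(kRootRegister, index << kPointerSizeLog2));
}

Besides saving two instructions, this keeps kScratchRegister free and removes another RelocInfo::EMBEDDED_OBJECT entry from the generated code.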
(...skipping 685 matching lines...)
   JumpTarget exit;
 
   // Get the object to enumerate over (converted to JSObject).
   LoadAndSpill(node->enumerable());
 
   // Both SpiderMonkey and kjs ignore null and undefined in contrast
   // to the specification. 12.6.4 mandates a call to ToObject.
   frame_->EmitPop(rax);
 
   // rax: value to be iterated over
-  __ Cmp(rax, Factory::undefined_value());
+  __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
   exit.Branch(equal);
-  __ Cmp(rax, Factory::null_value());
+  __ CompareRoot(rax, Heap::kNullValueRootIndex);
   exit.Branch(equal);
 
   // Stack layout in body:
   // [iteration counter (smi)] <- slot 0
   // [length of array]         <- slot 1
   // [FixedArray]              <- slot 2
   // [Map or 0]                <- slot 3
   // [Object]                  <- slot 4
 
   // Check if enumerable is already a JSObject
(...skipping 15 matching lines...)
 
   frame_->EmitPush(rax);  // push the Object (slot 4) for the runtime call
   frame_->CallRuntime(Runtime::kGetPropertyNamesFast, 1);
 
   // If we got a Map, we can do a fast modification check.
   // Otherwise, we got a FixedArray, and we have to do a slow check.
   // rax: map or fixed array (result from call to
   //      Runtime::kGetPropertyNamesFast)
   __ movq(rdx, rax);
   __ movq(rcx, FieldOperand(rdx, HeapObject::kMapOffset));
-  __ Cmp(rcx, Factory::meta_map());
+  __ CompareRoot(rcx, Heap::kMetaMapRootIndex);
   fixed_array.Branch(not_equal);
 
   // Get enum cache
   // rax: map (result from call to Runtime::kGetPropertyNamesFast)
   __ movq(rcx, rax);
   __ movq(rcx, FieldOperand(rcx, Map::kInstanceDescriptorsOffset));
   // Get the bridge array held in the enumeration index field.
   __ movq(rcx, FieldOperand(rcx, DescriptorArray::kEnumerationIndexOffset));
   // Get the cache from the bridge array.
   __ movq(rdx, FieldOperand(rcx, DescriptorArray::kEnumCacheBridgeCacheOffset));
(...skipping 48 matching lines...)
   __ cmpq(rcx, rdx);
   end_del_check.Branch(equal);
 
   // Convert the entry to a string (or null if it isn't a property anymore).
   frame_->EmitPush(frame_->ElementAt(4));  // push enumerable
   frame_->EmitPush(rbx);  // push entry
   frame_->InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION, 2);
   __ movq(rbx, rax);
 
   // If the property has been removed while iterating, we just skip it.
-  __ Cmp(rbx, Factory::null_value());
+  __ CompareRoot(rbx, Heap::kNullValueRootIndex);
   node->continue_target()->Branch(equal);
 
   end_del_check.Bind();
   // Store the entry in the 'each' expression and take another spin in the
   // loop. rdx: i'th entry of the enum cache (or string thereof)
   frame_->EmitPush(rbx);
   { Reference each(this, node->each());
     // Loading a reference may leave the frame in an unspilled state.
     frame_->SpillAll();
     if (!each.is_illegal()) {
(...skipping 254 matching lines...)
   // chain and set the state on the frame to FALLING.
   if (has_valid_frame()) {
     // The next handler address is on top of the frame.
     ASSERT(StackHandlerConstants::kNextOffset == 0);
     __ movq(kScratchRegister, handler_address);
     frame_->EmitPop(Operand(kScratchRegister, 0));
     frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1);
 
     // Fake a top of stack value (unneeded when FALLING) and set the
     // state in rcx, then jump around the unlink blocks if any.
-    __ movq(kScratchRegister,
-            Factory::undefined_value(),
-            RelocInfo::EMBEDDED_OBJECT);
-    frame_->EmitPush(kScratchRegister);
+    frame_->EmitPush(Heap::kUndefinedValueRootIndex);
     __ movq(rcx, Immediate(Smi::FromInt(FALLING)));
     if (nof_unlinks > 0) {
       finally_block.Jump();
     }
   }
 
   // Generate code to unlink and set the state for the (formerly)
   // shadowing targets that have been jumped to.
   for (int i = 0; i < shadows.length(); i++) {
     if (shadows[i]->is_linked()) {
(...skipping 24 matching lines...)
       __ movq(kScratchRegister, handler_address);
       frame_->EmitPop(Operand(kScratchRegister, 0));
       frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1);
 
       if (i == kReturnShadowIndex) {
         // If this target shadowed the function return, materialize
         // the return value on the stack.
         frame_->EmitPush(rax);
       } else {
         // Fake TOS for targets that shadowed breaks and continues.
-        __ movq(kScratchRegister,
-                Factory::undefined_value(),
-                RelocInfo::EMBEDDED_OBJECT);
-        frame_->EmitPush(kScratchRegister);
+        frame_->EmitPush(Heap::kUndefinedValueRootIndex);
       }
       __ movq(rcx, Immediate(Smi::FromInt(JUMPING + i)));
       if (--nof_unlinks > 0) {
         // If this is not the last unlink block, jump around the next.
         finally_block.Jump();
       }
     }
   }
 
   // --- Finally block ---
(...skipping 221 matching lines...)
   Result boilerplate = allocator_->Allocate();
   ASSERT(boilerplate.is_valid());
   int literal_offset =
       FixedArray::kHeaderSize + node->literal_index() * kPointerSize;
   __ movq(boilerplate.reg(), FieldOperand(literals.reg(), literal_offset));
 
   // Check whether we need to materialize the RegExp object. If so,
   // jump to the deferred code passing the literals array.
   DeferredRegExpLiteral* deferred =
       new DeferredRegExpLiteral(boilerplate.reg(), literals.reg(), node);
-  __ Cmp(boilerplate.reg(), Factory::undefined_value());
+  __ CompareRoot(boilerplate.reg(), Heap::kUndefinedValueRootIndex);
   deferred->Branch(equal);
   deferred->BindExit();
   literals.Unuse();
 
   // Push the boilerplate object.
   frame_->Push(&boilerplate);
 }
 
 
 // Materialize the object literal 'node' in the literals array
(...skipping 50 matching lines...)
   Result boilerplate = allocator_->Allocate();
   ASSERT(boilerplate.is_valid());
   int literal_offset =
       FixedArray::kHeaderSize + node->literal_index() * kPointerSize;
   __ movq(boilerplate.reg(), FieldOperand(literals.reg(), literal_offset));
 
   // Check whether we need to materialize the object literal boilerplate.
   // If so, jump to the deferred code passing the literals array.
   DeferredObjectLiteral* deferred =
       new DeferredObjectLiteral(boilerplate.reg(), literals.reg(), node);
-  __ Cmp(boilerplate.reg(), Factory::undefined_value());
+  __ CompareRoot(boilerplate.reg(), Heap::kUndefinedValueRootIndex);
   deferred->Branch(equal);
   deferred->BindExit();
   literals.Unuse();
 
   // Push the boilerplate object.
   frame_->Push(&boilerplate);
   // Clone the boilerplate object.
   Runtime::FunctionId clone_function_id = Runtime::kCloneLiteralBoilerplate;
   if (node->depth() == 1) {
     clone_function_id = Runtime::kCloneShallowLiteralBoilerplate;
(...skipping 112 matching lines...)
   Result boilerplate = allocator_->Allocate();
   ASSERT(boilerplate.is_valid());
   int literal_offset =
       FixedArray::kHeaderSize + node->literal_index() * kPointerSize;
   __ movq(boilerplate.reg(), FieldOperand(literals.reg(), literal_offset));
 
   // Check whether we need to materialize the object literal boilerplate.
   // If so, jump to the deferred code passing the literals array.
   DeferredArrayLiteral* deferred =
       new DeferredArrayLiteral(boilerplate.reg(), literals.reg(), node);
-  __ Cmp(boilerplate.reg(), Factory::undefined_value());
+  __ CompareRoot(boilerplate.reg(), Heap::kUndefinedValueRootIndex);
   deferred->Branch(equal);
   deferred->BindExit();
   literals.Unuse();
 
   // Push the resulting array literal boilerplate on the stack.
   frame_->Push(&boilerplate);
   // Clone the boilerplate object.
   Runtime::FunctionId clone_function_id = Runtime::kCloneLiteralBoilerplate;
   if (node->depth() == 1) {
     clone_function_id = Runtime::kCloneShallowLiteralBoilerplate;
(...skipping 937 matching lines...)
   // Load the operand and move it to a register.
   LoadTypeofExpression(operation->expression());
   Result answer = frame_->Pop();
   answer.ToRegister();
 
   if (check->Equals(Heap::number_symbol())) {
     __ testl(answer.reg(), Immediate(kSmiTagMask));
     destination()->true_target()->Branch(zero);
     frame_->Spill(answer.reg());
     __ movq(answer.reg(), FieldOperand(answer.reg(), HeapObject::kMapOffset));
-    __ Cmp(answer.reg(), Factory::heap_number_map());
+    __ CompareRoot(answer.reg(), Heap::kHeapNumberMapRootIndex);
     answer.Unuse();
     destination()->Split(equal);
 
   } else if (check->Equals(Heap::string_symbol())) {
     __ testl(answer.reg(), Immediate(kSmiTagMask));
     destination()->false_target()->Branch(zero);
 
     // It can be an undetectable string object.
     __ movq(kScratchRegister,
             FieldOperand(answer.reg(), HeapObject::kMapOffset));
     __ testb(FieldOperand(kScratchRegister, Map::kBitFieldOffset),
              Immediate(1 << Map::kIsUndetectable));
     destination()->false_target()->Branch(not_zero);
     __ CmpInstanceType(kScratchRegister, FIRST_NONSTRING_TYPE);
     answer.Unuse();
     destination()->Split(below);  // Unsigned byte comparison needed.
 
   } else if (check->Equals(Heap::boolean_symbol())) {
-    __ Cmp(answer.reg(), Factory::true_value());
+    __ CompareRoot(answer.reg(), Heap::kTrueValueRootIndex);
     destination()->true_target()->Branch(equal);
-    __ Cmp(answer.reg(), Factory::false_value());
+    __ CompareRoot(answer.reg(), Heap::kFalseValueRootIndex);
     answer.Unuse();
     destination()->Split(equal);
 
   } else if (check->Equals(Heap::undefined_symbol())) {
-    __ Cmp(answer.reg(), Factory::undefined_value());
+    __ CompareRoot(answer.reg(), Heap::kUndefinedValueRootIndex);
     destination()->true_target()->Branch(equal);
 
     __ testl(answer.reg(), Immediate(kSmiTagMask));
     destination()->false_target()->Branch(zero);
 
     // It can be an undetectable object.
     __ movq(kScratchRegister,
             FieldOperand(answer.reg(), HeapObject::kMapOffset));
     __ testb(FieldOperand(kScratchRegister, Map::kBitFieldOffset),
              Immediate(1 << Map::kIsUndetectable));
     answer.Unuse();
     destination()->Split(not_zero);
 
   } else if (check->Equals(Heap::function_symbol())) {
     __ testl(answer.reg(), Immediate(kSmiTagMask));
     destination()->false_target()->Branch(zero);
     frame_->Spill(answer.reg());
     __ CmpObjectType(answer.reg(), JS_FUNCTION_TYPE, answer.reg());
     answer.Unuse();
     destination()->Split(equal);
 
   } else if (check->Equals(Heap::object_symbol())) {
     __ testl(answer.reg(), Immediate(kSmiTagMask));
     destination()->false_target()->Branch(zero);
-    __ Cmp(answer.reg(), Factory::null_value());
+    __ CompareRoot(answer.reg(), Heap::kNullValueRootIndex);
     destination()->true_target()->Branch(equal);
 
     // It can be an undetectable object.
     __ movq(kScratchRegister,
             FieldOperand(answer.reg(), HeapObject::kMapOffset));
     __ testb(FieldOperand(kScratchRegister, Map::kBitFieldOffset),
              Immediate(1 << Map::kIsUndetectable));
     destination()->false_target()->Branch(not_zero);
     __ CmpInstanceType(kScratchRegister, FIRST_JS_OBJECT_TYPE);
     destination()->false_target()->Branch(below);
(...skipping 273 matching lines...)
   __ bind(&a_cons_string);
   // Get the first of the two strings. Both sliced and cons strings
   // store their source string at the same offset.
   ASSERT(SlicedString::kBufferOffset == ConsString::kFirstOffset);
   __ movq(object.reg(), FieldOperand(object.reg(), ConsString::kFirstOffset));
   __ jmp(&try_again_with_new_string);
 
   __ bind(&slow_case);
   // Move the undefined value into the result register, which will
   // trigger the slow case.
-  __ Move(temp.reg(), Factory::undefined_value());
+  __ LoadRoot(temp.reg(), Heap::kUndefinedValueRootIndex);
 
   __ bind(&end);
   frame_->Push(&temp);
 }
 
 
 void CodeGenerator::GenerateIsNonNegativeSmi(ZoneList<Expression*>* args) {
   ASSERT(args->length() == 1);
   Load(args->at(0));
   Result value = frame_->Pop();
(...skipping 422 matching lines...)
 // 'false_target'/'true_target' as appropriate.
 void CodeGenerator::ToBoolean(ControlDestination* dest) {
   Comment cmnt(masm_, "[ ToBoolean");
 
   // The value to convert should be popped from the frame.
   Result value = frame_->Pop();
   value.ToRegister();
   // Fast case checks.
 
   // 'false' => false.
-  __ Cmp(value.reg(), Factory::false_value());
+  __ CompareRoot(value.reg(), Heap::kFalseValueRootIndex);
   dest->false_target()->Branch(equal);
 
   // 'true' => true.
-  __ Cmp(value.reg(), Factory::true_value());
+  __ CompareRoot(value.reg(), Heap::kTrueValueRootIndex);
   dest->true_target()->Branch(equal);
 
   // 'undefined' => false.
-  __ Cmp(value.reg(), Factory::undefined_value());
+  __ CompareRoot(value.reg(), Heap::kUndefinedValueRootIndex);
   dest->false_target()->Branch(equal);
 
   // Smi => false iff zero.
   ASSERT(kSmiTag == 0);
   __ testl(value.reg(), value.reg());
   dest->false_target()->Branch(zero);
   __ testl(value.reg(), Immediate(kSmiTagMask));
   dest->true_target()->Branch(zero);
 
   // Call the stub for all other cases.
(...skipping 198 matching lines...)
       // Allocate a fresh register to use as a temp in
       // ContextSlotOperandCheckExtensions and to hold the result
       // value.
       value = allocator_->Allocate();
       ASSERT(value.is_valid());
       __ movq(value.reg(),
               ContextSlotOperandCheckExtensions(potential_slot,
                                                 value,
                                                 &slow));
       if (potential_slot->var()->mode() == Variable::CONST) {
-        __ Cmp(value.reg(), Factory::the_hole_value());
+        __ CompareRoot(value.reg(), Heap::kTheHoleValueRootIndex);
         done.Branch(not_equal, &value);
-        __ movq(value.reg(), Factory::undefined_value(),
-                RelocInfo::EMBEDDED_OBJECT);
+        __ LoadRoot(value.reg(), Heap::kUndefinedValueRootIndex);
       }
       // There is always control flow to slow from
       // ContextSlotOperandCheckExtensions so we have to jump around
       // it.
       done.Jump(&value);
     }
   }
 
   slow.Bind();
   // A runtime call is inevitable. We eagerly sync frame elements
(...skipping 17 matching lines...)
     // Const slots may contain 'the hole' value (the constant hasn't been
     // initialized yet) which needs to be converted into the 'undefined'
     // value.
     //
     // We currently spill the virtual frame because constants use the
     // potentially unsafe direct-frame access of SlotOperand.
     VirtualFrame::SpilledScope spilled_scope;
     Comment cmnt(masm_, "[ Load const");
     JumpTarget exit;
     __ movq(rcx, SlotOperand(slot, rcx));
-    __ Cmp(rcx, Factory::the_hole_value());
+    __ CompareRoot(rcx, Heap::kTheHoleValueRootIndex);
     exit.Branch(not_equal);
-    __ movq(rcx, Factory::undefined_value(), RelocInfo::EMBEDDED_OBJECT);
+    __ LoadRoot(rcx, Heap::kUndefinedValueRootIndex);
     exit.Bind();
     frame_->EmitPush(rcx);
 
   } else if (slot->type() == Slot::PARAMETER) {
     frame_->PushParameterAt(slot->index());
 
   } else if (slot->type() == Slot::LOCAL) {
     frame_->PushLocalAt(slot->index());
 
   } else {
(...skipping 33 matching lines...)
     } else {
       frame_->Push(&value);
     }
     return;
   }
 
   // The loaded value is in a register. If it is the sentinel that
   // indicates that we haven't loaded the arguments object yet, we
   // need to do it now.
   JumpTarget exit;
-  __ Cmp(value.reg(), Factory::the_hole_value());
+  __ CompareRoot(value.reg(), Heap::kTheHoleValueRootIndex);
   frame_->Push(&value);
   exit.Branch(not_equal);
   Result arguments = StoreArgumentsObject(false);
   frame_->SetElementAt(0, &arguments);
   exit.Bind();
 }
 
 
 void CodeGenerator::StoreToSlot(Slot* slot, InitState init_state) {
   if (slot->type() == Slot::LOOKUP) {
(...skipping 40 matching lines...)
       // Only the first const initialization must be executed (the slot
       // still contains 'the hole' value). When the assignment is executed,
       // the code is identical to a normal store (see below).
       //
       // We spill the frame in the code below because the direct-frame
       // access of SlotOperand is potentially unsafe with an unspilled
       // frame.
       VirtualFrame::SpilledScope spilled_scope;
       Comment cmnt(masm_, "[ Init const");
       __ movq(rcx, SlotOperand(slot, rcx));
-      __ Cmp(rcx, Factory::the_hole_value());
+      __ CompareRoot(rcx, Heap::kTheHoleValueRootIndex);
       exit.Branch(not_equal);
     }
 
     // We must execute the store. Storing a variable must keep the (new)
     // value on the stack. This is necessary for compiling assignment
     // expressions.
     //
     // Note: We will reach here even with slot->var()->mode() ==
     // Variable::CONST because of const declarations which will initialize
     // consts to 'the hole' value and by doing so, end up calling this code.
(...skipping 63 matching lines...)
   }
 
   if (s->is_eval_scope()) {
     // Loop up the context chain. There is no frame effect so it is
     // safe to use raw labels here.
     Label next, fast;
     if (!context.is(tmp.reg())) {
       __ movq(tmp.reg(), context);
     }
     // Load map for comparison into register, outside loop.
-    __ Move(kScratchRegister, Factory::global_context_map());
+    __ LoadRoot(kScratchRegister, Heap::kGlobalContextMapRootIndex);
     __ bind(&next);
     // Terminate at global context.
     __ cmpq(kScratchRegister, FieldOperand(tmp.reg(), HeapObject::kMapOffset));
     __ j(equal, &fast);
     // Check that extension is NULL.
     __ cmpq(ContextOperand(tmp.reg(), Context::EXTENSION_INDEX), Immediate(0));
     slow->Branch(not_equal);
     // Load next context in chain.
     __ movq(tmp.reg(), ContextOperand(tmp.reg(), Context::CLOSURE_INDEX));
     __ movq(tmp.reg(), FieldOperand(tmp.reg(), JSFunction::kContextOffset));
(...skipping 83 matching lines...)
     // We have to skip storing into the arguments slot if it has
     // already been written to. This can happen if a function
     // has a local variable named 'arguments'.
     LoadFromSlot(scope_->arguments()->var()->slot(), NOT_INSIDE_TYPEOF);
     Result arguments = frame_->Pop();
     if (arguments.is_constant()) {
       // We have to skip updating the arguments object if it has
       // been assigned a proper value.
       skip_arguments = !arguments.handle()->IsTheHole();
     } else {
-      __ Cmp(arguments.reg(), Factory::the_hole_value());
+      __ CompareRoot(arguments.reg(), Heap::kTheHoleValueRootIndex);
       arguments.Unuse();
       done.Branch(not_equal);
     }
   }
   if (!skip_arguments) {
     arguments_ref.SetValue(NOT_CONST_INIT);
     if (mode == LAZY_ARGUMENTS_ALLOCATION) done.Bind();
   }
   shadow_ref.SetValue(NOT_CONST_INIT);
 }
(...skipping 117 matching lines...)
   } else if (cc == equal &&
              (left_side_constant_null || right_side_constant_null)) {
     // To make null checks efficient, we check if either the left side or
     // the right side is the constant 'null'.
     // If so, we optimize the code by inlining a null check instead of
     // calling the (very) general runtime routine for checking equality.
     Result operand = left_side_constant_null ? right_side : left_side;
     right_side.Unuse();
     left_side.Unuse();
     operand.ToRegister();
-    __ Cmp(operand.reg(), Factory::null_value());
+    __ CompareRoot(operand.reg(), Heap::kNullValueRootIndex);
     if (strict) {
       operand.Unuse();
       dest->Split(equal);
     } else {
       // The 'null' value is only equal to 'undefined' if using non-strict
       // comparisons.
       dest->true_target()->Branch(equal);
-      __ Cmp(operand.reg(), Factory::undefined_value());
+      __ CompareRoot(operand.reg(), Heap::kUndefinedValueRootIndex);
       dest->true_target()->Branch(equal);
       __ testl(operand.reg(), Immediate(kSmiTagMask));
       dest->false_target()->Branch(equal);
 
       // It can be an undetectable object.
       // Use a scratch register in preference to spilling operand.reg().
       Result temp = allocator()->Allocate();
       ASSERT(temp.is_valid());
       __ movq(temp.reg(),
               FieldOperand(operand.reg(), HeapObject::kMapOffset));
(...skipping 1099 matching lines...)
     // one is rax, then we can reuse that one because the value
     // coming from the deferred code will be in rax.
     Result value = index;
     __ movq(value.reg(),
             Operand(elements.reg(),
                     index.reg(),
                     times_pointer_size,
                     FixedArray::kHeaderSize - kHeapObjectTag));
     elements.Unuse();
     index.Unuse();
-    __ Cmp(value.reg(), Factory::the_hole_value());
+    __ CompareRoot(value.reg(), Heap::kTheHoleValueRootIndex);
     deferred->Branch(equal);
     __ IncrementCounter(&Counters::keyed_load_inline, 1);
 
     deferred->BindExit();
     // Restore the receiver and key to the frame and push the
     // result on top of it.
     cgen_->frame()->Push(&receiver);
     cgen_->frame()->Push(&key);
     cgen_->frame()->Push(&value);
 
(...skipping 188 matching lines...)
       UNREACHABLE();
   }
 }
 
 
 void ToBooleanStub::Generate(MacroAssembler* masm) {
   Label false_result, true_result, not_string;
   __ movq(rax, Operand(rsp, 1 * kPointerSize));
 
   // 'null' => false.
-  __ Cmp(rax, Factory::null_value());
+  __ CompareRoot(rax, Heap::kNullValueRootIndex);
   __ j(equal, &false_result);
 
   // Get the map and type of the heap object.
   // We don't use CmpObjectType because we manipulate the type field.
   __ movq(rdx, FieldOperand(rax, HeapObject::kMapOffset));
   __ movzxbq(rcx, FieldOperand(rdx, Map::kInstanceTypeOffset));
 
   // Undetectable => false.
   __ movzxbq(rbx, FieldOperand(rdx, Map::kBitFieldOffset));
   __ and_(rbx, Immediate(1 << Map::kIsUndetectable));
(...skipping 10 matching lines...)
   __ cmpq(rcx, Immediate(kShortStringTag));
   __ j(not_equal, &true_result);  // Empty string is always short.
   __ movl(rdx, FieldOperand(rax, String::kLengthOffset));
   __ shr(rdx, Immediate(String::kShortLengthShift));
   __ j(zero, &false_result);
   __ jmp(&true_result);
 
   __ bind(&not_string);
   // HeapNumber => false iff +0, -0, or NaN.
   // These three cases set C3 when compared to zero in the FPU.
-  __ Cmp(rdx, Factory::heap_number_map());
+  __ CompareRoot(rdx, Heap::kHeapNumberMapRootIndex);
   __ j(not_equal, &true_result);
   // TODO(x64): Don't use fp stack, use MMX registers?
   __ fldz();  // Load zero onto fp stack
   // Load heap-number double value onto fp stack
   __ fld_d(FieldOperand(rax, HeapNumber::kValueOffset));
   __ fucompp();  // Compare and pop both values.
   __ movq(kScratchRegister, rax);
   __ fnstsw_ax();  // Store fp status word in ax, no checking for exceptions.
   __ testl(rax, Immediate(0x4000));  // Test FP condition flag C3, bit 14.
   __ movq(rax, kScratchRegister);
(...skipping 392 matching lines...)
   __ CmpObjectType(rbx, FIRST_JS_OBJECT_TYPE, kScratchRegister);
   __ j(below, &slow);
   __ CmpInstanceType(kScratchRegister, LAST_JS_OBJECT_TYPE);
   __ j(above, &slow);
 
   // Register mapping: rax is object map and rbx is function prototype.
   __ movq(rcx, FieldOperand(rax, Map::kPrototypeOffset));
 
   // Loop through the prototype chain looking for the function prototype.
   Label loop, is_instance, is_not_instance;
-  __ Move(kScratchRegister, Factory::null_value());
+  __ LoadRoot(kScratchRegister, Heap::kNullValueRootIndex);
   __ bind(&loop);
   __ cmpq(rcx, rbx);
   __ j(equal, &is_instance);
   __ cmpq(rcx, kScratchRegister);
   __ j(equal, &is_not_instance);
   __ movq(rcx, FieldOperand(rcx, HeapObject::kMapOffset));
   __ movq(rcx, FieldOperand(rcx, Map::kPrototypeOffset));
   __ jmp(&loop);
 
   __ bind(&is_instance);
(...skipping 233 matching lines...)
   // Retrieve the pending exception and clear the variable.
   ExternalReference pending_exception_address(Top::k_pending_exception_address);
   __ movq(kScratchRegister, pending_exception_address);
   __ movq(rax, Operand(kScratchRegister, 0));
   __ movq(rdx, ExternalReference::the_hole_value_location());
   __ movq(rdx, Operand(rdx, 0));
   __ movq(Operand(kScratchRegister, 0), rdx);
 
   // Special handling of termination exceptions which are uncatchable
   // by javascript code.
-  __ Cmp(rax, Factory::termination_exception());
+  __ CompareRoot(rax, Heap::kTerminationExceptionRootIndex);
   __ j(equal, throw_termination_exception);
 
   // Handle normal exception.
   __ jmp(throw_normal_exception);
 
   // Retry.
   __ bind(&retry);
 }
 
 
(...skipping 293 matching lines...)
   // Allocate heap number in new space.
   __ AllocateObjectInNewSpace(HeapNumber::kSize,
                               result,
                               scratch,
                               no_reg,
                               need_gc,
                               false);
 
   // Set the map and tag the result.
   __ addq(result, Immediate(kHeapObjectTag));
-  __ movq(kScratchRegister,
-          Factory::heap_number_map(),
-          RelocInfo::EMBEDDED_OBJECT);
+  __ LoadRoot(kScratchRegister, Heap::kHeapNumberMapRootIndex);
   __ movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
 }
 
 
 void FloatingPointHelper::LoadFloatOperand(MacroAssembler* masm,
                                            Register number) {
   Label load_smi, done;
 
   __ testl(number, Immediate(kSmiTagMask));
   __ j(zero, &load_smi);
(...skipping 529 matching lines...)
 int CompareStub::MinorKey() {
   // Encode the two parameters in a unique 16 bit value.
   ASSERT(static_cast<unsigned>(cc_) < (1 << 15));
   return (static_cast<unsigned>(cc_) << 1) | (strict_ ? 1 : 0);
 }
 
 
 #undef __
 
 } }  // namespace v8::internal