Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(1403)

Side by Side Diff: src/x64/full-codegen-x64.cc

Issue 7060010: Merge bleeding edge into the GC branch up to 7948. The asserts (Closed) Base URL: http://v8.googlecode.com/svn/branches/experimental/gc/
Patch Set: Created 9 years, 7 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« no previous file with comments | « src/x64/code-stubs-x64.cc ('k') | src/x64/ic-x64.cc » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2011 the V8 project authors. All rights reserved. 1 // Copyright 2011 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 45 matching lines...) Expand 10 before | Expand all | Expand 10 after
56 : masm_(masm) { 56 : masm_(masm) {
57 #ifdef DEBUG 57 #ifdef DEBUG
58 info_emitted_ = false; 58 info_emitted_ = false;
59 #endif 59 #endif
60 } 60 }
61 61
62 ~JumpPatchSite() { 62 ~JumpPatchSite() {
63 ASSERT(patch_site_.is_bound() == info_emitted_); 63 ASSERT(patch_site_.is_bound() == info_emitted_);
64 } 64 }
65 65
66 void EmitJumpIfNotSmi(Register reg, NearLabel* target) { 66 void EmitJumpIfNotSmi(Register reg,
67 Label* target,
68 Label::Distance near_jump = Label::kFar) {
67 __ testb(reg, Immediate(kSmiTagMask)); 69 __ testb(reg, Immediate(kSmiTagMask));
68 EmitJump(not_carry, target); // Always taken before patched. 70 EmitJump(not_carry, target, near_jump); // Always taken before patched.
69 } 71 }
70 72
71 void EmitJumpIfSmi(Register reg, NearLabel* target) { 73 void EmitJumpIfSmi(Register reg,
74 Label* target,
75 Label::Distance near_jump = Label::kFar) {
72 __ testb(reg, Immediate(kSmiTagMask)); 76 __ testb(reg, Immediate(kSmiTagMask));
73 EmitJump(carry, target); // Never taken before patched. 77 EmitJump(carry, target, near_jump); // Never taken before patched.
74 } 78 }
75 79
76 void EmitPatchInfo() { 80 void EmitPatchInfo() {
77 int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site_); 81 int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site_);
78 ASSERT(is_int8(delta_to_patch_site)); 82 ASSERT(is_int8(delta_to_patch_site));
79 __ testl(rax, Immediate(delta_to_patch_site)); 83 __ testl(rax, Immediate(delta_to_patch_site));
80 #ifdef DEBUG 84 #ifdef DEBUG
81 info_emitted_ = true; 85 info_emitted_ = true;
82 #endif 86 #endif
83 } 87 }
84 88
85 bool is_bound() const { return patch_site_.is_bound(); } 89 bool is_bound() const { return patch_site_.is_bound(); }
86 90
87 private: 91 private:
88 // jc will be patched with jz, jnc will become jnz. 92 // jc will be patched with jz, jnc will become jnz.
89 void EmitJump(Condition cc, NearLabel* target) { 93 void EmitJump(Condition cc, Label* target, Label::Distance near_jump) {
90 ASSERT(!patch_site_.is_bound() && !info_emitted_); 94 ASSERT(!patch_site_.is_bound() && !info_emitted_);
91 ASSERT(cc == carry || cc == not_carry); 95 ASSERT(cc == carry || cc == not_carry);
92 __ bind(&patch_site_); 96 __ bind(&patch_site_);
93 __ j(cc, target); 97 __ j(cc, target, near_jump);
94 } 98 }
95 99
96 MacroAssembler* masm_; 100 MacroAssembler* masm_;
97 Label patch_site_; 101 Label patch_site_;
98 #ifdef DEBUG 102 #ifdef DEBUG
99 bool info_emitted_; 103 bool info_emitted_;
100 #endif 104 #endif
101 }; 105 };
102 106
103 107
(...skipping 128 matching lines...) Expand 10 before | Expand all | Expand 10 after
232 // For named function expressions, declare the function name as a 236 // For named function expressions, declare the function name as a
233 // constant. 237 // constant.
234 if (scope()->is_function_scope() && scope()->function() != NULL) { 238 if (scope()->is_function_scope() && scope()->function() != NULL) {
235 EmitDeclaration(scope()->function(), Variable::CONST, NULL); 239 EmitDeclaration(scope()->function(), Variable::CONST, NULL);
236 } 240 }
237 VisitDeclarations(scope()->declarations()); 241 VisitDeclarations(scope()->declarations());
238 } 242 }
239 243
240 { Comment cmnt(masm_, "[ Stack check"); 244 { Comment cmnt(masm_, "[ Stack check");
241 PrepareForBailoutForId(AstNode::kFunctionEntryId, NO_REGISTERS); 245 PrepareForBailoutForId(AstNode::kFunctionEntryId, NO_REGISTERS);
242 NearLabel ok; 246 Label ok;
243 __ CompareRoot(rsp, Heap::kStackLimitRootIndex); 247 __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
244 __ j(above_equal, &ok); 248 __ j(above_equal, &ok, Label::kNear);
245 StackCheckStub stub; 249 StackCheckStub stub;
246 __ CallStub(&stub); 250 __ CallStub(&stub);
247 __ bind(&ok); 251 __ bind(&ok);
248 } 252 }
249 253
250 { Comment cmnt(masm_, "[ Body"); 254 { Comment cmnt(masm_, "[ Body");
251 ASSERT(loop_depth() == 0); 255 ASSERT(loop_depth() == 0);
252 VisitStatements(function()->body()); 256 VisitStatements(function()->body());
253 ASSERT(loop_depth() == 0); 257 ASSERT(loop_depth() == 0);
254 } 258 }
255 } 259 }
256 260
257 // Always emit a 'return undefined' in case control fell off the end of 261 // Always emit a 'return undefined' in case control fell off the end of
258 // the body. 262 // the body.
259 { Comment cmnt(masm_, "[ return <undefined>;"); 263 { Comment cmnt(masm_, "[ return <undefined>;");
260 __ LoadRoot(rax, Heap::kUndefinedValueRootIndex); 264 __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
261 EmitReturnSequence(); 265 EmitReturnSequence();
262 } 266 }
263 } 267 }
264 268
265 269
266 void FullCodeGenerator::ClearAccumulator() { 270 void FullCodeGenerator::ClearAccumulator() {
267 __ Set(rax, 0); 271 __ Set(rax, 0);
268 } 272 }
269 273
270 274
271 void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt) { 275 void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt) {
272 Comment cmnt(masm_, "[ Stack check"); 276 Comment cmnt(masm_, "[ Stack check");
273 NearLabel ok; 277 Label ok;
274 __ CompareRoot(rsp, Heap::kStackLimitRootIndex); 278 __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
275 __ j(above_equal, &ok); 279 __ j(above_equal, &ok, Label::kNear);
276 StackCheckStub stub; 280 StackCheckStub stub;
277 __ CallStub(&stub); 281 __ CallStub(&stub);
278 // Record a mapping of this PC offset to the OSR id. This is used to find 282 // Record a mapping of this PC offset to the OSR id. This is used to find
279 // the AST id from the unoptimized code in order to use it as a key into 283 // the AST id from the unoptimized code in order to use it as a key into
280 // the deoptimization input data found in the optimized code. 284 // the deoptimization input data found in the optimized code.
281 RecordStackCheck(stmt->OsrEntryId()); 285 RecordStackCheck(stmt->OsrEntryId());
282 286
283 // Loop stack checks can be patched to perform on-stack replacement. In 287 // Loop stack checks can be patched to perform on-stack replacement. In
284 // order to decide whether or not to perform OSR we embed the loop depth 288 // order to decide whether or not to perform OSR we embed the loop depth
285 // in a test instruction after the call so we can extract it from the OSR 289 // in a test instruction after the call so we can extract it from the OSR
(...skipping 192 matching lines...) Expand 10 before | Expand all | Expand 10 after
478 void FullCodeGenerator::EffectContext::Plug(Label* materialize_true, 482 void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
479 Label* materialize_false) const { 483 Label* materialize_false) const {
480 ASSERT(materialize_true == materialize_false); 484 ASSERT(materialize_true == materialize_false);
481 __ bind(materialize_true); 485 __ bind(materialize_true);
482 } 486 }
483 487
484 488
485 void FullCodeGenerator::AccumulatorValueContext::Plug( 489 void FullCodeGenerator::AccumulatorValueContext::Plug(
486 Label* materialize_true, 490 Label* materialize_true,
487 Label* materialize_false) const { 491 Label* materialize_false) const {
488 NearLabel done; 492 Label done;
489 __ bind(materialize_true); 493 __ bind(materialize_true);
490 __ Move(result_register(), isolate()->factory()->true_value()); 494 __ Move(result_register(), isolate()->factory()->true_value());
491 __ jmp(&done); 495 __ jmp(&done, Label::kNear);
492 __ bind(materialize_false); 496 __ bind(materialize_false);
493 __ Move(result_register(), isolate()->factory()->false_value()); 497 __ Move(result_register(), isolate()->factory()->false_value());
494 __ bind(&done); 498 __ bind(&done);
495 } 499 }
496 500
497 501
498 void FullCodeGenerator::StackValueContext::Plug( 502 void FullCodeGenerator::StackValueContext::Plug(
499 Label* materialize_true, 503 Label* materialize_true,
500 Label* materialize_false) const { 504 Label* materialize_false) const {
501 NearLabel done; 505 Label done;
502 __ bind(materialize_true); 506 __ bind(materialize_true);
503 __ Push(isolate()->factory()->true_value()); 507 __ Push(isolate()->factory()->true_value());
504 __ jmp(&done); 508 __ jmp(&done, Label::kNear);
505 __ bind(materialize_false); 509 __ bind(materialize_false);
506 __ Push(isolate()->factory()->false_value()); 510 __ Push(isolate()->factory()->false_value());
507 __ bind(&done); 511 __ bind(&done);
508 } 512 }
509 513
510 514
511 void FullCodeGenerator::TestContext::Plug(Label* materialize_true, 515 void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
512 Label* materialize_false) const { 516 Label* materialize_false) const {
513 ASSERT(materialize_true == true_label_); 517 ASSERT(materialize_true == true_label_);
514 ASSERT(materialize_false == false_label_); 518 ASSERT(materialize_false == false_label_);
(...skipping 27 matching lines...) Expand all
542 if (true_label_ != fall_through_) __ jmp(true_label_); 546 if (true_label_ != fall_through_) __ jmp(true_label_);
543 } else { 547 } else {
544 if (false_label_ != fall_through_) __ jmp(false_label_); 548 if (false_label_ != fall_through_) __ jmp(false_label_);
545 } 549 }
546 } 550 }
547 551
548 552
549 void FullCodeGenerator::DoTest(Label* if_true, 553 void FullCodeGenerator::DoTest(Label* if_true,
550 Label* if_false, 554 Label* if_false,
551 Label* fall_through) { 555 Label* fall_through) {
552 // Emit the inlined tests assumed by the stub.
553 __ CompareRoot(result_register(), Heap::kUndefinedValueRootIndex);
554 __ j(equal, if_false);
555 __ CompareRoot(result_register(), Heap::kTrueValueRootIndex);
556 __ j(equal, if_true);
557 __ CompareRoot(result_register(), Heap::kFalseValueRootIndex);
558 __ j(equal, if_false);
559 STATIC_ASSERT(kSmiTag == 0);
560 __ Cmp(result_register(), Smi::FromInt(0));
561 __ j(equal, if_false);
562 Condition is_smi = masm_->CheckSmi(result_register());
563 __ j(is_smi, if_true);
564
565 // Call the ToBoolean stub for all other cases.
566 ToBooleanStub stub; 556 ToBooleanStub stub;
567 __ push(result_register()); 557 __ push(result_register());
568 __ CallStub(&stub); 558 __ CallStub(&stub);
569 __ testq(rax, rax); 559 __ testq(rax, rax);
570
571 // The stub returns nonzero for true. 560 // The stub returns nonzero for true.
572 Split(not_zero, if_true, if_false, fall_through); 561 Split(not_zero, if_true, if_false, fall_through);
573 } 562 }
574 563
575 564
576 void FullCodeGenerator::Split(Condition cc, 565 void FullCodeGenerator::Split(Condition cc,
577 Label* if_true, 566 Label* if_true,
578 Label* if_false, 567 Label* if_false,
579 Label* fall_through) { 568 Label* fall_through) {
580 if (if_false == fall_through) { 569 if (if_false == fall_through) {
(...skipping 50 matching lines...) Expand 10 before | Expand all | Expand 10 after
631 620
632 void FullCodeGenerator::PrepareForBailoutBeforeSplit(State state, 621 void FullCodeGenerator::PrepareForBailoutBeforeSplit(State state,
633 bool should_normalize, 622 bool should_normalize,
634 Label* if_true, 623 Label* if_true,
635 Label* if_false) { 624 Label* if_false) {
636 // Only prepare for bailouts before splits if we're in a test 625 // Only prepare for bailouts before splits if we're in a test
637 // context. Otherwise, we let the Visit function deal with the 626 // context. Otherwise, we let the Visit function deal with the
638 // preparation to avoid preparing with the same AST id twice. 627 // preparation to avoid preparing with the same AST id twice.
639 if (!context()->IsTest() || !info_->IsOptimizable()) return; 628 if (!context()->IsTest() || !info_->IsOptimizable()) return;
640 629
641 NearLabel skip; 630 Label skip;
642 if (should_normalize) __ jmp(&skip); 631 if (should_normalize) __ jmp(&skip, Label::kNear);
643 632
644 ForwardBailoutStack* current = forward_bailout_stack_; 633 ForwardBailoutStack* current = forward_bailout_stack_;
645 while (current != NULL) { 634 while (current != NULL) {
646 PrepareForBailout(current->expr(), state); 635 PrepareForBailout(current->expr(), state);
647 current = current->parent(); 636 current = current->parent();
648 } 637 }
649 638
650 if (should_normalize) { 639 if (should_normalize) {
651 __ CompareRoot(rax, Heap::kTrueValueRootIndex); 640 __ CompareRoot(rax, Heap::kTrueValueRootIndex);
652 Split(equal, if_true, if_false, NULL); 641 Split(equal, if_true, if_false, NULL);
(...skipping 148 matching lines...) Expand 10 before | Expand all | Expand 10 after
801 next_test.Unuse(); 790 next_test.Unuse();
802 791
803 // Compile the label expression. 792 // Compile the label expression.
804 VisitForAccumulatorValue(clause->label()); 793 VisitForAccumulatorValue(clause->label());
805 794
806 // Perform the comparison as if via '==='. 795 // Perform the comparison as if via '==='.
807 __ movq(rdx, Operand(rsp, 0)); // Switch value. 796 __ movq(rdx, Operand(rsp, 0)); // Switch value.
808 bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT); 797 bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
809 JumpPatchSite patch_site(masm_); 798 JumpPatchSite patch_site(masm_);
810 if (inline_smi_code) { 799 if (inline_smi_code) {
811 NearLabel slow_case; 800 Label slow_case;
812 __ movq(rcx, rdx); 801 __ movq(rcx, rdx);
813 __ or_(rcx, rax); 802 __ or_(rcx, rax);
814 patch_site.EmitJumpIfNotSmi(rcx, &slow_case); 803 patch_site.EmitJumpIfNotSmi(rcx, &slow_case, Label::kNear);
815 804
816 __ cmpq(rdx, rax); 805 __ cmpq(rdx, rax);
817 __ j(not_equal, &next_test); 806 __ j(not_equal, &next_test);
818 __ Drop(1); // Switch value is no longer needed. 807 __ Drop(1); // Switch value is no longer needed.
819 __ jmp(clause->body_target()); 808 __ jmp(clause->body_target());
820 __ bind(&slow_case); 809 __ bind(&slow_case);
821 } 810 }
822 811
823 // Record position before stub call for type feedback. 812 // Record position before stub call for type feedback.
824 SetSourcePosition(clause->position()); 813 SetSourcePosition(clause->position());
(...skipping 87 matching lines...) Expand 10 before | Expand all | Expand 10 after
912 __ cmpq(rdx, empty_descriptor_array_value); 901 __ cmpq(rdx, empty_descriptor_array_value);
913 __ j(equal, &call_runtime); 902 __ j(equal, &call_runtime);
914 903
915 // Check that there is an enum cache in the non-empty instance 904 // Check that there is an enum cache in the non-empty instance
916 // descriptors (rdx). This is the case if the next enumeration 905 // descriptors (rdx). This is the case if the next enumeration
917 // index field does not contain a smi. 906 // index field does not contain a smi.
918 __ movq(rdx, FieldOperand(rdx, DescriptorArray::kEnumerationIndexOffset)); 907 __ movq(rdx, FieldOperand(rdx, DescriptorArray::kEnumerationIndexOffset));
919 __ JumpIfSmi(rdx, &call_runtime); 908 __ JumpIfSmi(rdx, &call_runtime);
920 909
921 // For all objects but the receiver, check that the cache is empty. 910 // For all objects but the receiver, check that the cache is empty.
922 NearLabel check_prototype; 911 Label check_prototype;
923 __ cmpq(rcx, rax); 912 __ cmpq(rcx, rax);
924 __ j(equal, &check_prototype); 913 __ j(equal, &check_prototype, Label::kNear);
925 __ movq(rdx, FieldOperand(rdx, DescriptorArray::kEnumCacheBridgeCacheOffset)); 914 __ movq(rdx, FieldOperand(rdx, DescriptorArray::kEnumCacheBridgeCacheOffset));
926 __ cmpq(rdx, empty_fixed_array_value); 915 __ cmpq(rdx, empty_fixed_array_value);
927 __ j(not_equal, &call_runtime); 916 __ j(not_equal, &call_runtime);
928 917
929 // Load the prototype from the map and loop if non-null. 918 // Load the prototype from the map and loop if non-null.
930 __ bind(&check_prototype); 919 __ bind(&check_prototype);
931 __ movq(rcx, FieldOperand(rbx, Map::kPrototypeOffset)); 920 __ movq(rcx, FieldOperand(rbx, Map::kPrototypeOffset));
932 __ cmpq(rcx, null_value); 921 __ cmpq(rcx, null_value);
933 __ j(not_equal, &next); 922 __ j(not_equal, &next);
934 923
935 // The enum cache is valid. Load the map of the object being 924 // The enum cache is valid. Load the map of the object being
936 // iterated over and use the cache for the iteration. 925 // iterated over and use the cache for the iteration.
937 NearLabel use_cache; 926 Label use_cache;
938 __ movq(rax, FieldOperand(rax, HeapObject::kMapOffset)); 927 __ movq(rax, FieldOperand(rax, HeapObject::kMapOffset));
939 __ jmp(&use_cache); 928 __ jmp(&use_cache, Label::kNear);
940 929
941 // Get the set of properties to enumerate. 930 // Get the set of properties to enumerate.
942 __ bind(&call_runtime); 931 __ bind(&call_runtime);
943 __ push(rax); // Duplicate the enumerable object on the stack. 932 __ push(rax); // Duplicate the enumerable object on the stack.
944 __ CallRuntime(Runtime::kGetPropertyNamesFast, 1); 933 __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
945 934
946 // If we got a map from the runtime call, we can do a fast 935 // If we got a map from the runtime call, we can do a fast
947 // modification check. Otherwise, we got a fixed array, and we have 936 // modification check. Otherwise, we got a fixed array, and we have
948 // to do a slow check. 937 // to do a slow check.
949 NearLabel fixed_array; 938 Label fixed_array;
950 __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset), 939 __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset),
951 Heap::kMetaMapRootIndex); 940 Heap::kMetaMapRootIndex);
952 __ j(not_equal, &fixed_array); 941 __ j(not_equal, &fixed_array, Label::kNear);
953 942
954 // We got a map in register rax. Get the enumeration cache from it. 943 // We got a map in register rax. Get the enumeration cache from it.
955 __ bind(&use_cache); 944 __ bind(&use_cache);
956 __ movq(rcx, FieldOperand(rax, Map::kInstanceDescriptorsOffset)); 945 __ movq(rcx, FieldOperand(rax, Map::kInstanceDescriptorsOffset));
957 __ movq(rcx, FieldOperand(rcx, DescriptorArray::kEnumerationIndexOffset)); 946 __ movq(rcx, FieldOperand(rcx, DescriptorArray::kEnumerationIndexOffset));
958 __ movq(rdx, FieldOperand(rcx, DescriptorArray::kEnumCacheBridgeCacheOffset)); 947 __ movq(rdx, FieldOperand(rcx, DescriptorArray::kEnumCacheBridgeCacheOffset));
959 948
960 // Setup the four remaining stack slots. 949 // Setup the four remaining stack slots.
961 __ push(rax); // Map. 950 __ push(rax); // Map.
962 __ push(rdx); // Enumeration cache. 951 __ push(rdx); // Enumeration cache.
(...skipping 23 matching lines...) Expand all
986 index.reg, 975 index.reg,
987 index.scale, 976 index.scale,
988 FixedArray::kHeaderSize)); 977 FixedArray::kHeaderSize));
989 978
990 // Get the expected map from the stack or a zero map in the 979 // Get the expected map from the stack or a zero map in the
991 // permanent slow case into register rdx. 980 // permanent slow case into register rdx.
992 __ movq(rdx, Operand(rsp, 3 * kPointerSize)); 981 __ movq(rdx, Operand(rsp, 3 * kPointerSize));
993 982
994 // Check if the expected map still matches that of the enumerable. 983 // Check if the expected map still matches that of the enumerable.
995 // If not, we have to filter the key. 984 // If not, we have to filter the key.
996 NearLabel update_each; 985 Label update_each;
997 __ movq(rcx, Operand(rsp, 4 * kPointerSize)); 986 __ movq(rcx, Operand(rsp, 4 * kPointerSize));
998 __ cmpq(rdx, FieldOperand(rcx, HeapObject::kMapOffset)); 987 __ cmpq(rdx, FieldOperand(rcx, HeapObject::kMapOffset));
999 __ j(equal, &update_each); 988 __ j(equal, &update_each, Label::kNear);
1000 989
1001 // Convert the entry to a string or null if it isn't a property 990 // Convert the entry to a string or null if it isn't a property
1002 // anymore. If the property has been removed while iterating, we 991 // anymore. If the property has been removed while iterating, we
1003 // just skip it. 992 // just skip it.
1004 __ push(rcx); // Enumerable. 993 __ push(rcx); // Enumerable.
1005 __ push(rbx); // Current entry. 994 __ push(rbx); // Current entry.
1006 __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION); 995 __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
1007 __ Cmp(rax, Smi::FromInt(0)); 996 __ Cmp(rax, Smi::FromInt(0));
1008 __ j(equal, loop_statement.continue_target()); 997 __ j(equal, loop_statement.continue_target());
1009 __ movq(rbx, rax); 998 __ movq(rbx, rax);
(...skipping 87 matching lines...) Expand 10 before | Expand all | Expand 10 after
1097 // If no outer scope calls eval, we do not need to check more 1086 // If no outer scope calls eval, we do not need to check more
1098 // context extensions. If we have reached an eval scope, we check 1087 // context extensions. If we have reached an eval scope, we check
1099 // all extensions from this point. 1088 // all extensions from this point.
1100 if (!s->outer_scope_calls_eval() || s->is_eval_scope()) break; 1089 if (!s->outer_scope_calls_eval() || s->is_eval_scope()) break;
1101 s = s->outer_scope(); 1090 s = s->outer_scope();
1102 } 1091 }
1103 1092
1104 if (s != NULL && s->is_eval_scope()) { 1093 if (s != NULL && s->is_eval_scope()) {
1105 // Loop up the context chain. There is no frame effect so it is 1094 // Loop up the context chain. There is no frame effect so it is
1106 // safe to use raw labels here. 1095 // safe to use raw labels here.
1107 NearLabel next, fast; 1096 Label next, fast;
1108 if (!context.is(temp)) { 1097 if (!context.is(temp)) {
1109 __ movq(temp, context); 1098 __ movq(temp, context);
1110 } 1099 }
1111 // Load map for comparison into register, outside loop. 1100 // Load map for comparison into register, outside loop.
1112 __ LoadRoot(kScratchRegister, Heap::kGlobalContextMapRootIndex); 1101 __ LoadRoot(kScratchRegister, Heap::kGlobalContextMapRootIndex);
1113 __ bind(&next); 1102 __ bind(&next);
1114 // Terminate at global context. 1103 // Terminate at global context.
1115 __ cmpq(kScratchRegister, FieldOperand(temp, HeapObject::kMapOffset)); 1104 __ cmpq(kScratchRegister, FieldOperand(temp, HeapObject::kMapOffset));
1116 __ j(equal, &fast); 1105 __ j(equal, &fast, Label::kNear);
1117 // Check that extension is NULL. 1106 // Check that extension is NULL.
1118 __ cmpq(ContextOperand(temp, Context::EXTENSION_INDEX), Immediate(0)); 1107 __ cmpq(ContextOperand(temp, Context::EXTENSION_INDEX), Immediate(0));
1119 __ j(not_equal, slow); 1108 __ j(not_equal, slow);
1120 // Load next context in chain. 1109 // Load next context in chain.
1121 __ movq(temp, ContextOperand(temp, Context::CLOSURE_INDEX)); 1110 __ movq(temp, ContextOperand(temp, Context::CLOSURE_INDEX));
1122 __ movq(temp, FieldOperand(temp, JSFunction::kContextOffset)); 1111 __ movq(temp, FieldOperand(temp, JSFunction::kContextOffset));
1123 __ jmp(&next); 1112 __ jmp(&next);
1124 __ bind(&fast); 1113 __ bind(&fast);
1125 } 1114 }
1126 1115
(...skipping 128 matching lines...) Expand 10 before | Expand all | Expand 10 after
1255 1244
1256 context()->Plug(rax); 1245 context()->Plug(rax);
1257 1246
1258 } else if (slot != NULL) { 1247 } else if (slot != NULL) {
1259 Comment cmnt(masm_, (slot->type() == Slot::CONTEXT) 1248 Comment cmnt(masm_, (slot->type() == Slot::CONTEXT)
1260 ? "Context slot" 1249 ? "Context slot"
1261 : "Stack slot"); 1250 : "Stack slot");
1262 if (var->mode() == Variable::CONST) { 1251 if (var->mode() == Variable::CONST) {
1263 // Constants may be the hole value if they have not been initialized. 1252 // Constants may be the hole value if they have not been initialized.
1264 // Unhole them. 1253 // Unhole them.
1265 NearLabel done; 1254 Label done;
1266 MemOperand slot_operand = EmitSlotSearch(slot, rax); 1255 MemOperand slot_operand = EmitSlotSearch(slot, rax);
1267 __ movq(rax, slot_operand); 1256 __ movq(rax, slot_operand);
1268 __ CompareRoot(rax, Heap::kTheHoleValueRootIndex); 1257 __ CompareRoot(rax, Heap::kTheHoleValueRootIndex);
1269 __ j(not_equal, &done); 1258 __ j(not_equal, &done, Label::kNear);
1270 __ LoadRoot(rax, Heap::kUndefinedValueRootIndex); 1259 __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
1271 __ bind(&done); 1260 __ bind(&done);
1272 context()->Plug(rax); 1261 context()->Plug(rax);
1273 } else { 1262 } else {
1274 context()->Plug(slot); 1263 context()->Plug(slot);
1275 } 1264 }
1276 1265
1277 } else { 1266 } else {
1278 Comment cmnt(masm_, "Rewritten parameter"); 1267 Comment cmnt(masm_, "Rewritten parameter");
1279 ASSERT_NOT_NULL(property); 1268 ASSERT_NOT_NULL(property);
(...skipping 385 matching lines...) Expand 10 before | Expand all | Expand 10 after
1665 1654
1666 1655
1667 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr, 1656 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
1668 Token::Value op, 1657 Token::Value op,
1669 OverwriteMode mode, 1658 OverwriteMode mode,
1670 Expression* left, 1659 Expression* left,
1671 Expression* right) { 1660 Expression* right) {
1672 // Do combined smi check of the operands. Left operand is on the 1661 // Do combined smi check of the operands. Left operand is on the
1673 // stack (popped into rdx). Right operand is in rax but moved into 1662 // stack (popped into rdx). Right operand is in rax but moved into
1674 // rcx to make the shifts easier. 1663 // rcx to make the shifts easier.
1675 NearLabel done, stub_call, smi_case; 1664 Label done, stub_call, smi_case;
1676 __ pop(rdx); 1665 __ pop(rdx);
1677 __ movq(rcx, rax); 1666 __ movq(rcx, rax);
1678 __ or_(rax, rdx); 1667 __ or_(rax, rdx);
1679 JumpPatchSite patch_site(masm_); 1668 JumpPatchSite patch_site(masm_);
1680 patch_site.EmitJumpIfSmi(rax, &smi_case); 1669 patch_site.EmitJumpIfSmi(rax, &smi_case, Label::kNear);
1681 1670
1682 __ bind(&stub_call); 1671 __ bind(&stub_call);
1683 __ movq(rax, rcx); 1672 __ movq(rax, rcx);
1684 TypeRecordingBinaryOpStub stub(op, mode); 1673 TypeRecordingBinaryOpStub stub(op, mode);
1685 EmitCallIC(stub.GetCode(), &patch_site, expr->id()); 1674 EmitCallIC(stub.GetCode(), &patch_site, expr->id());
1686 __ jmp(&done); 1675 __ jmp(&done, Label::kNear);
1687 1676
1688 __ bind(&smi_case); 1677 __ bind(&smi_case);
1689 switch (op) { 1678 switch (op) {
1690 case Token::SAR: 1679 case Token::SAR:
1691 __ SmiShiftArithmeticRight(rax, rdx, rcx); 1680 __ SmiShiftArithmeticRight(rax, rdx, rcx);
1692 break; 1681 break;
1693 case Token::SHL: 1682 case Token::SHL:
1694 __ SmiShiftLeft(rax, rdx, rcx); 1683 __ SmiShiftLeft(rax, rdx, rcx);
1695 break; 1684 break;
1696 case Token::SHR: 1685 case Token::SHR:
(...skipping 495 matching lines...) Expand 10 before | Expand all | Expand 10 after
2192 __ push(context_register()); 2181 __ push(context_register());
2193 __ Push(var->name()); 2182 __ Push(var->name());
2194 __ CallRuntime(Runtime::kLoadContextSlot, 2); 2183 __ CallRuntime(Runtime::kLoadContextSlot, 2);
2195 __ push(rax); // Function. 2184 __ push(rax); // Function.
2196 __ push(rdx); // Receiver. 2185 __ push(rdx); // Receiver.
2197 2186
2198 // If fast case code has been generated, emit code to push the 2187 // If fast case code has been generated, emit code to push the
2199 // function and receiver and have the slow path jump around this 2188 // function and receiver and have the slow path jump around this
2200 // code. 2189 // code.
2201 if (done.is_linked()) { 2190 if (done.is_linked()) {
2202 NearLabel call; 2191 Label call;
2203 __ jmp(&call); 2192 __ jmp(&call, Label::kNear);
2204 __ bind(&done); 2193 __ bind(&done);
2205 // Push function. 2194 // Push function.
2206 __ push(rax); 2195 __ push(rax);
2207 // Push global receiver. 2196 // Push global receiver.
2208 __ movq(rbx, GlobalObjectOperand()); 2197 __ movq(rbx, GlobalObjectOperand());
2209 __ push(FieldOperand(rbx, GlobalObject::kGlobalReceiverOffset)); 2198 __ push(FieldOperand(rbx, GlobalObject::kGlobalReceiverOffset));
2210 __ bind(&call); 2199 __ bind(&call);
2211 } 2200 }
2212 2201
2213 // The receiver is either the global receiver or a JSObject found by 2202 // The receiver is either the global receiver or a JSObject found by
(...skipping 420 matching lines...) Expand 10 before | Expand all | Expand 10 after
2634 __ Move(rax, Smi::FromInt(scope()->num_parameters())); 2623 __ Move(rax, Smi::FromInt(scope()->num_parameters()));
2635 ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT); 2624 ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT);
2636 __ CallStub(&stub); 2625 __ CallStub(&stub);
2637 context()->Plug(rax); 2626 context()->Plug(rax);
2638 } 2627 }
2639 2628
2640 2629
2641 void FullCodeGenerator::EmitArgumentsLength(ZoneList<Expression*>* args) { 2630 void FullCodeGenerator::EmitArgumentsLength(ZoneList<Expression*>* args) {
2642 ASSERT(args->length() == 0); 2631 ASSERT(args->length() == 0);
2643 2632
2644 NearLabel exit; 2633 Label exit;
2645 // Get the number of formal parameters. 2634 // Get the number of formal parameters.
2646 __ Move(rax, Smi::FromInt(scope()->num_parameters())); 2635 __ Move(rax, Smi::FromInt(scope()->num_parameters()));
2647 2636
2648 // Check if the calling frame is an arguments adaptor frame. 2637 // Check if the calling frame is an arguments adaptor frame.
2649 __ movq(rbx, Operand(rbp, StandardFrameConstants::kCallerFPOffset)); 2638 __ movq(rbx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
2650 __ Cmp(Operand(rbx, StandardFrameConstants::kContextOffset), 2639 __ Cmp(Operand(rbx, StandardFrameConstants::kContextOffset),
2651 Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)); 2640 Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
2652 __ j(not_equal, &exit); 2641 __ j(not_equal, &exit, Label::kNear);
2653 2642
2654 // Arguments adaptor case: Read the arguments length from the 2643 // Arguments adaptor case: Read the arguments length from the
2655 // adaptor frame. 2644 // adaptor frame.
2656 __ movq(rax, Operand(rbx, ArgumentsAdaptorFrameConstants::kLengthOffset)); 2645 __ movq(rax, Operand(rbx, ArgumentsAdaptorFrameConstants::kLengthOffset));
2657 2646
2658 __ bind(&exit); 2647 __ bind(&exit);
2659 if (FLAG_debug_code) __ AbortIfNotSmi(rax); 2648 if (FLAG_debug_code) __ AbortIfNotSmi(rax);
2660 context()->Plug(rax); 2649 context()->Plug(rax);
2661 } 2650 }
2662 2651
(...skipping 515 matching lines...) Expand 10 before | Expand all | Expand 10 after
3178 Register cache = rbx; 3167 Register cache = rbx;
3179 Register tmp = rcx; 3168 Register tmp = rcx;
3180 __ movq(cache, ContextOperand(rsi, Context::GLOBAL_INDEX)); 3169 __ movq(cache, ContextOperand(rsi, Context::GLOBAL_INDEX));
3181 __ movq(cache, 3170 __ movq(cache,
3182 FieldOperand(cache, GlobalObject::kGlobalContextOffset)); 3171 FieldOperand(cache, GlobalObject::kGlobalContextOffset));
3183 __ movq(cache, 3172 __ movq(cache,
3184 ContextOperand(cache, Context::JSFUNCTION_RESULT_CACHES_INDEX)); 3173 ContextOperand(cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
3185 __ movq(cache, 3174 __ movq(cache,
3186 FieldOperand(cache, FixedArray::OffsetOfElementAt(cache_id))); 3175 FieldOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));
3187 3176
3188 NearLabel done, not_found; 3177 Label done, not_found;
3189 // tmp now holds finger offset as a smi. 3178 // tmp now holds finger offset as a smi.
3190 ASSERT(kSmiTag == 0 && kSmiTagSize == 1); 3179 ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
3191 __ movq(tmp, FieldOperand(cache, JSFunctionResultCache::kFingerOffset)); 3180 __ movq(tmp, FieldOperand(cache, JSFunctionResultCache::kFingerOffset));
3192 SmiIndex index = 3181 SmiIndex index =
3193 __ SmiToIndex(kScratchRegister, tmp, kPointerSizeLog2); 3182 __ SmiToIndex(kScratchRegister, tmp, kPointerSizeLog2);
3194 __ cmpq(key, FieldOperand(cache, 3183 __ cmpq(key, FieldOperand(cache,
3195 index.reg, 3184 index.reg,
3196 index.scale, 3185 index.scale,
3197 FixedArray::kHeaderSize)); 3186 FixedArray::kHeaderSize));
3198 __ j(not_equal, &not_found); 3187 __ j(not_equal, &not_found, Label::kNear);
3199 __ movq(rax, FieldOperand(cache, 3188 __ movq(rax, FieldOperand(cache,
3200 index.reg, 3189 index.reg,
3201 index.scale, 3190 index.scale,
3202 FixedArray::kHeaderSize + kPointerSize)); 3191 FixedArray::kHeaderSize + kPointerSize));
3203 __ jmp(&done); 3192 __ jmp(&done, Label::kNear);
3204 3193
3205 __ bind(&not_found); 3194 __ bind(&not_found);
3206 // Call runtime to perform the lookup. 3195 // Call runtime to perform the lookup.
3207 __ push(cache); 3196 __ push(cache);
3208 __ push(key); 3197 __ push(key);
3209 __ CallRuntime(Runtime::kGetFromCache, 2); 3198 __ CallRuntime(Runtime::kGetFromCache, 2);
3210 3199
3211 __ bind(&done); 3200 __ bind(&done);
3212 context()->Plug(rax); 3201 context()->Plug(rax);
3213 } 3202 }
3214 3203
3215 3204
3216 void FullCodeGenerator::EmitIsRegExpEquivalent(ZoneList<Expression*>* args) { 3205 void FullCodeGenerator::EmitIsRegExpEquivalent(ZoneList<Expression*>* args) {
3217 ASSERT_EQ(2, args->length()); 3206 ASSERT_EQ(2, args->length());
3218 3207
3219 Register right = rax; 3208 Register right = rax;
3220 Register left = rbx; 3209 Register left = rbx;
3221 Register tmp = rcx; 3210 Register tmp = rcx;
3222 3211
3223 VisitForStackValue(args->at(0)); 3212 VisitForStackValue(args->at(0));
3224 VisitForAccumulatorValue(args->at(1)); 3213 VisitForAccumulatorValue(args->at(1));
3225 __ pop(left); 3214 __ pop(left);
3226 3215
3227 NearLabel done, fail, ok; 3216 Label done, fail, ok;
3228 __ cmpq(left, right); 3217 __ cmpq(left, right);
3229 __ j(equal, &ok); 3218 __ j(equal, &ok, Label::kNear);
3230 // Fail if either is a non-HeapObject. 3219 // Fail if either is a non-HeapObject.
3231 Condition either_smi = masm()->CheckEitherSmi(left, right, tmp); 3220 Condition either_smi = masm()->CheckEitherSmi(left, right, tmp);
3232 __ j(either_smi, &fail); 3221 __ j(either_smi, &fail, Label::kNear);
3233 __ j(zero, &fail); 3222 __ j(zero, &fail, Label::kNear);
3234 __ movq(tmp, FieldOperand(left, HeapObject::kMapOffset)); 3223 __ movq(tmp, FieldOperand(left, HeapObject::kMapOffset));
3235 __ cmpb(FieldOperand(tmp, Map::kInstanceTypeOffset), 3224 __ cmpb(FieldOperand(tmp, Map::kInstanceTypeOffset),
3236 Immediate(JS_REGEXP_TYPE)); 3225 Immediate(JS_REGEXP_TYPE));
3237 __ j(not_equal, &fail); 3226 __ j(not_equal, &fail, Label::kNear);
3238 __ cmpq(tmp, FieldOperand(right, HeapObject::kMapOffset)); 3227 __ cmpq(tmp, FieldOperand(right, HeapObject::kMapOffset));
3239 __ j(not_equal, &fail); 3228 __ j(not_equal, &fail, Label::kNear);
3240 __ movq(tmp, FieldOperand(left, JSRegExp::kDataOffset)); 3229 __ movq(tmp, FieldOperand(left, JSRegExp::kDataOffset));
3241 __ cmpq(tmp, FieldOperand(right, JSRegExp::kDataOffset)); 3230 __ cmpq(tmp, FieldOperand(right, JSRegExp::kDataOffset));
3242 __ j(equal, &ok); 3231 __ j(equal, &ok, Label::kNear);
3243 __ bind(&fail); 3232 __ bind(&fail);
3244 __ Move(rax, isolate()->factory()->false_value()); 3233 __ Move(rax, isolate()->factory()->false_value());
3245 __ jmp(&done); 3234 __ jmp(&done, Label::kNear);
3246 __ bind(&ok); 3235 __ bind(&ok);
3247 __ Move(rax, isolate()->factory()->true_value()); 3236 __ Move(rax, isolate()->factory()->true_value());
3248 __ bind(&done); 3237 __ bind(&done);
3249 3238
3250 context()->Plug(rax); 3239 context()->Plug(rax);
3251 } 3240 }
3252 3241
3253 3242
3254 void FullCodeGenerator::EmitHasCachedArrayIndex(ZoneList<Expression*>* args) { 3243 void FullCodeGenerator::EmitHasCachedArrayIndex(ZoneList<Expression*>* args) {
3255 ASSERT(args->length() == 1); 3244 ASSERT(args->length() == 1);
(...skipping 547 matching lines...) Expand 10 before | Expand all | Expand 10 after
3803 3792
3804 // We need a second deoptimization point after loading the value 3793 // We need a second deoptimization point after loading the value
3805 // in case evaluating the property load may have a side effect. 3794 // in case evaluating the property load may have a side effect.
3806 if (assign_type == VARIABLE) { 3795 if (assign_type == VARIABLE) {
3807 PrepareForBailout(expr->expression(), TOS_REG); 3796 PrepareForBailout(expr->expression(), TOS_REG);
3808 } else { 3797 } else {
3809 PrepareForBailoutForId(expr->CountId(), TOS_REG); 3798 PrepareForBailoutForId(expr->CountId(), TOS_REG);
3810 } 3799 }
3811 3800
3812 // Call ToNumber only if operand is not a smi. 3801 // Call ToNumber only if operand is not a smi.
3813 NearLabel no_conversion; 3802 Label no_conversion;
3814 Condition is_smi; 3803 Condition is_smi;
3815 is_smi = masm_->CheckSmi(rax); 3804 is_smi = masm_->CheckSmi(rax);
3816 __ j(is_smi, &no_conversion); 3805 __ j(is_smi, &no_conversion, Label::kNear);
3817 ToNumberStub convert_stub; 3806 ToNumberStub convert_stub;
3818 __ CallStub(&convert_stub); 3807 __ CallStub(&convert_stub);
3819 __ bind(&no_conversion); 3808 __ bind(&no_conversion);
3820 3809
3821 // Save result for postfix expressions. 3810 // Save result for postfix expressions.
3822 if (expr->is_postfix()) { 3811 if (expr->is_postfix()) {
3823 if (!context()->IsEffect()) { 3812 if (!context()->IsEffect()) {
3824 // Save the result on the stack. If we have a named or keyed property 3813 // Save the result on the stack. If we have a named or keyed property
3825 // we store the result under the receiver that is currently on top 3814 // we store the result under the receiver that is currently on top
3826 // of the stack. 3815 // of the stack.
3827 switch (assign_type) { 3816 switch (assign_type) {
3828 case VARIABLE: 3817 case VARIABLE:
3829 __ push(rax); 3818 __ push(rax);
3830 break; 3819 break;
3831 case NAMED_PROPERTY: 3820 case NAMED_PROPERTY:
3832 __ movq(Operand(rsp, kPointerSize), rax); 3821 __ movq(Operand(rsp, kPointerSize), rax);
3833 break; 3822 break;
3834 case KEYED_PROPERTY: 3823 case KEYED_PROPERTY:
3835 __ movq(Operand(rsp, 2 * kPointerSize), rax); 3824 __ movq(Operand(rsp, 2 * kPointerSize), rax);
3836 break; 3825 break;
3837 } 3826 }
3838 } 3827 }
3839 } 3828 }
3840 3829
3841 // Inline smi case if we are in a loop. 3830 // Inline smi case if we are in a loop.
3842 NearLabel stub_call, done; 3831 Label done, stub_call;
3843 JumpPatchSite patch_site(masm_); 3832 JumpPatchSite patch_site(masm_);
3844 3833
3845 if (ShouldInlineSmiCase(expr->op())) { 3834 if (ShouldInlineSmiCase(expr->op())) {
3846 if (expr->op() == Token::INC) { 3835 if (expr->op() == Token::INC) {
3847 __ SmiAddConstant(rax, rax, Smi::FromInt(1)); 3836 __ SmiAddConstant(rax, rax, Smi::FromInt(1));
3848 } else { 3837 } else {
3849 __ SmiSubConstant(rax, rax, Smi::FromInt(1)); 3838 __ SmiSubConstant(rax, rax, Smi::FromInt(1));
3850 } 3839 }
3851 __ j(overflow, &stub_call); 3840 __ j(overflow, &stub_call, Label::kNear);
3852 // We could eliminate this smi check if we split the code at 3841 // We could eliminate this smi check if we split the code at
3853 // the first smi check before calling ToNumber. 3842 // the first smi check before calling ToNumber.
3854 patch_site.EmitJumpIfSmi(rax, &done); 3843 patch_site.EmitJumpIfSmi(rax, &done, Label::kNear);
3855 3844
3856 __ bind(&stub_call); 3845 __ bind(&stub_call);
3857 // Call stub. Undo operation first. 3846 // Call stub. Undo operation first.
3858 if (expr->op() == Token::INC) { 3847 if (expr->op() == Token::INC) {
3859 __ SmiSubConstant(rax, rax, Smi::FromInt(1)); 3848 __ SmiSubConstant(rax, rax, Smi::FromInt(1));
3860 } else { 3849 } else {
3861 __ SmiAddConstant(rax, rax, Smi::FromInt(1)); 3850 __ SmiAddConstant(rax, rax, Smi::FromInt(1));
3862 } 3851 }
3863 } 3852 }
3864 3853
(...skipping 264 matching lines...) Expand 10 before | Expand all | Expand 10 after
4129 break; 4118 break;
4130 case Token::IN: 4119 case Token::IN:
4131 case Token::INSTANCEOF: 4120 case Token::INSTANCEOF:
4132 default: 4121 default:
4133 UNREACHABLE(); 4122 UNREACHABLE();
4134 } 4123 }
4135 4124
4136 bool inline_smi_code = ShouldInlineSmiCase(op); 4125 bool inline_smi_code = ShouldInlineSmiCase(op);
4137 JumpPatchSite patch_site(masm_); 4126 JumpPatchSite patch_site(masm_);
4138 if (inline_smi_code) { 4127 if (inline_smi_code) {
4139 NearLabel slow_case; 4128 Label slow_case;
4140 __ movq(rcx, rdx); 4129 __ movq(rcx, rdx);
4141 __ or_(rcx, rax); 4130 __ or_(rcx, rax);
4142 patch_site.EmitJumpIfNotSmi(rcx, &slow_case); 4131 patch_site.EmitJumpIfNotSmi(rcx, &slow_case, Label::kNear);
4143 __ cmpq(rdx, rax); 4132 __ cmpq(rdx, rax);
4144 Split(cc, if_true, if_false, NULL); 4133 Split(cc, if_true, if_false, NULL);
4145 __ bind(&slow_case); 4134 __ bind(&slow_case);
4146 } 4135 }
4147 4136
4148 // Record position and call the compare IC. 4137 // Record position and call the compare IC.
4149 SetSourcePosition(expr->position()); 4138 SetSourcePosition(expr->position());
4150 Handle<Code> ic = CompareIC::GetUninitialized(op); 4139 Handle<Code> ic = CompareIC::GetUninitialized(op);
4151 EmitCallIC(ic, &patch_site, expr->id()); 4140 EmitCallIC(ic, &patch_site, expr->id());
4152 4141
(...skipping 152 matching lines...) Expand 10 before | Expand all | Expand 10 after
4305 __ ret(0); 4294 __ ret(0);
4306 } 4295 }
4307 4296
4308 4297
4309 #undef __ 4298 #undef __
4310 4299
4311 4300
4312 } } // namespace v8::internal 4301 } } // namespace v8::internal
4313 4302
4314 #endif // V8_TARGET_ARCH_X64 4303 #endif // V8_TARGET_ARCH_X64
OLDNEW
« no previous file with comments | « src/x64/code-stubs-x64.cc ('k') | src/x64/ic-x64.cc » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698