Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(433)

Side by Side Diff: src/x64/code-stubs-x64.cc

Issue 139973004: A64: Synchronize with r15814. (Closed) Base URL: https://v8.googlecode.com/svn/branches/experimental/a64
Patch Set: Created 6 years, 10 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« no previous file with comments | « src/x64/code-stubs-x64.h ('k') | src/x64/codegen-x64.cc » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2013 the V8 project authors. All rights reserved. 1 // Copyright 2013 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 21 matching lines...) Expand all
32 #include "bootstrapper.h" 32 #include "bootstrapper.h"
33 #include "code-stubs.h" 33 #include "code-stubs.h"
34 #include "regexp-macro-assembler.h" 34 #include "regexp-macro-assembler.h"
35 #include "stub-cache.h" 35 #include "stub-cache.h"
36 #include "runtime.h" 36 #include "runtime.h"
37 37
38 namespace v8 { 38 namespace v8 {
39 namespace internal { 39 namespace internal {
40 40
41 41
42 void ToNumberStub::InitializeInterfaceDescriptor(
43 Isolate* isolate,
44 CodeStubInterfaceDescriptor* descriptor) {
45 static Register registers[] = { rax };
46 descriptor->register_param_count_ = 1;
47 descriptor->register_params_ = registers;
48 descriptor->deoptimization_handler_ = NULL;
49 }
50
51
42 void FastCloneShallowArrayStub::InitializeInterfaceDescriptor( 52 void FastCloneShallowArrayStub::InitializeInterfaceDescriptor(
43 Isolate* isolate, 53 Isolate* isolate,
44 CodeStubInterfaceDescriptor* descriptor) { 54 CodeStubInterfaceDescriptor* descriptor) {
45 static Register registers[] = { rax, rbx, rcx }; 55 static Register registers[] = { rax, rbx, rcx };
46 descriptor->register_param_count_ = 3; 56 descriptor->register_param_count_ = 3;
47 descriptor->register_params_ = registers; 57 descriptor->register_params_ = registers;
48 descriptor->deoptimization_handler_ = 58 descriptor->deoptimization_handler_ =
49 Runtime::FunctionForId(Runtime::kCreateArrayLiteralShallow)->entry; 59 Runtime::FunctionForId(Runtime::kCreateArrayLiteralShallow)->entry;
50 } 60 }
51 61
(...skipping 199 matching lines...) Expand 10 before | Expand all | Expand 10 after
251 Isolate* isolate, 261 Isolate* isolate,
252 CodeStubInterfaceDescriptor* descriptor) { 262 CodeStubInterfaceDescriptor* descriptor) {
253 static Register registers[] = { rdx, rcx, rax }; 263 static Register registers[] = { rdx, rcx, rax };
254 descriptor->register_param_count_ = 3; 264 descriptor->register_param_count_ = 3;
255 descriptor->register_params_ = registers; 265 descriptor->register_params_ = registers;
256 descriptor->deoptimization_handler_ = 266 descriptor->deoptimization_handler_ =
257 FUNCTION_ADDR(StoreIC_MissFromStubFailure); 267 FUNCTION_ADDR(StoreIC_MissFromStubFailure);
258 } 268 }
259 269
260 270
271 void ElementsTransitionAndStoreStub::InitializeInterfaceDescriptor(
272 Isolate* isolate,
273 CodeStubInterfaceDescriptor* descriptor) {
274 static Register registers[] = { rax, rbx, rcx, rdx };
275 descriptor->register_param_count_ = 4;
276 descriptor->register_params_ = registers;
277 descriptor->deoptimization_handler_ =
278 FUNCTION_ADDR(ElementsTransitionAndStoreIC_Miss);
279 }
280
281
261 #define __ ACCESS_MASM(masm) 282 #define __ ACCESS_MASM(masm)
262 283
263 284
264 void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm) { 285 void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm) {
265 // Update the static counter each time a new code stub is generated. 286 // Update the static counter each time a new code stub is generated.
266 Isolate* isolate = masm->isolate(); 287 Isolate* isolate = masm->isolate();
267 isolate->counters()->code_stubs()->Increment(); 288 isolate->counters()->code_stubs()->Increment();
268 289
269 CodeStubInterfaceDescriptor* descriptor = GetInterfaceDescriptor(isolate); 290 CodeStubInterfaceDescriptor* descriptor = GetInterfaceDescriptor(isolate);
270 int param_count = descriptor->register_param_count_; 291 int param_count = descriptor->register_param_count_;
271 { 292 {
272 // Call the runtime system in a fresh internal frame. 293 // Call the runtime system in a fresh internal frame.
273 FrameScope scope(masm, StackFrame::INTERNAL); 294 FrameScope scope(masm, StackFrame::INTERNAL);
274 ASSERT(descriptor->register_param_count_ == 0 || 295 ASSERT(descriptor->register_param_count_ == 0 ||
275 rax.is(descriptor->register_params_[param_count - 1])); 296 rax.is(descriptor->register_params_[param_count - 1]));
276 // Push arguments 297 // Push arguments
277 for (int i = 0; i < param_count; ++i) { 298 for (int i = 0; i < param_count; ++i) {
278 __ push(descriptor->register_params_[i]); 299 __ push(descriptor->register_params_[i]);
279 } 300 }
280 ExternalReference miss = descriptor->miss_handler(); 301 ExternalReference miss = descriptor->miss_handler();
281 __ CallExternalReference(miss, descriptor->register_param_count_); 302 __ CallExternalReference(miss, descriptor->register_param_count_);
282 } 303 }
283 304
284 __ Ret(); 305 __ Ret();
285 } 306 }
286 307
287 308
288 void ToNumberStub::Generate(MacroAssembler* masm) {
289 // The ToNumber stub takes one argument in rax.
290 Label check_heap_number, call_builtin;
291 __ SmiTest(rax);
292 __ j(not_zero, &check_heap_number, Label::kNear);
293 __ Ret();
294
295 __ bind(&check_heap_number);
296 __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset),
297 Heap::kHeapNumberMapRootIndex);
298 __ j(not_equal, &call_builtin, Label::kNear);
299 __ Ret();
300
301 __ bind(&call_builtin);
302 __ pop(rcx); // Pop return address.
303 __ push(rax);
304 __ push(rcx); // Push return address.
305 __ InvokeBuiltin(Builtins::TO_NUMBER, JUMP_FUNCTION);
306 }
307
308
309 void FastNewClosureStub::Generate(MacroAssembler* masm) { 309 void FastNewClosureStub::Generate(MacroAssembler* masm) {
310 // Create a new closure from the given function info in new 310 // Create a new closure from the given function info in new
311 // space. Set the context to the current context in rsi. 311 // space. Set the context to the current context in rsi.
312 Counters* counters = masm->isolate()->counters(); 312 Counters* counters = masm->isolate()->counters();
313 313
314 Label gc; 314 Label gc;
315 __ Allocate(JSFunction::kSize, rax, rbx, rcx, &gc, TAG_OBJECT); 315 __ Allocate(JSFunction::kSize, rax, rbx, rcx, &gc, TAG_OBJECT);
316 316
317 __ IncrementCounter(counters->fast_new_closure_total(), 1); 317 __ IncrementCounter(counters->fast_new_closure_total(), 1);
318 318
(...skipping 281 matching lines...) Expand 10 before | Expand all | Expand 10 after
600 Register second, 600 Register second,
601 Register scratch1, 601 Register scratch1,
602 Register scratch2, 602 Register scratch2,
603 Register scratch3, 603 Register scratch3,
604 Label* on_success, 604 Label* on_success,
605 Label* on_not_smis, 605 Label* on_not_smis,
606 ConvertUndefined convert_undefined); 606 ConvertUndefined convert_undefined);
607 }; 607 };
608 608
609 609
610 // Get the integer part of a heap number. 610 void DoubleToIStub::Generate(MacroAssembler* masm) {
611 // Overwrites the contents of rdi, rbx and rcx. Result cannot be rdi or rbx. 611 Register input_reg = this->source();
612 void IntegerConvert(MacroAssembler* masm, 612 Register final_result_reg = this->destination();
613 Register result, 613 ASSERT(is_truncating());
614 Register source) {
615 // Result may be rcx. If result and source are the same register, source will
616 // be overwritten.
617 ASSERT(!result.is(rdi) && !result.is(rbx));
618 // TODO(lrn): When type info reaches here, if value is a 32-bit integer, use
619 // cvttsd2si (32-bit version) directly.
620 Register double_exponent = rbx;
621 Register double_value = rdi;
622 Label done, exponent_63_plus;
623 // Get double and extract exponent.
624 __ movq(double_value, FieldOperand(source, HeapNumber::kValueOffset));
625 // Clear result preemptively, in case we need to return zero.
626 __ xorl(result, result);
627 __ movq(xmm0, double_value); // Save copy in xmm0 in case we need it there.
628 // Double to remove sign bit, shift exponent down to least significant bits.
629 // and subtract bias to get the unshifted, unbiased exponent.
630 __ lea(double_exponent, Operand(double_value, double_value, times_1, 0));
631 __ shr(double_exponent, Immediate(64 - HeapNumber::kExponentBits));
632 __ subl(double_exponent, Immediate(HeapNumber::kExponentBias));
633 // Check whether the exponent is too big for a 63 bit unsigned integer.
634 __ cmpl(double_exponent, Immediate(63));
635 __ j(above_equal, &exponent_63_plus, Label::kNear);
636 // Handle exponent range 0..62.
637 __ cvttsd2siq(result, xmm0);
638 __ jmp(&done, Label::kNear);
639 614
640 __ bind(&exponent_63_plus); 615 Label check_negative, process_64_bits, done;
641 // Exponent negative or 63+.
642 __ cmpl(double_exponent, Immediate(83));
643 // If exponent negative or above 83, number contains no significant bits in
644 // the range 0..2^31, so result is zero, and rcx already holds zero.
645 __ j(above, &done, Label::kNear);
646 616
647 // Exponent in rage 63..83. 617 int double_offset = offset();
648 // Mantissa * 2^exponent contains bits in the range 2^0..2^31, namely
649 // the least significant exponent-52 bits.
650 618
651 // Negate low bits of mantissa if value is negative. 619 // Account for return address and saved regs if input is rsp.
652 __ addq(double_value, double_value); // Move sign bit to carry. 620 if (input_reg.is(rsp)) double_offset += 3 * kPointerSize;
653 __ sbbl(result, result); // And convert carry to -1 in result register.
654 // if scratch2 is negative, do (scratch2-1)^-1, otherwise (scratch2-0)^0.
655 __ addl(double_value, result);
656 // Do xor in opposite directions depending on where we want the result
657 // (depending on whether result is rcx or not).
658 621
659 if (result.is(rcx)) { 622 MemOperand mantissa_operand(MemOperand(input_reg, double_offset));
660 __ xorl(double_value, result); 623 MemOperand exponent_operand(MemOperand(input_reg,
661 // Left shift mantissa by (exponent - mantissabits - 1) to save the 624 double_offset + kDoubleSize / 2));
662 // bits that have positional values below 2^32 (the extra -1 comes from the
663 // doubling done above to move the sign bit into the carry flag).
664 __ leal(rcx, Operand(double_exponent, -HeapNumber::kMantissaBits - 1));
665 __ shll_cl(double_value);
666 __ movl(result, double_value);
667 } else {
668 // As the then-branch, but move double-value to result before shifting.
669 __ xorl(result, double_value);
670 __ leal(rcx, Operand(double_exponent, -HeapNumber::kMantissaBits - 1));
671 __ shll_cl(result);
672 }
673 625
674 __ bind(&done); 626 Register scratch1;
627 Register scratch_candidates[3] = { rbx, rdx, rdi };
628 for (int i = 0; i < 3; i++) {
629 scratch1 = scratch_candidates[i];
630 if (!final_result_reg.is(scratch1) && !input_reg.is(scratch1)) break;
631 }
632
633 // Since we must use rcx for shifts below, use some other register (rax)
634 // to calculate the result if ecx is the requested return register.
635 Register result_reg = final_result_reg.is(rcx) ? rax : final_result_reg;
636 // Save ecx if it isn't the return register and therefore volatile, or if it
637 // is the return register, then save the temp register we use in its stead
638 // for the result.
639 Register save_reg = final_result_reg.is(rcx) ? rax : rcx;
640 __ push(scratch1);
641 __ push(save_reg);
642
643 bool stash_exponent_copy = !input_reg.is(rsp);
644 __ movl(scratch1, mantissa_operand);
645 __ movsd(xmm0, mantissa_operand);
646 __ movl(rcx, exponent_operand);
647 if (stash_exponent_copy) __ push(rcx);
648
649 __ andl(rcx, Immediate(HeapNumber::kExponentMask));
650 __ shrl(rcx, Immediate(HeapNumber::kExponentShift));
651 __ leal(result_reg, MemOperand(rcx, -HeapNumber::kExponentBias));
652 __ cmpl(result_reg, Immediate(HeapNumber::kMantissaBits));
653 __ j(below, &process_64_bits);
654
655 // Result is entirely in lower 32-bits of mantissa
656 int delta = HeapNumber::kExponentBias + Double::kPhysicalSignificandSize;
657 __ subl(rcx, Immediate(delta));
658 __ xorl(result_reg, result_reg);
659 __ cmpl(rcx, Immediate(31));
660 __ j(above, &done);
661 __ shll_cl(scratch1);
662 __ jmp(&check_negative);
663
664 __ bind(&process_64_bits);
665 __ cvttsd2siq(result_reg, xmm0);
666 __ jmp(&done, Label::kNear);
667
668 // If the double was negative, negate the integer result.
669 __ bind(&check_negative);
670 __ movl(result_reg, scratch1);
671 __ negl(result_reg);
672 if (stash_exponent_copy) {
673 __ cmpl(MemOperand(rsp, 0), Immediate(0));
674 } else {
675 __ cmpl(exponent_operand, Immediate(0));
676 }
677 __ cmovl(greater, result_reg, scratch1);
678
679 // Restore registers
680 __ bind(&done);
681 if (stash_exponent_copy) {
682 __ addq(rsp, Immediate(kDoubleSize));
683 }
684 if (!final_result_reg.is(result_reg)) {
685 ASSERT(final_result_reg.is(rcx));
686 __ movl(final_result_reg, result_reg);
687 }
688 __ pop(save_reg);
689 __ pop(scratch1);
690 __ ret(0);
675 } 691 }
676 692
677 693
// No platform-specific state to initialize on x64.
void BinaryOpStub::Initialize() {}
679 695
680 696
681 void BinaryOpStub::GenerateTypeTransition(MacroAssembler* masm) { 697 void BinaryOpStub::GenerateTypeTransition(MacroAssembler* masm) {
682 __ pop(rcx); // Save return address. 698 __ pop(rcx); // Save return address.
683 __ push(rdx); 699 __ push(rdx);
684 __ push(rax); 700 __ push(rax);
(...skipping 295 matching lines...) Expand 10 before | Expand all | Expand 10 after
980 Label left_not_string, call_runtime; 996 Label left_not_string, call_runtime;
981 997
982 // Registers containing left and right operands respectively. 998 // Registers containing left and right operands respectively.
983 Register left = rdx; 999 Register left = rdx;
984 Register right = rax; 1000 Register right = rax;
985 1001
986 // Test if left operand is a string. 1002 // Test if left operand is a string.
987 __ JumpIfSmi(left, &left_not_string, Label::kNear); 1003 __ JumpIfSmi(left, &left_not_string, Label::kNear);
988 __ CmpObjectType(left, FIRST_NONSTRING_TYPE, rcx); 1004 __ CmpObjectType(left, FIRST_NONSTRING_TYPE, rcx);
989 __ j(above_equal, &left_not_string, Label::kNear); 1005 __ j(above_equal, &left_not_string, Label::kNear);
990 StringAddStub string_add_left_stub((StringAddFlags) 1006 StringAddStub string_add_left_stub(
991 (ERECT_FRAME | NO_STRING_CHECK_LEFT_IN_STUB)); 1007 (StringAddFlags)(STRING_ADD_CHECK_RIGHT | STRING_ADD_ERECT_FRAME));
992 BinaryOpStub_GenerateRegisterArgsPushUnderReturn(masm); 1008 BinaryOpStub_GenerateRegisterArgsPushUnderReturn(masm);
993 __ TailCallStub(&string_add_left_stub); 1009 __ TailCallStub(&string_add_left_stub);
994 1010
995 // Left operand is not a string, test right. 1011 // Left operand is not a string, test right.
996 __ bind(&left_not_string); 1012 __ bind(&left_not_string);
997 __ JumpIfSmi(right, &call_runtime, Label::kNear); 1013 __ JumpIfSmi(right, &call_runtime, Label::kNear);
998 __ CmpObjectType(right, FIRST_NONSTRING_TYPE, rcx); 1014 __ CmpObjectType(right, FIRST_NONSTRING_TYPE, rcx);
999 __ j(above_equal, &call_runtime, Label::kNear); 1015 __ j(above_equal, &call_runtime, Label::kNear);
1000 1016
1001 StringAddStub string_add_right_stub((StringAddFlags) 1017 StringAddStub string_add_right_stub(
1002 (ERECT_FRAME | NO_STRING_CHECK_RIGHT_IN_STUB)); 1018 (StringAddFlags)(STRING_ADD_CHECK_LEFT | STRING_ADD_ERECT_FRAME));
1003 BinaryOpStub_GenerateRegisterArgsPushUnderReturn(masm); 1019 BinaryOpStub_GenerateRegisterArgsPushUnderReturn(masm);
1004 __ TailCallStub(&string_add_right_stub); 1020 __ TailCallStub(&string_add_right_stub);
1005 1021
1006 // Neither argument is a string. 1022 // Neither argument is a string.
1007 __ bind(&call_runtime); 1023 __ bind(&call_runtime);
1008 } 1024 }
1009 1025
1010 1026
1011 void BinaryOpStub::GenerateSmiStub(MacroAssembler* masm) { 1027 void BinaryOpStub::GenerateSmiStub(MacroAssembler* masm) {
1012 Label right_arg_changed, call_runtime; 1028 Label right_arg_changed, call_runtime;
(...skipping 54 matching lines...) Expand 10 before | Expand all | Expand 10 after
1067 // Test if left operand is a string. 1083 // Test if left operand is a string.
1068 __ JumpIfSmi(left, &call_runtime); 1084 __ JumpIfSmi(left, &call_runtime);
1069 __ CmpObjectType(left, FIRST_NONSTRING_TYPE, rcx); 1085 __ CmpObjectType(left, FIRST_NONSTRING_TYPE, rcx);
1070 __ j(above_equal, &call_runtime); 1086 __ j(above_equal, &call_runtime);
1071 1087
1072 // Test if right operand is a string. 1088 // Test if right operand is a string.
1073 __ JumpIfSmi(right, &call_runtime); 1089 __ JumpIfSmi(right, &call_runtime);
1074 __ CmpObjectType(right, FIRST_NONSTRING_TYPE, rcx); 1090 __ CmpObjectType(right, FIRST_NONSTRING_TYPE, rcx);
1075 __ j(above_equal, &call_runtime); 1091 __ j(above_equal, &call_runtime);
1076 1092
1077 StringAddStub string_add_stub((StringAddFlags) 1093 StringAddStub string_add_stub(
1078 (ERECT_FRAME | NO_STRING_CHECK_IN_STUB)); 1094 (StringAddFlags)(STRING_ADD_CHECK_NONE | STRING_ADD_ERECT_FRAME));
1079 BinaryOpStub_GenerateRegisterArgsPushUnderReturn(masm); 1095 BinaryOpStub_GenerateRegisterArgsPushUnderReturn(masm);
1080 __ TailCallStub(&string_add_stub); 1096 __ TailCallStub(&string_add_stub);
1081 1097
1082 __ bind(&call_runtime); 1098 __ bind(&call_runtime);
1083 GenerateTypeTransition(masm); 1099 GenerateTypeTransition(masm);
1084 } 1100 }
1085 1101
1086 1102
1087 void BinaryOpStub::GenerateOddballStub(MacroAssembler* masm) { 1103 void BinaryOpStub::GenerateOddballStub(MacroAssembler* masm) {
1088 Label call_runtime; 1104 Label call_runtime;
(...skipping 461 matching lines...) Expand 10 before | Expand all | Expand 10 after
1550 Label done; 1566 Label done;
1551 Label rax_is_smi; 1567 Label rax_is_smi;
1552 Label rax_is_object; 1568 Label rax_is_object;
1553 Label rdx_is_object; 1569 Label rdx_is_object;
1554 1570
1555 __ JumpIfNotSmi(rdx, &rdx_is_object); 1571 __ JumpIfNotSmi(rdx, &rdx_is_object);
1556 __ SmiToInteger32(rdx, rdx); 1572 __ SmiToInteger32(rdx, rdx);
1557 __ JumpIfSmi(rax, &rax_is_smi); 1573 __ JumpIfSmi(rax, &rax_is_smi);
1558 1574
1559 __ bind(&rax_is_object); 1575 __ bind(&rax_is_object);
1560 IntegerConvert(masm, rcx, rax); // Uses rdi, rcx and rbx. 1576 DoubleToIStub stub1(rax, rcx, HeapNumber::kValueOffset - kHeapObjectTag,
1577 true);
1578 __ call(stub1.GetCode(masm->isolate()), RelocInfo::CODE_TARGET);
1579
1561 __ jmp(&done); 1580 __ jmp(&done);
1562 1581
1563 __ bind(&rdx_is_object); 1582 __ bind(&rdx_is_object);
1564 IntegerConvert(masm, rdx, rdx); // Uses rdi, rcx and rbx. 1583 DoubleToIStub stub2(rdx, rdx, HeapNumber::kValueOffset - kHeapObjectTag,
1584 true);
1585 __ call(stub2.GetCode(masm->isolate()), RelocInfo::CODE_TARGET);
1565 __ JumpIfNotSmi(rax, &rax_is_object); 1586 __ JumpIfNotSmi(rax, &rax_is_object);
1587
1566 __ bind(&rax_is_smi); 1588 __ bind(&rax_is_smi);
1567 __ SmiToInteger32(rcx, rax); 1589 __ SmiToInteger32(rcx, rax);
1568 1590
1569 __ bind(&done); 1591 __ bind(&done);
1570 __ movl(rax, rdx); 1592 __ movl(rax, rdx);
1571 } 1593 }
1572 1594
1573 1595
1574 // Input: rdx, rax are the left and right objects of a bit op. 1596 // Input: rdx, rax are the left and right objects of a bit op.
1575 // Output: rax, rcx are left and right integers for a bit op. 1597 // Output: rax, rcx are left and right integers for a bit op.
(...skipping 14 matching lines...) Expand all
1590 __ bind(&check_undefined_arg1); 1612 __ bind(&check_undefined_arg1);
1591 __ CompareRoot(rdx, Heap::kUndefinedValueRootIndex); 1613 __ CompareRoot(rdx, Heap::kUndefinedValueRootIndex);
1592 __ j(not_equal, conversion_failure); 1614 __ j(not_equal, conversion_failure);
1593 __ Set(r8, 0); 1615 __ Set(r8, 0);
1594 __ jmp(&load_arg2); 1616 __ jmp(&load_arg2);
1595 1617
1596 __ bind(&arg1_is_object); 1618 __ bind(&arg1_is_object);
1597 __ cmpq(FieldOperand(rdx, HeapObject::kMapOffset), heap_number_map); 1619 __ cmpq(FieldOperand(rdx, HeapObject::kMapOffset), heap_number_map);
1598 __ j(not_equal, &check_undefined_arg1); 1620 __ j(not_equal, &check_undefined_arg1);
1599 // Get the untagged integer version of the rdx heap number in rcx. 1621 // Get the untagged integer version of the rdx heap number in rcx.
1600 IntegerConvert(masm, r8, rdx); 1622 DoubleToIStub stub1(rdx, r8, HeapNumber::kValueOffset - kHeapObjectTag,
1623 true);
1624 __ call(stub1.GetCode(masm->isolate()), RelocInfo::CODE_TARGET);
1601 1625
1602 // Here r8 has the untagged integer, rax has a Smi or a heap number. 1626 // Here r8 has the untagged integer, rax has a Smi or a heap number.
1603 __ bind(&load_arg2); 1627 __ bind(&load_arg2);
1604 // Test if arg2 is a Smi. 1628 // Test if arg2 is a Smi.
1605 __ JumpIfNotSmi(rax, &arg2_is_object); 1629 __ JumpIfNotSmi(rax, &arg2_is_object);
1606 __ SmiToInteger32(rcx, rax); 1630 __ SmiToInteger32(rcx, rax);
1607 __ jmp(&done); 1631 __ jmp(&done);
1608 1632
1609 // If the argument is undefined it converts to zero (ECMA-262, section 9.5). 1633 // If the argument is undefined it converts to zero (ECMA-262, section 9.5).
1610 __ bind(&check_undefined_arg2); 1634 __ bind(&check_undefined_arg2);
1611 __ CompareRoot(rax, Heap::kUndefinedValueRootIndex); 1635 __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
1612 __ j(not_equal, conversion_failure); 1636 __ j(not_equal, conversion_failure);
1613 __ Set(rcx, 0); 1637 __ Set(rcx, 0);
1614 __ jmp(&done); 1638 __ jmp(&done);
1615 1639
1616 __ bind(&arg2_is_object); 1640 __ bind(&arg2_is_object);
1617 __ cmpq(FieldOperand(rax, HeapObject::kMapOffset), heap_number_map); 1641 __ cmpq(FieldOperand(rax, HeapObject::kMapOffset), heap_number_map);
1618 __ j(not_equal, &check_undefined_arg2); 1642 __ j(not_equal, &check_undefined_arg2);
1619 // Get the untagged integer version of the rax heap number in rcx. 1643 // Get the untagged integer version of the rax heap number in rcx.
1620 IntegerConvert(masm, rcx, rax); 1644 DoubleToIStub stub2(rax, rcx, HeapNumber::kValueOffset - kHeapObjectTag,
1645 true);
1646 __ call(stub2.GetCode(masm->isolate()), RelocInfo::CODE_TARGET);
1647
1621 __ bind(&done); 1648 __ bind(&done);
1622 __ movl(rax, r8); 1649 __ movl(rax, r8);
1623 } 1650 }
1624 1651
1625 1652
1626 void FloatingPointHelper::LoadSSE2SmiOperands(MacroAssembler* masm) { 1653 void FloatingPointHelper::LoadSSE2SmiOperands(MacroAssembler* masm) {
1627 __ SmiToInteger32(kScratchRegister, rdx); 1654 __ SmiToInteger32(kScratchRegister, rdx);
1628 __ cvtlsi2sd(xmm0, kScratchRegister); 1655 __ cvtlsi2sd(xmm0, kScratchRegister);
1629 __ SmiToInteger32(kScratchRegister, rax); 1656 __ SmiToInteger32(kScratchRegister, rax);
1630 __ cvtlsi2sd(xmm1, kScratchRegister); 1657 __ cvtlsi2sd(xmm1, kScratchRegister);
(...skipping 1443 matching lines...) Expand 10 before | Expand all | Expand 10 after
3074 __ bind(&slowcase); 3101 __ bind(&slowcase);
3075 __ TailCallRuntime(Runtime::kRegExpConstructResult, 3, 1); 3102 __ TailCallRuntime(Runtime::kRegExpConstructResult, 3, 1);
3076 } 3103 }
3077 3104
3078 3105
3079 void NumberToStringStub::GenerateLookupNumberStringCache(MacroAssembler* masm, 3106 void NumberToStringStub::GenerateLookupNumberStringCache(MacroAssembler* masm,
3080 Register object, 3107 Register object,
3081 Register result, 3108 Register result,
3082 Register scratch1, 3109 Register scratch1,
3083 Register scratch2, 3110 Register scratch2,
3084 bool object_is_smi,
3085 Label* not_found) { 3111 Label* not_found) {
3086 // Use of registers. Register result is used as a temporary. 3112 // Use of registers. Register result is used as a temporary.
3087 Register number_string_cache = result; 3113 Register number_string_cache = result;
3088 Register mask = scratch1; 3114 Register mask = scratch1;
3089 Register scratch = scratch2; 3115 Register scratch = scratch2;
3090 3116
3091 // Load the number string cache. 3117 // Load the number string cache.
3092 __ LoadRoot(number_string_cache, Heap::kNumberStringCacheRootIndex); 3118 __ LoadRoot(number_string_cache, Heap::kNumberStringCacheRootIndex);
3093 3119
3094 // Make the hash mask from the length of the number string cache. It 3120 // Make the hash mask from the length of the number string cache. It
3095 // contains two elements (number and string) for each cache entry. 3121 // contains two elements (number and string) for each cache entry.
3096 __ SmiToInteger32( 3122 __ SmiToInteger32(
3097 mask, FieldOperand(number_string_cache, FixedArray::kLengthOffset)); 3123 mask, FieldOperand(number_string_cache, FixedArray::kLengthOffset));
3098 __ shrl(mask, Immediate(1)); 3124 __ shrl(mask, Immediate(1));
3099 __ subq(mask, Immediate(1)); // Make mask. 3125 __ subq(mask, Immediate(1)); // Make mask.
3100 3126
3101 // Calculate the entry in the number string cache. The hash value in the 3127 // Calculate the entry in the number string cache. The hash value in the
3102 // number string cache for smis is just the smi value, and the hash for 3128 // number string cache for smis is just the smi value, and the hash for
3103 // doubles is the xor of the upper and lower words. See 3129 // doubles is the xor of the upper and lower words. See
3104 // Heap::GetNumberStringCache. 3130 // Heap::GetNumberStringCache.
3105 Label is_smi; 3131 Label is_smi;
3106 Label load_result_from_cache; 3132 Label load_result_from_cache;
3107 Factory* factory = masm->isolate()->factory(); 3133 Factory* factory = masm->isolate()->factory();
3108 if (!object_is_smi) { 3134 __ JumpIfSmi(object, &is_smi);
3109 __ JumpIfSmi(object, &is_smi); 3135 __ CheckMap(object,
3110 __ CheckMap(object, 3136 factory->heap_number_map(),
3111 factory->heap_number_map(), 3137 not_found,
3112 not_found, 3138 DONT_DO_SMI_CHECK);
3113 DONT_DO_SMI_CHECK);
3114 3139
3115 STATIC_ASSERT(8 == kDoubleSize); 3140 STATIC_ASSERT(8 == kDoubleSize);
3116 __ movl(scratch, FieldOperand(object, HeapNumber::kValueOffset + 4)); 3141 __ movl(scratch, FieldOperand(object, HeapNumber::kValueOffset + 4));
3117 __ xor_(scratch, FieldOperand(object, HeapNumber::kValueOffset)); 3142 __ xor_(scratch, FieldOperand(object, HeapNumber::kValueOffset));
3118 GenerateConvertHashCodeToIndex(masm, scratch, mask); 3143 GenerateConvertHashCodeToIndex(masm, scratch, mask);
3119 3144
3120 Register index = scratch; 3145 Register index = scratch;
3121 Register probe = mask; 3146 Register probe = mask;
3122 __ movq(probe, 3147 __ movq(probe,
3123 FieldOperand(number_string_cache, 3148 FieldOperand(number_string_cache,
3124 index, 3149 index,
3125 times_1, 3150 times_1,
3126 FixedArray::kHeaderSize)); 3151 FixedArray::kHeaderSize));
3127 __ JumpIfSmi(probe, not_found); 3152 __ JumpIfSmi(probe, not_found);
3128 __ movsd(xmm0, FieldOperand(object, HeapNumber::kValueOffset)); 3153 __ movsd(xmm0, FieldOperand(object, HeapNumber::kValueOffset));
3129 __ movsd(xmm1, FieldOperand(probe, HeapNumber::kValueOffset)); 3154 __ movsd(xmm1, FieldOperand(probe, HeapNumber::kValueOffset));
3130 __ ucomisd(xmm0, xmm1); 3155 __ ucomisd(xmm0, xmm1);
3131 __ j(parity_even, not_found); // Bail out if NaN is involved. 3156 __ j(parity_even, not_found); // Bail out if NaN is involved.
3132 __ j(not_equal, not_found); // The cache did not contain this value. 3157 __ j(not_equal, not_found); // The cache did not contain this value.
3133 __ jmp(&load_result_from_cache); 3158 __ jmp(&load_result_from_cache);
3134 }
3135 3159
3136 __ bind(&is_smi); 3160 __ bind(&is_smi);
3137 __ SmiToInteger32(scratch, object); 3161 __ SmiToInteger32(scratch, object);
3138 GenerateConvertHashCodeToIndex(masm, scratch, mask); 3162 GenerateConvertHashCodeToIndex(masm, scratch, mask);
3139 3163
3140 Register index = scratch;
3141 // Check if the entry is the smi we are looking for. 3164 // Check if the entry is the smi we are looking for.
3142 __ cmpq(object, 3165 __ cmpq(object,
3143 FieldOperand(number_string_cache, 3166 FieldOperand(number_string_cache,
3144 index, 3167 index,
3145 times_1, 3168 times_1,
3146 FixedArray::kHeaderSize)); 3169 FixedArray::kHeaderSize));
3147 __ j(not_equal, not_found); 3170 __ j(not_equal, not_found);
3148 3171
3149 // Get the result from the cache. 3172 // Get the result from the cache.
3150 __ bind(&load_result_from_cache); 3173 __ bind(&load_result_from_cache);
(...skipping 18 matching lines...) Expand all
3169 __ shl(hash, Immediate(kPointerSizeLog2 + 1)); 3192 __ shl(hash, Immediate(kPointerSizeLog2 + 1));
3170 } 3193 }
3171 3194
3172 3195
3173 void NumberToStringStub::Generate(MacroAssembler* masm) { 3196 void NumberToStringStub::Generate(MacroAssembler* masm) {
3174 Label runtime; 3197 Label runtime;
3175 3198
3176 __ movq(rbx, Operand(rsp, kPointerSize)); 3199 __ movq(rbx, Operand(rsp, kPointerSize));
3177 3200
3178 // Generate code to lookup number in the number string cache. 3201 // Generate code to lookup number in the number string cache.
3179 GenerateLookupNumberStringCache(masm, rbx, rax, r8, r9, false, &runtime); 3202 GenerateLookupNumberStringCache(masm, rbx, rax, r8, r9, &runtime);
3180 __ ret(1 * kPointerSize); 3203 __ ret(1 * kPointerSize);
3181 3204
3182 __ bind(&runtime); 3205 __ bind(&runtime);
3183 // Handle number to string in the runtime system if not found in the cache. 3206 // Handle number to string in the runtime system if not found in the cache.
3184 __ TailCallRuntime(Runtime::kNumberToStringSkipCache, 1, 1); 3207 __ TailCallRuntime(Runtime::kNumberToStringSkipCache, 1, 1);
3185 } 3208 }
3186 3209
3187 3210
3188 static int NegativeComparisonResult(Condition cc) { 3211 static int NegativeComparisonResult(Condition cc) {
3189 ASSERT(cc != equal); 3212 ASSERT(cc != equal);
(...skipping 22 matching lines...) Expand all
3212 3235
3213 3236
3214 static void BranchIfNotInternalizedString(MacroAssembler* masm, 3237 static void BranchIfNotInternalizedString(MacroAssembler* masm,
3215 Label* label, 3238 Label* label,
3216 Register object, 3239 Register object,
3217 Register scratch) { 3240 Register scratch) {
3218 __ JumpIfSmi(object, label); 3241 __ JumpIfSmi(object, label);
3219 __ movq(scratch, FieldOperand(object, HeapObject::kMapOffset)); 3242 __ movq(scratch, FieldOperand(object, HeapObject::kMapOffset));
3220 __ movzxbq(scratch, 3243 __ movzxbq(scratch,
3221 FieldOperand(scratch, Map::kInstanceTypeOffset)); 3244 FieldOperand(scratch, Map::kInstanceTypeOffset));
3222 STATIC_ASSERT(kInternalizedTag != 0); 3245 STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
3223 __ and_(scratch, Immediate(kIsNotStringMask | kIsInternalizedMask)); 3246 __ testb(scratch, Immediate(kIsNotStringMask | kIsNotInternalizedMask));
3224 __ cmpb(scratch, Immediate(kInternalizedTag | kStringTag)); 3247 __ j(not_zero, label);
3225 __ j(not_equal, label);
3226 } 3248 }
3227 3249
3228 3250
3229 void ICCompareStub::GenerateGeneric(MacroAssembler* masm) { 3251 void ICCompareStub::GenerateGeneric(MacroAssembler* masm) {
3230 Label check_unequal_objects, done; 3252 Label check_unequal_objects, done;
3231 Condition cc = GetCondition(); 3253 Condition cc = GetCondition();
3232 Factory* factory = masm->isolate()->factory(); 3254 Factory* factory = masm->isolate()->factory();
3233 3255
3234 Label miss; 3256 Label miss;
3235 CheckInputType(masm, rdx, left_, &miss); 3257 CheckInputType(masm, rdx, left_, &miss);
(...skipping 93 matching lines...) Expand 10 before | Expand all | Expand 10 after
3329 3351
3330 // If either operand is a JSObject or an oddball value, then they are not 3352 // If either operand is a JSObject or an oddball value, then they are not
3331 // equal since their pointers are different 3353 // equal since their pointers are different
3332 // There is no test for undetectability in strict equality. 3354 // There is no test for undetectability in strict equality.
3333 3355
3334 // If the first object is a JS object, we have done pointer comparison. 3356 // If the first object is a JS object, we have done pointer comparison.
3335 STATIC_ASSERT(LAST_TYPE == LAST_SPEC_OBJECT_TYPE); 3357 STATIC_ASSERT(LAST_TYPE == LAST_SPEC_OBJECT_TYPE);
3336 Label first_non_object; 3358 Label first_non_object;
3337 __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx); 3359 __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx);
3338 __ j(below, &first_non_object, Label::kNear); 3360 __ j(below, &first_non_object, Label::kNear);
3339 // Return non-zero (eax (not rax) is not zero) 3361 // Return non-zero (rax (not rax) is not zero)
3340 Label return_not_equal; 3362 Label return_not_equal;
3341 STATIC_ASSERT(kHeapObjectTag != 0); 3363 STATIC_ASSERT(kHeapObjectTag != 0);
3342 __ bind(&return_not_equal); 3364 __ bind(&return_not_equal);
3343 __ ret(0); 3365 __ ret(0);
3344 3366
3345 __ bind(&first_non_object); 3367 __ bind(&first_non_object);
3346 // Check for oddballs: true, false, null, undefined. 3368 // Check for oddballs: true, false, null, undefined.
3347 __ CmpInstanceType(rcx, ODDBALL_TYPE); 3369 __ CmpInstanceType(rcx, ODDBALL_TYPE);
3348 __ j(equal, &return_not_equal); 3370 __ j(equal, &return_not_equal);
3349 3371
(...skipping 41 matching lines...) Expand 10 before | Expand all | Expand 10 after
3391 3413
3392 // Fast negative check for internalized-to-internalized equality. 3414 // Fast negative check for internalized-to-internalized equality.
3393 Label check_for_strings; 3415 Label check_for_strings;
3394 if (cc == equal) { 3416 if (cc == equal) {
3395 BranchIfNotInternalizedString( 3417 BranchIfNotInternalizedString(
3396 masm, &check_for_strings, rax, kScratchRegister); 3418 masm, &check_for_strings, rax, kScratchRegister);
3397 BranchIfNotInternalizedString( 3419 BranchIfNotInternalizedString(
3398 masm, &check_for_strings, rdx, kScratchRegister); 3420 masm, &check_for_strings, rdx, kScratchRegister);
3399 3421
3400 // We've already checked for object identity, so if both operands are 3422 // We've already checked for object identity, so if both operands are
3401 // internalized strings they aren't equal. Register eax (not rax) already 3423 // internalized strings they aren't equal. Register rax (not rax) already
3402 // holds a non-zero value, which indicates not equal, so just return. 3424 // holds a non-zero value, which indicates not equal, so just return.
3403 __ ret(0); 3425 __ ret(0);
3404 } 3426 }
3405 3427
3406 __ bind(&check_for_strings); 3428 __ bind(&check_for_strings);
3407 3429
3408 __ JumpIfNotBothSequentialAsciiStrings( 3430 __ JumpIfNotBothSequentialAsciiStrings(
3409 rdx, rax, rcx, rbx, &check_unequal_objects); 3431 rdx, rax, rcx, rbx, &check_unequal_objects);
3410 3432
3411 // Inline comparison of ASCII strings. 3433 // Inline comparison of ASCII strings.
(...skipping 1064 matching lines...) Expand 10 before | Expand all | Expand 10 after
4476 4498
4477 void StringAddStub::Generate(MacroAssembler* masm) { 4499 void StringAddStub::Generate(MacroAssembler* masm) {
4478 Label call_runtime, call_builtin; 4500 Label call_runtime, call_builtin;
4479 Builtins::JavaScript builtin_id = Builtins::ADD; 4501 Builtins::JavaScript builtin_id = Builtins::ADD;
4480 4502
4481 // Load the two arguments. 4503 // Load the two arguments.
4482 __ movq(rax, Operand(rsp, 2 * kPointerSize)); // First argument (left). 4504 __ movq(rax, Operand(rsp, 2 * kPointerSize)); // First argument (left).
4483 __ movq(rdx, Operand(rsp, 1 * kPointerSize)); // Second argument (right). 4505 __ movq(rdx, Operand(rsp, 1 * kPointerSize)); // Second argument (right).
4484 4506
4485 // Make sure that both arguments are strings if not known in advance. 4507 // Make sure that both arguments are strings if not known in advance.
4486 if ((flags_ & NO_STRING_ADD_FLAGS) != 0) { 4508 // Otherwise, at least one of the arguments is definitely a string,
4509 // and we convert the one that is not known to be a string.
4510 if ((flags_ & STRING_ADD_CHECK_BOTH) == STRING_ADD_CHECK_BOTH) {
4511 ASSERT((flags_ & STRING_ADD_CHECK_LEFT) == STRING_ADD_CHECK_LEFT);
4512 ASSERT((flags_ & STRING_ADD_CHECK_RIGHT) == STRING_ADD_CHECK_RIGHT);
4487 __ JumpIfSmi(rax, &call_runtime); 4513 __ JumpIfSmi(rax, &call_runtime);
4488 __ CmpObjectType(rax, FIRST_NONSTRING_TYPE, r8); 4514 __ CmpObjectType(rax, FIRST_NONSTRING_TYPE, r8);
4489 __ j(above_equal, &call_runtime); 4515 __ j(above_equal, &call_runtime);
4490 4516
4491 // First argument is a a string, test second. 4517 // First argument is a a string, test second.
4492 __ JumpIfSmi(rdx, &call_runtime); 4518 __ JumpIfSmi(rdx, &call_runtime);
4493 __ CmpObjectType(rdx, FIRST_NONSTRING_TYPE, r9); 4519 __ CmpObjectType(rdx, FIRST_NONSTRING_TYPE, r9);
4494 __ j(above_equal, &call_runtime); 4520 __ j(above_equal, &call_runtime);
4495 } else { 4521 } else if ((flags_ & STRING_ADD_CHECK_LEFT) == STRING_ADD_CHECK_LEFT) {
4496 // Here at least one of the arguments is definitely a string. 4522 ASSERT((flags_ & STRING_ADD_CHECK_RIGHT) == 0);
4497 // We convert the one that is not known to be a string. 4523 GenerateConvertArgument(masm, 2 * kPointerSize, rax, rbx, rcx, rdi,
4498 if ((flags_ & NO_STRING_CHECK_LEFT_IN_STUB) == 0) { 4524 &call_builtin);
4499 ASSERT((flags_ & NO_STRING_CHECK_RIGHT_IN_STUB) != 0); 4525 builtin_id = Builtins::STRING_ADD_RIGHT;
4500 GenerateConvertArgument(masm, 2 * kPointerSize, rax, rbx, rcx, rdi, 4526 } else if ((flags_ & STRING_ADD_CHECK_RIGHT) == STRING_ADD_CHECK_RIGHT) {
4501 &call_builtin); 4527 ASSERT((flags_ & STRING_ADD_CHECK_LEFT) == 0);
4502 builtin_id = Builtins::STRING_ADD_RIGHT; 4528 GenerateConvertArgument(masm, 1 * kPointerSize, rdx, rbx, rcx, rdi,
4503 } else if ((flags_ & NO_STRING_CHECK_RIGHT_IN_STUB) == 0) { 4529 &call_builtin);
4504 ASSERT((flags_ & NO_STRING_CHECK_LEFT_IN_STUB) != 0); 4530 builtin_id = Builtins::STRING_ADD_LEFT;
4505 GenerateConvertArgument(masm, 1 * kPointerSize, rdx, rbx, rcx, rdi,
4506 &call_builtin);
4507 builtin_id = Builtins::STRING_ADD_LEFT;
4508 }
4509 } 4531 }
4510 4532
4511 // Both arguments are strings. 4533 // Both arguments are strings.
4512 // rax: first string 4534 // rax: first string
4513 // rdx: second string 4535 // rdx: second string
4514 // Check if either of the strings are empty. In that case return the other. 4536 // Check if either of the strings are empty. In that case return the other.
4515 Label second_not_zero_length, both_not_zero_length; 4537 Label second_not_zero_length, both_not_zero_length;
4516 __ movq(rcx, FieldOperand(rdx, String::kLengthOffset)); 4538 __ movq(rcx, FieldOperand(rdx, String::kLengthOffset));
4517 __ SmiTest(rcx); 4539 __ SmiTest(rcx);
4518 __ j(not_zero, &second_not_zero_length, Label::kNear); 4540 __ j(not_zero, &second_not_zero_length, Label::kNear);
(...skipping 15 matching lines...) Expand all
4534 // rbx: length of first string 4556 // rbx: length of first string
4535 // rcx: length of second string 4557 // rcx: length of second string
4536 // rdx: second string 4558 // rdx: second string
4537 // r8: map of first string (if flags_ == NO_STRING_ADD_FLAGS) 4559 // r8: map of first string (if flags_ == NO_STRING_ADD_FLAGS)
4538 // r9: map of second string (if flags_ == NO_STRING_ADD_FLAGS) 4560 // r9: map of second string (if flags_ == NO_STRING_ADD_FLAGS)
4539 Label string_add_flat_result, longer_than_two; 4561 Label string_add_flat_result, longer_than_two;
4540 __ bind(&both_not_zero_length); 4562 __ bind(&both_not_zero_length);
4541 4563
4542 // If arguments where known to be strings, maps are not loaded to r8 and r9 4564 // If arguments where known to be strings, maps are not loaded to r8 and r9
4543 // by the code above. 4565 // by the code above.
4544 if (flags_ != NO_STRING_ADD_FLAGS) { 4566 if ((flags_ & STRING_ADD_CHECK_BOTH) != STRING_ADD_CHECK_BOTH) {
4545 __ movq(r8, FieldOperand(rax, HeapObject::kMapOffset)); 4567 __ movq(r8, FieldOperand(rax, HeapObject::kMapOffset));
4546 __ movq(r9, FieldOperand(rdx, HeapObject::kMapOffset)); 4568 __ movq(r9, FieldOperand(rdx, HeapObject::kMapOffset));
4547 } 4569 }
4548 // Get the instance types of the two strings as they will be needed soon. 4570 // Get the instance types of the two strings as they will be needed soon.
4549 __ movzxbl(r8, FieldOperand(r8, Map::kInstanceTypeOffset)); 4571 __ movzxbl(r8, FieldOperand(r8, Map::kInstanceTypeOffset));
4550 __ movzxbl(r9, FieldOperand(r9, Map::kInstanceTypeOffset)); 4572 __ movzxbl(r9, FieldOperand(r9, Map::kInstanceTypeOffset));
4551 4573
4552 // Look at the length of the result of adding the two strings. 4574 // Look at the length of the result of adding the two strings.
4553 STATIC_ASSERT(String::kMaxLength <= Smi::kMaxValue / 2); 4575 STATIC_ASSERT(String::kMaxLength <= Smi::kMaxValue / 2);
4554 __ SmiAdd(rbx, rbx, rcx); 4576 __ SmiAdd(rbx, rbx, rcx);
(...skipping 199 matching lines...) Expand 10 before | Expand all | Expand 10 after
4754 // rbx: next character of result 4776 // rbx: next character of result
4755 // rdx: first char of second string 4777 // rdx: first char of second string
4756 // r15: length of second string 4778 // r15: length of second string
4757 StringHelper::GenerateCopyCharacters(masm, rbx, rdx, r15, false); 4779 StringHelper::GenerateCopyCharacters(masm, rbx, rdx, r15, false);
4758 __ IncrementCounter(counters->string_add_native(), 1); 4780 __ IncrementCounter(counters->string_add_native(), 1);
4759 __ ret(2 * kPointerSize); 4781 __ ret(2 * kPointerSize);
4760 4782
4761 // Just jump to runtime to add the two strings. 4783 // Just jump to runtime to add the two strings.
4762 __ bind(&call_runtime); 4784 __ bind(&call_runtime);
4763 4785
4764 if ((flags_ & ERECT_FRAME) != 0) { 4786 if ((flags_ & STRING_ADD_ERECT_FRAME) != 0) {
4765 GenerateRegisterArgsPop(masm, rcx); 4787 GenerateRegisterArgsPop(masm, rcx);
4766 // Build a frame 4788 // Build a frame
4767 { 4789 {
4768 FrameScope scope(masm, StackFrame::INTERNAL); 4790 FrameScope scope(masm, StackFrame::INTERNAL);
4769 GenerateRegisterArgsPush(masm); 4791 GenerateRegisterArgsPush(masm);
4770 __ CallRuntime(Runtime::kStringAdd, 2); 4792 __ CallRuntime(Runtime::kStringAdd, 2);
4771 } 4793 }
4772 __ Ret(); 4794 __ Ret();
4773 } else { 4795 } else {
4774 __ TailCallRuntime(Runtime::kStringAdd, 2, 1); 4796 __ TailCallRuntime(Runtime::kStringAdd, 2, 1);
4775 } 4797 }
4776 4798
4777 if (call_builtin.is_linked()) { 4799 if (call_builtin.is_linked()) {
4778 __ bind(&call_builtin); 4800 __ bind(&call_builtin);
4779 if ((flags_ & ERECT_FRAME) != 0) { 4801 if ((flags_ & STRING_ADD_ERECT_FRAME) != 0) {
4780 GenerateRegisterArgsPop(masm, rcx); 4802 GenerateRegisterArgsPop(masm, rcx);
4781 // Build a frame 4803 // Build a frame
4782 { 4804 {
4783 FrameScope scope(masm, StackFrame::INTERNAL); 4805 FrameScope scope(masm, StackFrame::INTERNAL);
4784 GenerateRegisterArgsPush(masm); 4806 GenerateRegisterArgsPush(masm);
4785 __ InvokeBuiltin(builtin_id, CALL_FUNCTION); 4807 __ InvokeBuiltin(builtin_id, CALL_FUNCTION);
4786 } 4808 }
4787 __ Ret(); 4809 __ Ret();
4788 } else { 4810 } else {
4789 __ InvokeBuiltin(builtin_id, JUMP_FUNCTION); 4811 __ InvokeBuiltin(builtin_id, JUMP_FUNCTION);
(...skipping 32 matching lines...) Expand 10 before | Expand all | Expand 10 after
4822 4844
4823 // Check the number to string cache. 4845 // Check the number to string cache.
4824 Label not_cached; 4846 Label not_cached;
4825 __ bind(&not_string); 4847 __ bind(&not_string);
4826 // Puts the cached result into scratch1. 4848 // Puts the cached result into scratch1.
4827 NumberToStringStub::GenerateLookupNumberStringCache(masm, 4849 NumberToStringStub::GenerateLookupNumberStringCache(masm,
4828 arg, 4850 arg,
4829 scratch1, 4851 scratch1,
4830 scratch2, 4852 scratch2,
4831 scratch3, 4853 scratch3,
4832 false,
4833 &not_cached); 4854 &not_cached);
4834 __ movq(arg, scratch1); 4855 __ movq(arg, scratch1);
4835 __ movq(Operand(rsp, stack_offset), arg); 4856 __ movq(Operand(rsp, stack_offset), arg);
4836 __ jmp(&done); 4857 __ jmp(&done);
4837 4858
4838 // Check if the argument is a safe string wrapper. 4859 // Check if the argument is a safe string wrapper.
4839 __ bind(&not_cached); 4860 __ bind(&not_cached);
4840 __ JumpIfSmi(arg, slow); 4861 __ JumpIfSmi(arg, slow);
4841 __ CmpObjectType(arg, JS_VALUE_TYPE, scratch1); // map -> scratch1. 4862 __ CmpObjectType(arg, JS_VALUE_TYPE, scratch1); // map -> scratch1.
4842 __ j(not_equal, slow); 4863 __ j(not_equal, slow);
(...skipping 791 matching lines...) Expand 10 before | Expand all | Expand 10 after
5634 // Check that both operands are heap objects. 5655 // Check that both operands are heap objects.
5635 Label miss; 5656 Label miss;
5636 Condition cond = masm->CheckEitherSmi(left, right, tmp1); 5657 Condition cond = masm->CheckEitherSmi(left, right, tmp1);
5637 __ j(cond, &miss, Label::kNear); 5658 __ j(cond, &miss, Label::kNear);
5638 5659
5639 // Check that both operands are internalized strings. 5660 // Check that both operands are internalized strings.
5640 __ movq(tmp1, FieldOperand(left, HeapObject::kMapOffset)); 5661 __ movq(tmp1, FieldOperand(left, HeapObject::kMapOffset));
5641 __ movq(tmp2, FieldOperand(right, HeapObject::kMapOffset)); 5662 __ movq(tmp2, FieldOperand(right, HeapObject::kMapOffset));
5642 __ movzxbq(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset)); 5663 __ movzxbq(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
5643 __ movzxbq(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset)); 5664 __ movzxbq(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
5644 STATIC_ASSERT(kInternalizedTag != 0); 5665 STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
5645 __ and_(tmp1, Immediate(kIsNotStringMask | kIsInternalizedMask)); 5666 __ or_(tmp1, tmp2);
5646 __ cmpb(tmp1, Immediate(kInternalizedTag | kStringTag)); 5667 __ testb(tmp1, Immediate(kIsNotStringMask | kIsNotInternalizedMask));
5647 __ j(not_equal, &miss, Label::kNear); 5668 __ j(not_zero, &miss, Label::kNear);
5648
5649 __ and_(tmp2, Immediate(kIsNotStringMask | kIsInternalizedMask));
5650 __ cmpb(tmp2, Immediate(kInternalizedTag | kStringTag));
5651 __ j(not_equal, &miss, Label::kNear);
5652 5669
5653 // Internalized strings are compared by identity. 5670 // Internalized strings are compared by identity.
5654 Label done; 5671 Label done;
5655 __ cmpq(left, right); 5672 __ cmpq(left, right);
5656 // Make sure rax is non-zero. At this point input operands are 5673 // Make sure rax is non-zero. At this point input operands are
5657 // guaranteed to be non-zero. 5674 // guaranteed to be non-zero.
5658 ASSERT(right.is(rax)); 5675 ASSERT(right.is(rax));
5659 __ j(not_equal, &done, Label::kNear); 5676 __ j(not_equal, &done, Label::kNear);
5660 STATIC_ASSERT(EQUAL == 0); 5677 STATIC_ASSERT(EQUAL == 0);
5661 STATIC_ASSERT(kSmiTag == 0); 5678 STATIC_ASSERT(kSmiTag == 0);
(...skipping 16 matching lines...) Expand all
5678 Register tmp1 = rcx; 5695 Register tmp1 = rcx;
5679 Register tmp2 = rbx; 5696 Register tmp2 = rbx;
5680 5697
5681 // Check that both operands are heap objects. 5698 // Check that both operands are heap objects.
5682 Label miss; 5699 Label miss;
5683 Condition cond = masm->CheckEitherSmi(left, right, tmp1); 5700 Condition cond = masm->CheckEitherSmi(left, right, tmp1);
5684 __ j(cond, &miss, Label::kNear); 5701 __ j(cond, &miss, Label::kNear);
5685 5702
5686 // Check that both operands are unique names. This leaves the instance 5703 // Check that both operands are unique names. This leaves the instance
5687 // types loaded in tmp1 and tmp2. 5704 // types loaded in tmp1 and tmp2.
5688 STATIC_ASSERT(kInternalizedTag != 0);
5689 __ movq(tmp1, FieldOperand(left, HeapObject::kMapOffset)); 5705 __ movq(tmp1, FieldOperand(left, HeapObject::kMapOffset));
5690 __ movq(tmp2, FieldOperand(right, HeapObject::kMapOffset)); 5706 __ movq(tmp2, FieldOperand(right, HeapObject::kMapOffset));
5691 __ movzxbq(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset)); 5707 __ movzxbq(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
5692 __ movzxbq(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset)); 5708 __ movzxbq(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
5693 5709
5694 __ JumpIfNotUniqueName(tmp1, &miss, Label::kNear); 5710 __ JumpIfNotUniqueName(tmp1, &miss, Label::kNear);
5695 __ JumpIfNotUniqueName(tmp2, &miss, Label::kNear); 5711 __ JumpIfNotUniqueName(tmp2, &miss, Label::kNear);
5696 5712
5697 // Unique names are compared by identity. 5713 // Unique names are compared by identity.
5698 Label done; 5714 Label done;
(...skipping 52 matching lines...) Expand 10 before | Expand all | Expand 10 after
5751 __ ret(0); 5767 __ ret(0);
5752 5768
5753 // Handle not identical strings. 5769 // Handle not identical strings.
5754 __ bind(&not_same); 5770 __ bind(&not_same);
5755 5771
5756 // Check that both strings are internalized strings. If they are, we're done 5772 // Check that both strings are internalized strings. If they are, we're done
5757 // because we already know they are not identical. We also know they are both 5773 // because we already know they are not identical. We also know they are both
5758 // strings. 5774 // strings.
5759 if (equality) { 5775 if (equality) {
5760 Label do_compare; 5776 Label do_compare;
5761 STATIC_ASSERT(kInternalizedTag != 0); 5777 STATIC_ASSERT(kInternalizedTag == 0);
5762 __ and_(tmp1, tmp2); 5778 __ or_(tmp1, tmp2);
5763 __ testb(tmp1, Immediate(kIsInternalizedMask)); 5779 __ testb(tmp1, Immediate(kIsNotInternalizedMask));
5764 __ j(zero, &do_compare, Label::kNear); 5780 __ j(not_zero, &do_compare, Label::kNear);
5765 // Make sure rax is non-zero. At this point input operands are 5781 // Make sure rax is non-zero. At this point input operands are
5766 // guaranteed to be non-zero. 5782 // guaranteed to be non-zero.
5767 ASSERT(right.is(rax)); 5783 ASSERT(right.is(rax));
5768 __ ret(0); 5784 __ ret(0);
5769 __ bind(&do_compare); 5785 __ bind(&do_compare);
5770 } 5786 }
5771 5787
5772 // Check that both strings are sequential ASCII. 5788 // Check that both strings are sequential ASCII.
5773 Label runtime; 5789 Label runtime;
5774 __ JumpIfNotBothSequentialAsciiStrings(left, right, tmp1, tmp2, &runtime); 5790 __ JumpIfNotBothSequentialAsciiStrings(left, right, tmp1, tmp2, &runtime);
(...skipping 1025 matching lines...) Expand 10 before | Expand all | Expand 10 after
6800 __ bind(&fast_elements_case); 6816 __ bind(&fast_elements_case);
6801 GenerateCase(masm, FAST_ELEMENTS); 6817 GenerateCase(masm, FAST_ELEMENTS);
6802 } 6818 }
6803 6819
6804 6820
6805 #undef __ 6821 #undef __
6806 6822
6807 } } // namespace v8::internal 6823 } } // namespace v8::internal
6808 6824
6809 #endif // V8_TARGET_ARCH_X64 6825 #endif // V8_TARGET_ARCH_X64
OLDNEW
« no previous file with comments | « src/x64/code-stubs-x64.h ('k') | src/x64/codegen-x64.cc » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698