OLD | NEW |
1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 228 matching lines...)
239 static Register registers[] = { rax }; | 239 static Register registers[] = { rax }; |
240 descriptor->register_param_count_ = 1; | 240 descriptor->register_param_count_ = 1; |
241 descriptor->register_params_ = registers; | 241 descriptor->register_params_ = registers; |
242 descriptor->deoptimization_handler_ = | 242 descriptor->deoptimization_handler_ = |
243 FUNCTION_ADDR(ToBooleanIC_Miss); | 243 FUNCTION_ADDR(ToBooleanIC_Miss); |
244 descriptor->SetMissHandler( | 244 descriptor->SetMissHandler( |
245 ExternalReference(IC_Utility(IC::kToBooleanIC_Miss), isolate)); | 245 ExternalReference(IC_Utility(IC::kToBooleanIC_Miss), isolate)); |
246 } | 246 } |
247 | 247 |
248 | 248 |
| 249 void UnaryOpStub::InitializeInterfaceDescriptor( |
| 250 Isolate* isolate, |
| 251 CodeStubInterfaceDescriptor* descriptor) { |
| 252 static Register registers[] = { rax }; |
| 253 descriptor->register_param_count_ = 1; |
| 254 descriptor->register_params_ = registers; |
| 255 descriptor->deoptimization_handler_ = |
| 256 FUNCTION_ADDR(UnaryOpIC_Miss); |
| 257 } |
| 258 |
| 259 |
249 void StoreGlobalStub::InitializeInterfaceDescriptor( | 260 void StoreGlobalStub::InitializeInterfaceDescriptor( |
250 Isolate* isolate, | 261 Isolate* isolate, |
251 CodeStubInterfaceDescriptor* descriptor) { | 262 CodeStubInterfaceDescriptor* descriptor) { |
252 static Register registers[] = { rdx, rcx, rax }; | 263 static Register registers[] = { rdx, rcx, rax }; |
253 descriptor->register_param_count_ = 3; | 264 descriptor->register_param_count_ = 3; |
254 descriptor->register_params_ = registers; | 265 descriptor->register_params_ = registers; |
255 descriptor->deoptimization_handler_ = | 266 descriptor->deoptimization_handler_ = |
256 FUNCTION_ADDR(StoreIC_MissFromStubFailure); | 267 FUNCTION_ADDR(StoreIC_MissFromStubFailure); |
257 } | 268 } |
258 | 269 |
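For context on the block above: each stub that can be compiled by the optimizing compiler publishes its calling convention through a CodeStubInterfaceDescriptor, exactly as ToBooleanStub, UnaryOpStub, and StoreGlobalStub do here. A self-contained sketch of what such a descriptor records (illustrative only; V8's real declaration has more fields, and Register/Address are stand-in typedefs):

    typedef int Register;             // stand-in so the sketch compiles alone
    typedef void* Address;            // stand-in for a code/function address

    // Illustrative sketch of the fields the code above fills in.
    struct CodeStubInterfaceDescriptor {
      int register_param_count_;        // how many parameters arrive in registers
      Register* register_params_;       // which registers, e.g. { rax }
      Address deoptimization_handler_;  // runtime fallback, e.g. UnaryOpIC_Miss
    };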
(...skipping 153 matching lines...)
412 | 423 |
413 // Return and remove the on-stack parameter. | 424 // Return and remove the on-stack parameter. |
414 __ ret(1 * kPointerSize); | 425 __ ret(1 * kPointerSize); |
415 | 426 |
416 __ bind(&restore); | 427 __ bind(&restore); |
417 __ movq(rdx, Operand(rsp, 1 * kPointerSize)); | 428 __ movq(rdx, Operand(rsp, 1 * kPointerSize)); |
418 __ jmp(&install_unoptimized); | 429 __ jmp(&install_unoptimized); |
419 | 430 |
420 // Create a new closure through the slower runtime call. | 431 // Create a new closure through the slower runtime call. |
421 __ bind(&gc); | 432 __ bind(&gc); |
422 __ PopReturnAddressTo(rcx); | 433 __ pop(rcx); // Temporarily remove return address. |
423 __ pop(rdx); | 434 __ pop(rdx); |
424 __ push(rsi); | 435 __ push(rsi); |
425 __ push(rdx); | 436 __ push(rdx); |
426 __ PushRoot(Heap::kFalseValueRootIndex); | 437 __ PushRoot(Heap::kFalseValueRootIndex); |
427 __ PushReturnAddressFrom(rcx); | 438 __ push(rcx); // Restore return address. |
428 __ TailCallRuntime(Runtime::kNewClosure, 3, 1); | 439 __ TailCallRuntime(Runtime::kNewClosure, 3, 1); |
429 } | 440 } |
430 | 441 |
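The pop/push pair restored above is the recurring x64 idiom for slipping arguments under the return address before a tail call: pop the return address into a scratch register, push the arguments, then push the return address back on top so the callee's ret still returns to the original caller. A schematic C++ model of that stack shuffle (illustrative only; the vector's back() plays the role of the stack top):

    #include <cstdint>
    #include <vector>

    // Before: [..., return_address]   After: [..., arg1, arg2, return_address]
    void PushArgsUnderReturnAddress(std::vector<uint64_t>* stack,
                                    uint64_t arg1, uint64_t arg2) {
      uint64_t ret = stack->back();  // __ pop(rcx)
      stack->pop_back();
      stack->push_back(arg1);        // __ push(rsi) / __ push(rdx) ...
      stack->push_back(arg2);
      stack->push_back(ret);         // __ push(rcx)
    }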
431 | 442 |
432 void FastNewContextStub::Generate(MacroAssembler* masm) { | 443 void FastNewContextStub::Generate(MacroAssembler* masm) { |
433 // Try to allocate the context in new space. | 444 // Try to allocate the context in new space. |
434 Label gc; | 445 Label gc; |
435 int length = slots_ + Context::MIN_CONTEXT_SLOTS; | 446 int length = slots_ + Context::MIN_CONTEXT_SLOTS; |
436 __ Allocate((length * kPointerSize) + FixedArray::kHeaderSize, | 447 __ Allocate((length * kPointerSize) + FixedArray::kHeaderSize, |
437 rax, rbx, rcx, &gc, TAG_OBJECT); | 448 rax, rbx, rcx, &gc, TAG_OBJECT); |
(...skipping 55 matching lines...)
493 __ movq(FieldOperand(rax, HeapObject::kMapOffset), kScratchRegister); | 504 __ movq(FieldOperand(rax, HeapObject::kMapOffset), kScratchRegister); |
494 __ Move(FieldOperand(rax, FixedArray::kLengthOffset), Smi::FromInt(length)); | 505 __ Move(FieldOperand(rax, FixedArray::kLengthOffset), Smi::FromInt(length)); |
495 | 506 |
496 // If this block context is nested in the native context we get a smi | 507 // If this block context is nested in the native context we get a smi |
497 // sentinel instead of a function. The block context should get the | 508 // sentinel instead of a function. The block context should get the |
498 // canonical empty function of the native context as its closure which | 509 // canonical empty function of the native context as its closure which |
499 // we still have to look up. | 510 // we still have to look up. |
500 Label after_sentinel; | 511 Label after_sentinel; |
501 __ JumpIfNotSmi(rcx, &after_sentinel, Label::kNear); | 512 __ JumpIfNotSmi(rcx, &after_sentinel, Label::kNear); |
502 if (FLAG_debug_code) { | 513 if (FLAG_debug_code) { |
| 514 const char* message = "Expected 0 as a Smi sentinel"; |
503 __ cmpq(rcx, Immediate(0)); | 515 __ cmpq(rcx, Immediate(0)); |
504 __ Assert(equal, kExpected0AsASmiSentinel); | 516 __ Assert(equal, message); |
505 } | 517 } |
506 __ movq(rcx, GlobalObjectOperand()); | 518 __ movq(rcx, GlobalObjectOperand()); |
507 __ movq(rcx, FieldOperand(rcx, GlobalObject::kNativeContextOffset)); | 519 __ movq(rcx, FieldOperand(rcx, GlobalObject::kNativeContextOffset)); |
508 __ movq(rcx, ContextOperand(rcx, Context::CLOSURE_INDEX)); | 520 __ movq(rcx, ContextOperand(rcx, Context::CLOSURE_INDEX)); |
509 __ bind(&after_sentinel); | 521 __ bind(&after_sentinel); |
510 | 522 |
511 // Set up the fixed slots. | 523 // Set up the fixed slots. |
512 __ movq(ContextOperand(rax, Context::CLOSURE_INDEX), rcx); | 524 __ movq(ContextOperand(rax, Context::CLOSURE_INDEX), rcx); |
513 __ movq(ContextOperand(rax, Context::PREVIOUS_INDEX), rsi); | 525 __ movq(ContextOperand(rax, Context::PREVIOUS_INDEX), rsi); |
514 __ movq(ContextOperand(rax, Context::EXTENSION_INDEX), rbx); | 526 __ movq(ContextOperand(rax, Context::EXTENSION_INDEX), rbx); |
(...skipping 161 matching lines...)
676 __ pop(save_reg); | 688 __ pop(save_reg); |
677 __ pop(scratch1); | 689 __ pop(scratch1); |
678 __ ret(0); | 690 __ ret(0); |
679 } | 691 } |
680 | 692 |
681 | 693 |
682 void BinaryOpStub::Initialize() {} | 694 void BinaryOpStub::Initialize() {} |
683 | 695 |
684 | 696 |
685 void BinaryOpStub::GenerateTypeTransition(MacroAssembler* masm) { | 697 void BinaryOpStub::GenerateTypeTransition(MacroAssembler* masm) { |
686 __ PopReturnAddressTo(rcx); | 698 __ pop(rcx); // Save return address. |
687 __ push(rdx); | 699 __ push(rdx); |
688 __ push(rax); | 700 __ push(rax); |
689 // Left and right arguments are now on top. | 701 // Left and right arguments are now on top. |
690 __ Push(Smi::FromInt(MinorKey())); | 702 __ Push(Smi::FromInt(MinorKey())); |
691 | 703 |
692 __ PushReturnAddressFrom(rcx); | 704 __ push(rcx); // Push return address. |
693 | 705 |
694 // Patch the caller to an appropriate specialized stub and return the | 706 // Patch the caller to an appropriate specialized stub and return the |
695 // operation result to the caller of the stub. | 707 // operation result to the caller of the stub. |
696 __ TailCallExternalReference( | 708 __ TailCallExternalReference( |
697 ExternalReference(IC_Utility(IC::kBinaryOp_Patch), | 709 ExternalReference(IC_Utility(IC::kBinaryOp_Patch), |
698 masm->isolate()), | 710 masm->isolate()), |
699 3, | 711 3, |
700 1); | 712 1); |
701 } | 713 } |
702 | 714 |
(...skipping 232 matching lines...)
935 Label allocation_failed; | 947 Label allocation_failed; |
936 __ movl(rbx, rax); // rbx holds result value (uint32 value as int64). | 948 __ movl(rbx, rax); // rbx holds result value (uint32 value as int64). |
937 // Allocate heap number in new space. | 949 // Allocate heap number in new space. |
938 // Not using AllocateHeapNumber macro in order to reuse | 950 // Not using AllocateHeapNumber macro in order to reuse |
939 // already loaded heap_number_map. | 951 // already loaded heap_number_map. |
940 __ Allocate(HeapNumber::kSize, rax, rdx, no_reg, &allocation_failed, | 952 __ Allocate(HeapNumber::kSize, rax, rdx, no_reg, &allocation_failed, |
941 TAG_OBJECT); | 953 TAG_OBJECT); |
942 // Set the map. | 954 // Set the map. |
943 __ AssertRootValue(heap_number_map, | 955 __ AssertRootValue(heap_number_map, |
944 Heap::kHeapNumberMapRootIndex, | 956 Heap::kHeapNumberMapRootIndex, |
945 kHeapNumberMapRegisterClobbered); | 957 "HeapNumberMap register clobbered."); |
946 __ movq(FieldOperand(rax, HeapObject::kMapOffset), | 958 __ movq(FieldOperand(rax, HeapObject::kMapOffset), |
947 heap_number_map); | 959 heap_number_map); |
948 __ cvtqsi2sd(xmm0, rbx); | 960 __ cvtqsi2sd(xmm0, rbx); |
949 __ movsd(FieldOperand(rax, HeapNumber::kValueOffset), xmm0); | 961 __ movsd(FieldOperand(rax, HeapNumber::kValueOffset), xmm0); |
950 __ Ret(); | 962 __ Ret(); |
951 | 963 |
952 __ bind(&allocation_failed); | 964 __ bind(&allocation_failed); |
953 // We need tagged values in rdx and rax for the following code, | 965 // We need tagged values in rdx and rax for the following code, |
954 // not int32 in rax and rcx. | 966 // not int32 in rax and rcx. |
955 __ Integer32ToSmi(rax, rcx); | 967 __ Integer32ToSmi(rax, rcx); |
956 __ Integer32ToSmi(rdx, rbx); | 968 __ Integer32ToSmi(rdx, rbx); |
957 __ jmp(allocation_failure); | 969 __ jmp(allocation_failure); |
958 } | 970 } |
959 break; | 971 break; |
960 } | 972 } |
961 default: UNREACHABLE(); break; | 973 default: UNREACHABLE(); break; |
962 } | 974 } |
963 // No fall-through from this generated code. | 975 // No fall-through from this generated code. |
964 if (FLAG_debug_code) { | 976 if (FLAG_debug_code) { |
965 __ Abort(kUnexpectedFallThroughInBinaryStubGenerateFloatingPointCode); | 977 __ Abort("Unexpected fall-through in " |
| 978 "BinaryStub_GenerateFloatingPointCode."); |
966 } | 979 } |
967 } | 980 } |
968 | 981 |
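A note on the cvtqsi2sd above: movl zero-extends the uint32 result into the full 64-bit register, so the signed int64-to-double conversion yields the correct positive value even for results at or above 2^31. The equivalent C++, as a small runnable check (assumed reasoning, mirroring the instruction choice):

    #include <cassert>
    #include <cstdint>

    // Box a uint32 as a double the way the stub does: widen to int64 first
    // (movl zero-extension), then convert signed 64-bit to double (cvtqsi2sd).
    double BoxUint32(uint32_t value) {
      int64_t widened = static_cast<int64_t>(value);
      return static_cast<double>(widened);
    }

    int main() {
      assert(BoxUint32(0x80000000u) == 2147483648.0);  // not -2147483648.0
      return 0;
    }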
969 | 982 |
970 static void BinaryOpStub_GenerateRegisterArgsPushUnderReturn( | 983 static void BinaryOpStub_GenerateRegisterArgsPushUnderReturn( |
971 MacroAssembler* masm) { | 984 MacroAssembler* masm) { |
972 // Push arguments, but ensure they are under the return address | 985 // Push arguments, but ensure they are under the return address |
973 // for a tail call. | 986 // for a tail call. |
974 __ PopReturnAddressTo(rcx); | 987 __ pop(rcx); |
975 __ push(rdx); | 988 __ push(rdx); |
976 __ push(rax); | 989 __ push(rax); |
977 __ PushReturnAddressFrom(rcx); | 990 __ push(rcx); |
978 } | 991 } |
979 | 992 |
980 | 993 |
981 void BinaryOpStub::GenerateAddStrings(MacroAssembler* masm) { | 994 void BinaryOpStub::GenerateAddStrings(MacroAssembler* masm) { |
982 ASSERT(op_ == Token::ADD); | 995 ASSERT(op_ == Token::ADD); |
983 Label left_not_string, call_runtime; | 996 Label left_not_string, call_runtime; |
984 | 997 |
985 // Registers containing left and right operands respectively. | 998 // Registers containing left and right operands respectively. |
986 Register left = rdx; | 999 Register left = rdx; |
987 Register right = rax; | 1000 Register right = rax; |
(...skipping 1147 matching lines...)
2135 // property might have been redefined. | 2148 // property might have been redefined. |
2136 __ movq(scratch, FieldOperand(receiver, JSArray::kPropertiesOffset)); | 2149 __ movq(scratch, FieldOperand(receiver, JSArray::kPropertiesOffset)); |
2137 __ CompareRoot(FieldOperand(scratch, FixedArray::kMapOffset), | 2150 __ CompareRoot(FieldOperand(scratch, FixedArray::kMapOffset), |
2138 Heap::kHashTableMapRootIndex); | 2151 Heap::kHashTableMapRootIndex); |
2139 __ j(equal, &miss); | 2152 __ j(equal, &miss); |
2140 | 2153 |
2141 // Check that value is a smi. | 2154 // Check that value is a smi. |
2142 __ JumpIfNotSmi(value, &miss); | 2155 __ JumpIfNotSmi(value, &miss); |
2143 | 2156 |
2144 // Prepare tail call to StoreIC_ArrayLength. | 2157 // Prepare tail call to StoreIC_ArrayLength. |
2145 __ PopReturnAddressTo(scratch); | 2158 __ pop(scratch); |
2146 __ push(receiver); | 2159 __ push(receiver); |
2147 __ push(value); | 2160 __ push(value); |
2148 __ PushReturnAddressFrom(scratch); | 2161 __ push(scratch); // return address |
2149 | 2162 |
2150 ExternalReference ref = | 2163 ExternalReference ref = |
2151 ExternalReference(IC_Utility(IC::kStoreIC_ArrayLength), masm->isolate()); | 2164 ExternalReference(IC_Utility(IC::kStoreIC_ArrayLength), masm->isolate()); |
2152 __ TailCallExternalReference(ref, 2, 1); | 2165 __ TailCallExternalReference(ref, 2, 1); |
2153 | 2166 |
2154 __ bind(&miss); | 2167 __ bind(&miss); |
2155 | 2168 |
2156 StubCompiler::TailCallBuiltin( | 2169 StubCompiler::TailCallBuiltin( |
2157 masm, BaseLoadStoreStubCompiler::MissBuiltin(kind())); | 2170 masm, BaseLoadStoreStubCompiler::MissBuiltin(kind())); |
2158 } | 2171 } |
(...skipping 45 matching lines...)
2204 // Read the argument from the stack and return it. | 2217 // Read the argument from the stack and return it. |
2205 index = masm->SmiToIndex(rax, rcx, kPointerSizeLog2); | 2218 index = masm->SmiToIndex(rax, rcx, kPointerSizeLog2); |
2206 __ lea(rbx, Operand(rbx, index.reg, index.scale, 0)); | 2219 __ lea(rbx, Operand(rbx, index.reg, index.scale, 0)); |
2207 index = masm->SmiToNegativeIndex(rdx, rdx, kPointerSizeLog2); | 2220 index = masm->SmiToNegativeIndex(rdx, rdx, kPointerSizeLog2); |
2208 __ movq(rax, Operand(rbx, index.reg, index.scale, kDisplacement)); | 2221 __ movq(rax, Operand(rbx, index.reg, index.scale, kDisplacement)); |
2209 __ Ret(); | 2222 __ Ret(); |
2210 | 2223 |
2211 // Slow-case: Handle non-smi or out-of-bounds access to arguments | 2224 // Slow-case: Handle non-smi or out-of-bounds access to arguments |
2212 // by calling the runtime system. | 2225 // by calling the runtime system. |
2213 __ bind(&slow); | 2226 __ bind(&slow); |
2214 __ PopReturnAddressTo(rbx); | 2227 __ pop(rbx); // Return address. |
2215 __ push(rdx); | 2228 __ push(rdx); |
2216 __ PushReturnAddressFrom(rbx); | 2229 __ push(rbx); |
2217 __ TailCallRuntime(Runtime::kGetArgumentsProperty, 1, 1); | 2230 __ TailCallRuntime(Runtime::kGetArgumentsProperty, 1, 1); |
2218 } | 2231 } |
2219 | 2232 |
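SmiToIndex in the fast path above converts a tagged smi straight into a scaled array index with a single shift. On x64 the smi payload lives in the upper 32 bits of the word, so value << shift is obtained by an arithmetic right shift by (32 - shift). A sketch of the arithmetic (kSmiShift = 32 as on x64; illustrative, not V8 code):

    #include <cassert>
    #include <cstdint>

    const int kSmiShift = 32;  // x64 smi: payload in the upper 32 bits

    int64_t SmiTag(int32_t value) {
      return static_cast<int64_t>(value) << kSmiShift;
    }

    // Equivalent of masm->SmiToIndex(..., shift): yields value << shift.
    int64_t SmiToIndex(int64_t smi, int shift) {
      return smi >> (kSmiShift - shift);  // one sar instead of untag + scale
    }

    int main() {
      assert(SmiToIndex(SmiTag(5), 3) == 40);  // 5 * kPointerSize
      return 0;
    }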
2220 | 2233 |
2221 void ArgumentsAccessStub::GenerateNewNonStrictFast(MacroAssembler* masm) { | 2234 void ArgumentsAccessStub::GenerateNewNonStrictFast(MacroAssembler* masm) { |
2222 // Stack layout: | 2235 // Stack layout: |
2223 // rsp[0] : return address | 2236 // rsp[0] : return address |
2224 // rsp[8] : number of parameters (tagged) | 2237 // rsp[8] : number of parameters (tagged) |
2225 // rsp[16] : receiver displacement | 2238 // rsp[16] : receiver displacement |
2226 // rsp[24] : function | 2239 // rsp[24] : function |
(...skipping 369 matching lines...)
2596 __ movq(rax, Operand(rsp, kJSRegExpOffset)); | 2609 __ movq(rax, Operand(rsp, kJSRegExpOffset)); |
2597 __ JumpIfSmi(rax, &runtime); | 2610 __ JumpIfSmi(rax, &runtime); |
2598 __ CmpObjectType(rax, JS_REGEXP_TYPE, kScratchRegister); | 2611 __ CmpObjectType(rax, JS_REGEXP_TYPE, kScratchRegister); |
2599 __ j(not_equal, &runtime); | 2612 __ j(not_equal, &runtime); |
2600 | 2613 |
2601 // Check that the RegExp has been compiled (data contains a fixed array). | 2614 // Check that the RegExp has been compiled (data contains a fixed array). |
2602 __ movq(rax, FieldOperand(rax, JSRegExp::kDataOffset)); | 2615 __ movq(rax, FieldOperand(rax, JSRegExp::kDataOffset)); |
2603 if (FLAG_debug_code) { | 2616 if (FLAG_debug_code) { |
2604 Condition is_smi = masm->CheckSmi(rax); | 2617 Condition is_smi = masm->CheckSmi(rax); |
2605 __ Check(NegateCondition(is_smi), | 2618 __ Check(NegateCondition(is_smi), |
2606 kUnexpectedTypeForRegExpDataFixedArrayExpected); | 2619 "Unexpected type for RegExp data, FixedArray expected"); |
2607 __ CmpObjectType(rax, FIXED_ARRAY_TYPE, kScratchRegister); | 2620 __ CmpObjectType(rax, FIXED_ARRAY_TYPE, kScratchRegister); |
2608 __ Check(equal, kUnexpectedTypeForRegExpDataFixedArrayExpected); | 2621 __ Check(equal, "Unexpected type for RegExp data, FixedArray expected"); |
2609 } | 2622 } |
2610 | 2623 |
2611 // rax: RegExp data (FixedArray) | 2624 // rax: RegExp data (FixedArray) |
2612 // Check the type of the RegExp. Only continue if type is JSRegExp::IRREGEXP. | 2625 // Check the type of the RegExp. Only continue if type is JSRegExp::IRREGEXP. |
2613 __ SmiToInteger32(rbx, FieldOperand(rax, JSRegExp::kDataTagOffset)); | 2626 __ SmiToInteger32(rbx, FieldOperand(rax, JSRegExp::kDataTagOffset)); |
2614 __ cmpl(rbx, Immediate(JSRegExp::IRREGEXP)); | 2627 __ cmpl(rbx, Immediate(JSRegExp::IRREGEXP)); |
2615 __ j(not_equal, &runtime); | 2628 __ j(not_equal, &runtime); |
2616 | 2629 |
2617 // rax: RegExp data (FixedArray) | 2630 // rax: RegExp data (FixedArray) |
2618 // Check that the number of captures fits in the static offsets vector buffer. | 2631 // Check that the number of captures fits in the static offsets vector buffer. |
(...skipping 345 matching lines...)
2964 __ j(greater, &not_long_external, Label::kNear); // Go to (10). | 2977 __ j(greater, &not_long_external, Label::kNear); // Go to (10). |
2965 | 2978 |
2966 // (8) External string. Short external strings have been ruled out. | 2979 // (8) External string. Short external strings have been ruled out. |
2967 __ bind(&external_string); | 2980 __ bind(&external_string); |
2968 __ movq(rbx, FieldOperand(rdi, HeapObject::kMapOffset)); | 2981 __ movq(rbx, FieldOperand(rdi, HeapObject::kMapOffset)); |
2969 __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset)); | 2982 __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset)); |
2970 if (FLAG_debug_code) { | 2983 if (FLAG_debug_code) { |
2971 // Assert that we do not have a cons or slice (indirect strings) here. | 2984 // Assert that we do not have a cons or slice (indirect strings) here. |
2972 // Sequential strings have already been ruled out. | 2985 // Sequential strings have already been ruled out. |
2973 __ testb(rbx, Immediate(kIsIndirectStringMask)); | 2986 __ testb(rbx, Immediate(kIsIndirectStringMask)); |
2974 __ Assert(zero, kExternalStringExpectedButNotFound); | 2987 __ Assert(zero, "external string expected, but not found"); |
2975 } | 2988 } |
2976 __ movq(rdi, FieldOperand(rdi, ExternalString::kResourceDataOffset)); | 2989 __ movq(rdi, FieldOperand(rdi, ExternalString::kResourceDataOffset)); |
2977 // Move the pointer so that offset-wise, it looks like a sequential string. | 2990 // Move the pointer so that offset-wise, it looks like a sequential string. |
2978 STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize); | 2991 STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize); |
2979 __ subq(rdi, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag)); | 2992 __ subq(rdi, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag)); |
2980 STATIC_ASSERT(kTwoByteStringTag == 0); | 2993 STATIC_ASSERT(kTwoByteStringTag == 0); |
2981 // (8a) Is the external string one byte? If yes, go to (6). | 2994 // (8a) Is the external string one byte? If yes, go to (6). |
2982 __ testb(rbx, Immediate(kStringEncodingMask)); | 2995 __ testb(rbx, Immediate(kStringEncodingMask)); |
2983 __ j(not_zero, &seq_one_byte_string); // Goto (6). | 2996 __ j(not_zero, &seq_one_byte_string); // Goto (6). |
2984 | 2997 |
(...skipping 443 matching lines...)
3428 StringCompareStub::GenerateCompareFlatAsciiStrings(masm, | 3441 StringCompareStub::GenerateCompareFlatAsciiStrings(masm, |
3429 rdx, | 3442 rdx, |
3430 rax, | 3443 rax, |
3431 rcx, | 3444 rcx, |
3432 rbx, | 3445 rbx, |
3433 rdi, | 3446 rdi, |
3434 r8); | 3447 r8); |
3435 } | 3448 } |
3436 | 3449 |
3437 #ifdef DEBUG | 3450 #ifdef DEBUG |
3438 __ Abort(kUnexpectedFallThroughFromStringComparison); | 3451 __ Abort("Unexpected fall-through from string comparison"); |
3439 #endif | 3452 #endif |
3440 | 3453 |
3441 __ bind(&check_unequal_objects); | 3454 __ bind(&check_unequal_objects); |
3442 if (cc == equal && !strict()) { | 3455 if (cc == equal && !strict()) { |
3443 // Not strict equality. Objects are unequal if | 3456 // Not strict equality. Objects are unequal if |
3444 // they are both JSObjects and not undetectable, | 3457 // they are both JSObjects and not undetectable, |
3445 // and their pointers are different. | 3458 // and their pointers are different. |
3446 Label not_both_objects, return_unequal; | 3459 Label not_both_objects, return_unequal; |
3447 // At most one is a smi, so we can test for smi by adding the two. | 3460 // At most one is a smi, so we can test for smi by adding the two. |
3448 // A smi plus a heap object has the low bit set, a heap object plus | 3461 // A smi plus a heap object has the low bit set, a heap object plus |
(...skipping 17 matching lines...)
3466 // undefined, and are equal. | 3479 // undefined, and are equal. |
3467 __ Set(rax, EQUAL); | 3480 __ Set(rax, EQUAL); |
3468 __ bind(&return_unequal); | 3481 __ bind(&return_unequal); |
3469 // Return non-equal by returning the non-zero object pointer in rax, | 3482 // Return non-equal by returning the non-zero object pointer in rax, |
3470 // or return equal if we fell through to here. | 3483 // or return equal if we fell through to here. |
3471 __ ret(0); | 3484 __ ret(0); |
3472 __ bind(&not_both_objects); | 3485 __ bind(&not_both_objects); |
3473 } | 3486 } |
3474 | 3487 |
3475 // Push arguments below the return address to prepare jump to builtin. | 3488 // Push arguments below the return address to prepare jump to builtin. |
3476 __ PopReturnAddressTo(rcx); | 3489 __ pop(rcx); |
3477 __ push(rdx); | 3490 __ push(rdx); |
3478 __ push(rax); | 3491 __ push(rax); |
3479 | 3492 |
3480 // Figure out which native to call and set up the arguments. | 3493 // Figure out which native to call and set up the arguments. |
3481 Builtins::JavaScript builtin; | 3494 Builtins::JavaScript builtin; |
3482 if (cc == equal) { | 3495 if (cc == equal) { |
3483 builtin = strict() ? Builtins::STRICT_EQUALS : Builtins::EQUALS; | 3496 builtin = strict() ? Builtins::STRICT_EQUALS : Builtins::EQUALS; |
3484 } else { | 3497 } else { |
3485 builtin = Builtins::COMPARE; | 3498 builtin = Builtins::COMPARE; |
3486 __ Push(Smi::FromInt(NegativeComparisonResult(cc))); | 3499 __ Push(Smi::FromInt(NegativeComparisonResult(cc))); |
3487 } | 3500 } |
3488 | 3501 |
3489 __ PushReturnAddressFrom(rcx); | 3502 // Restore return address on the stack. |
| 3503 __ push(rcx); |
3490 | 3504 |
3491 // Call the native; it returns -1 (less), 0 (equal), or 1 (greater) | 3505 // Call the native; it returns -1 (less), 0 (equal), or 1 (greater) |
3492 // tagged as a small integer. | 3506 // tagged as a small integer. |
3493 __ InvokeBuiltin(builtin, JUMP_FUNCTION); | 3507 __ InvokeBuiltin(builtin, JUMP_FUNCTION); |
3494 | 3508 |
3495 __ bind(&miss); | 3509 __ bind(&miss); |
3496 GenerateMiss(masm); | 3510 GenerateMiss(masm); |
3497 } | 3511 } |
3498 | 3512 |
3499 | 3513 |
(...skipping 148 matching lines...)
3648 if (RecordCallTarget()) { | 3662 if (RecordCallTarget()) { |
3649 // If there is a call target cache, mark it megamorphic in the | 3663 // If there is a call target cache, mark it megamorphic in the |
3650 // non-function case. MegamorphicSentinel is an immortal immovable | 3664 // non-function case. MegamorphicSentinel is an immortal immovable |
3651 // object (undefined) so no write barrier is needed. | 3665 // object (undefined) so no write barrier is needed. |
3652 __ Move(FieldOperand(rbx, Cell::kValueOffset), | 3666 __ Move(FieldOperand(rbx, Cell::kValueOffset), |
3653 TypeFeedbackCells::MegamorphicSentinel(isolate)); | 3667 TypeFeedbackCells::MegamorphicSentinel(isolate)); |
3654 } | 3668 } |
3655 // Check for function proxy. | 3669 // Check for function proxy. |
3656 __ CmpInstanceType(rcx, JS_FUNCTION_PROXY_TYPE); | 3670 __ CmpInstanceType(rcx, JS_FUNCTION_PROXY_TYPE); |
3657 __ j(not_equal, &non_function); | 3671 __ j(not_equal, &non_function); |
3658 __ PopReturnAddressTo(rcx); | 3672 __ pop(rcx); |
3659 __ push(rdi); // put proxy as additional argument under return address | 3673 __ push(rdi); // put proxy as additional argument under return address |
3660 __ PushReturnAddressFrom(rcx); | 3674 __ push(rcx); |
3661 __ Set(rax, argc_ + 1); | 3675 __ Set(rax, argc_ + 1); |
3662 __ Set(rbx, 0); | 3676 __ Set(rbx, 0); |
3663 __ SetCallKind(rcx, CALL_AS_METHOD); | 3677 __ SetCallKind(rcx, CALL_AS_METHOD); |
3664 __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY); | 3678 __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY); |
3665 { | 3679 { |
3666 Handle<Code> adaptor = | 3680 Handle<Code> adaptor = |
3667 masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(); | 3681 masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(); |
3668 __ jmp(adaptor, RelocInfo::CODE_TARGET); | 3682 __ jmp(adaptor, RelocInfo::CODE_TARGET); |
3669 } | 3683 } |
3670 | 3684 |
(...skipping 583 matching lines...)
4254 if (!HasCallSiteInlineCheck()) { | 4268 if (!HasCallSiteInlineCheck()) { |
4255 __ StoreRoot(rdx, Heap::kInstanceofCacheFunctionRootIndex); | 4269 __ StoreRoot(rdx, Heap::kInstanceofCacheFunctionRootIndex); |
4256 __ StoreRoot(rax, Heap::kInstanceofCacheMapRootIndex); | 4270 __ StoreRoot(rax, Heap::kInstanceofCacheMapRootIndex); |
4257 } else { | 4271 } else { |
4258 // Get return address and delta to inlined map check. | 4272 // Get return address and delta to inlined map check. |
4259 __ movq(kScratchRegister, Operand(rsp, 0 * kPointerSize)); | 4273 __ movq(kScratchRegister, Operand(rsp, 0 * kPointerSize)); |
4260 __ subq(kScratchRegister, Operand(rsp, 1 * kPointerSize)); | 4274 __ subq(kScratchRegister, Operand(rsp, 1 * kPointerSize)); |
4261 if (FLAG_debug_code) { | 4275 if (FLAG_debug_code) { |
4262 __ movl(rdi, Immediate(kWordBeforeMapCheckValue)); | 4276 __ movl(rdi, Immediate(kWordBeforeMapCheckValue)); |
4263 __ cmpl(Operand(kScratchRegister, kOffsetToMapCheckValue - 4), rdi); | 4277 __ cmpl(Operand(kScratchRegister, kOffsetToMapCheckValue - 4), rdi); |
4264 __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheCheck); | 4278 __ Assert(equal, "InstanceofStub unexpected call site cache (check)."); |
4265 } | 4279 } |
4266 __ movq(kScratchRegister, | 4280 __ movq(kScratchRegister, |
4267 Operand(kScratchRegister, kOffsetToMapCheckValue)); | 4281 Operand(kScratchRegister, kOffsetToMapCheckValue)); |
4268 __ movq(Operand(kScratchRegister, 0), rax); | 4282 __ movq(Operand(kScratchRegister, 0), rax); |
4269 } | 4283 } |
4270 | 4284 |
4271 __ movq(rcx, FieldOperand(rax, Map::kPrototypeOffset)); | 4285 __ movq(rcx, FieldOperand(rax, Map::kPrototypeOffset)); |
4272 | 4286 |
4273 // Loop through the prototype chain looking for the function prototype. | 4287 // Loop through the prototype chain looking for the function prototype. |
4274 Label loop, is_instance, is_not_instance; | 4288 Label loop, is_instance, is_not_instance; |
(...skipping 21 matching lines...)
4296 (Heap::kTrueValueRootIndex << kPointerSizeLog2) - kRootRegisterBias; | 4310 (Heap::kTrueValueRootIndex << kPointerSizeLog2) - kRootRegisterBias; |
4297 // Assert it is a 1-byte signed value. | 4311 // Assert it is a 1-byte signed value. |
4298 ASSERT(true_offset >= 0 && true_offset < 0x100); | 4312 ASSERT(true_offset >= 0 && true_offset < 0x100); |
4299 __ movl(rax, Immediate(true_offset)); | 4313 __ movl(rax, Immediate(true_offset)); |
4300 __ movq(kScratchRegister, Operand(rsp, 0 * kPointerSize)); | 4314 __ movq(kScratchRegister, Operand(rsp, 0 * kPointerSize)); |
4301 __ subq(kScratchRegister, Operand(rsp, 1 * kPointerSize)); | 4315 __ subq(kScratchRegister, Operand(rsp, 1 * kPointerSize)); |
4302 __ movb(Operand(kScratchRegister, kOffsetToResultValue), rax); | 4316 __ movb(Operand(kScratchRegister, kOffsetToResultValue), rax); |
4303 if (FLAG_debug_code) { | 4317 if (FLAG_debug_code) { |
4304 __ movl(rax, Immediate(kWordBeforeResultValue)); | 4318 __ movl(rax, Immediate(kWordBeforeResultValue)); |
4305 __ cmpl(Operand(kScratchRegister, kOffsetToResultValue - 4), rax); | 4319 __ cmpl(Operand(kScratchRegister, kOffsetToResultValue - 4), rax); |
4306 __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheMov); | 4320 __ Assert(equal, "InstanceofStub unexpected call site cache (mov)."); |
4307 } | 4321 } |
4308 __ Set(rax, 0); | 4322 __ Set(rax, 0); |
4309 } | 4323 } |
4310 __ ret(2 * kPointerSize + extra_stack_space); | 4324 __ ret(2 * kPointerSize + extra_stack_space); |
4311 | 4325 |
4312 __ bind(&is_not_instance); | 4326 __ bind(&is_not_instance); |
4313 if (!HasCallSiteInlineCheck()) { | 4327 if (!HasCallSiteInlineCheck()) { |
4314 // We have to store a non-zero value in the cache. | 4328 // We have to store a non-zero value in the cache. |
4315 __ StoreRoot(kScratchRegister, Heap::kInstanceofCacheAnswerRootIndex); | 4329 __ StoreRoot(kScratchRegister, Heap::kInstanceofCacheAnswerRootIndex); |
4316 } else { | 4330 } else { |
4317 // Store offset of false in the root array at the inline check site. | 4331 // Store offset of false in the root array at the inline check site. |
4318 int false_offset = 0x100 + | 4332 int false_offset = 0x100 + |
4319 (Heap::kFalseValueRootIndex << kPointerSizeLog2) - kRootRegisterBias; | 4333 (Heap::kFalseValueRootIndex << kPointerSizeLog2) - kRootRegisterBias; |
4320 // Assert it is a 1-byte signed value. | 4334 // Assert it is a 1-byte signed value. |
4321 ASSERT(false_offset >= 0 && false_offset < 0x100); | 4335 ASSERT(false_offset >= 0 && false_offset < 0x100); |
4322 __ movl(rax, Immediate(false_offset)); | 4336 __ movl(rax, Immediate(false_offset)); |
4323 __ movq(kScratchRegister, Operand(rsp, 0 * kPointerSize)); | 4337 __ movq(kScratchRegister, Operand(rsp, 0 * kPointerSize)); |
4324 __ subq(kScratchRegister, Operand(rsp, 1 * kPointerSize)); | 4338 __ subq(kScratchRegister, Operand(rsp, 1 * kPointerSize)); |
4325 __ movb(Operand(kScratchRegister, kOffsetToResultValue), rax); | 4339 __ movb(Operand(kScratchRegister, kOffsetToResultValue), rax); |
4326 if (FLAG_debug_code) { | 4340 if (FLAG_debug_code) { |
4327 __ movl(rax, Immediate(kWordBeforeResultValue)); | 4341 __ movl(rax, Immediate(kWordBeforeResultValue)); |
4328 __ cmpl(Operand(kScratchRegister, kOffsetToResultValue - 4), rax); | 4342 __ cmpl(Operand(kScratchRegister, kOffsetToResultValue - 4), rax); |
4329 __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheMov); | 4343 __ Assert(equal, "InstanceofStub unexpected call site cache (mov)"); |
4330 } | 4344 } |
4331 } | 4345 } |
4332 __ ret(2 * kPointerSize + extra_stack_space); | 4346 __ ret(2 * kPointerSize + extra_stack_space); |
4333 | 4347 |
4334 // Slow-case: Go through the JavaScript implementation. | 4348 // Slow-case: Go through the JavaScript implementation. |
4335 __ bind(&slow); | 4349 __ bind(&slow); |
4336 if (HasCallSiteInlineCheck()) { | 4350 if (HasCallSiteInlineCheck()) { |
4337 // Remove extra value from the stack. | 4351 // Remove extra value from the stack. |
4338 __ PopReturnAddressTo(rcx); | 4352 __ pop(rcx); |
4339 __ pop(rax); | 4353 __ pop(rax); |
4340 __ PushReturnAddressFrom(rcx); | 4354 __ push(rcx); |
4341 } | 4355 } |
4342 __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION); | 4356 __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION); |
4343 } | 4357 } |
4344 | 4358 |
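The inline-check path above answers instanceof by patching one byte at the call site: the result is encoded as an offset into the root array relative to the biased root register, and the ASSERTs guarantee the offset fits in a byte so a single movb can store it. A sketch of that encoding check (the bias and root index below are placeholders, not V8's actual values):

    #include <cassert>

    const int kPointerSizeLog2 = 3;
    const int kRootRegisterBias = 128;  // placeholder for illustration
    const int kTrueValueRootIndex = 7;  // placeholder root-array index

    int main() {
      // Offset of the true value from the biased root register; must fit
      // in one byte so the stub can patch the call site with a movb.
      int true_offset = 0x100 +
          (kTrueValueRootIndex << kPointerSizeLog2) - kRootRegisterBias;
      assert(true_offset >= 0 && true_offset < 0x100);
      return 0;
    }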
4345 | 4359 |
4346 // Passing arguments in registers is not supported. | 4360 // Passing arguments in registers is not supported. |
4347 Register InstanceofStub::left() { return no_reg; } | 4361 Register InstanceofStub::left() { return no_reg; } |
4348 | 4362 |
4349 | 4363 |
4350 Register InstanceofStub::right() { return no_reg; } | 4364 Register InstanceofStub::right() { return no_reg; } |
(...skipping 32 matching lines...)
4383 masm, object_, index_, result_, &call_runtime_); | 4397 masm, object_, index_, result_, &call_runtime_); |
4384 | 4398 |
4385 __ Integer32ToSmi(result_, result_); | 4399 __ Integer32ToSmi(result_, result_); |
4386 __ bind(&exit_); | 4400 __ bind(&exit_); |
4387 } | 4401 } |
4388 | 4402 |
4389 | 4403 |
4390 void StringCharCodeAtGenerator::GenerateSlow( | 4404 void StringCharCodeAtGenerator::GenerateSlow( |
4391 MacroAssembler* masm, | 4405 MacroAssembler* masm, |
4392 const RuntimeCallHelper& call_helper) { | 4406 const RuntimeCallHelper& call_helper) { |
4393 __ Abort(kUnexpectedFallthroughToCharCodeAtSlowCase); | 4407 __ Abort("Unexpected fallthrough to CharCodeAt slow case"); |
4394 | 4408 |
4395 Factory* factory = masm->isolate()->factory(); | 4409 Factory* factory = masm->isolate()->factory(); |
4396 // Index is not a smi. | 4410 // Index is not a smi. |
4397 __ bind(&index_not_smi_); | 4411 __ bind(&index_not_smi_); |
4398 // If index is a heap number, try converting it to an integer. | 4412 // If index is a heap number, try converting it to an integer. |
4399 __ CheckMap(index_, | 4413 __ CheckMap(index_, |
4400 factory->heap_number_map(), | 4414 factory->heap_number_map(), |
4401 index_not_number_, | 4415 index_not_number_, |
4402 DONT_DO_SMI_CHECK); | 4416 DONT_DO_SMI_CHECK); |
4403 call_helper.BeforeCall(masm); | 4417 call_helper.BeforeCall(masm); |
(...skipping 29 matching lines...)
4433 __ push(object_); | 4447 __ push(object_); |
4434 __ Integer32ToSmi(index_, index_); | 4448 __ Integer32ToSmi(index_, index_); |
4435 __ push(index_); | 4449 __ push(index_); |
4436 __ CallRuntime(Runtime::kStringCharCodeAt, 2); | 4450 __ CallRuntime(Runtime::kStringCharCodeAt, 2); |
4437 if (!result_.is(rax)) { | 4451 if (!result_.is(rax)) { |
4438 __ movq(result_, rax); | 4452 __ movq(result_, rax); |
4439 } | 4453 } |
4440 call_helper.AfterCall(masm); | 4454 call_helper.AfterCall(masm); |
4441 __ jmp(&exit_); | 4455 __ jmp(&exit_); |
4442 | 4456 |
4443 __ Abort(kUnexpectedFallthroughFromCharCodeAtSlowCase); | 4457 __ Abort("Unexpected fallthrough from CharCodeAt slow case"); |
4444 } | 4458 } |
4445 | 4459 |
4446 | 4460 |
4447 // ------------------------------------------------------------------------- | 4461 // ------------------------------------------------------------------------- |
4448 // StringCharFromCodeGenerator | 4462 // StringCharFromCodeGenerator |
4449 | 4463 |
4450 void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) { | 4464 void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) { |
4451 // Fast case of Heap::LookupSingleCharacterStringFromCode. | 4465 // Fast case of Heap::LookupSingleCharacterStringFromCode. |
4452 __ JumpIfNotSmi(code_, &slow_case_); | 4466 __ JumpIfNotSmi(code_, &slow_case_); |
4453 __ SmiCompare(code_, Smi::FromInt(String::kMaxOneByteCharCode)); | 4467 __ SmiCompare(code_, Smi::FromInt(String::kMaxOneByteCharCode)); |
4454 __ j(above, &slow_case_); | 4468 __ j(above, &slow_case_); |
4455 | 4469 |
4456 __ LoadRoot(result_, Heap::kSingleCharacterStringCacheRootIndex); | 4470 __ LoadRoot(result_, Heap::kSingleCharacterStringCacheRootIndex); |
4457 SmiIndex index = masm->SmiToIndex(kScratchRegister, code_, kPointerSizeLog2); | 4471 SmiIndex index = masm->SmiToIndex(kScratchRegister, code_, kPointerSizeLog2); |
4458 __ movq(result_, FieldOperand(result_, index.reg, index.scale, | 4472 __ movq(result_, FieldOperand(result_, index.reg, index.scale, |
4459 FixedArray::kHeaderSize)); | 4473 FixedArray::kHeaderSize)); |
4460 __ CompareRoot(result_, Heap::kUndefinedValueRootIndex); | 4474 __ CompareRoot(result_, Heap::kUndefinedValueRootIndex); |
4461 __ j(equal, &slow_case_); | 4475 __ j(equal, &slow_case_); |
4462 __ bind(&exit_); | 4476 __ bind(&exit_); |
4463 } | 4477 } |
4464 | 4478 |
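The fast path above is a plain table lookup: character codes up to kMaxOneByteCharCode index into a cache of single-character strings, and an undefined slot falls through to the slow case. A minimal sketch of the same shape (the cache and sentinel here are hypothetical stand-ins for V8's heap objects):

    #include <array>
    #include <string>

    const int kMaxOneByteCharCode = 0xff;
    std::array<std::string*, kMaxOneByteCharCode + 1> single_char_cache{};  // hypothetical

    // Mirrors the stub: bounds check, indexed load, miss on an empty slot.
    std::string* LookupSingleCharacterString(int code, bool* slow_case) {
      if (code < 0 || code > kMaxOneByteCharCode) {
        *slow_case = true;
        return nullptr;
      }
      std::string* cached = single_char_cache[code];
      *slow_case = (cached == nullptr);  // "undefined" slot -> slow case
      return cached;
    }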
4465 | 4479 |
4466 void StringCharFromCodeGenerator::GenerateSlow( | 4480 void StringCharFromCodeGenerator::GenerateSlow( |
4467 MacroAssembler* masm, | 4481 MacroAssembler* masm, |
4468 const RuntimeCallHelper& call_helper) { | 4482 const RuntimeCallHelper& call_helper) { |
4469 __ Abort(kUnexpectedFallthroughToCharFromCodeSlowCase); | 4483 __ Abort("Unexpected fallthrough to CharFromCode slow case"); |
4470 | 4484 |
4471 __ bind(&slow_case_); | 4485 __ bind(&slow_case_); |
4472 call_helper.BeforeCall(masm); | 4486 call_helper.BeforeCall(masm); |
4473 __ push(code_); | 4487 __ push(code_); |
4474 __ CallRuntime(Runtime::kCharFromCode, 1); | 4488 __ CallRuntime(Runtime::kCharFromCode, 1); |
4475 if (!result_.is(rax)) { | 4489 if (!result_.is(rax)) { |
4476 __ movq(result_, rax); | 4490 __ movq(result_, rax); |
4477 } | 4491 } |
4478 call_helper.AfterCall(masm); | 4492 call_helper.AfterCall(masm); |
4479 __ jmp(&exit_); | 4493 __ jmp(&exit_); |
4480 | 4494 |
4481 __ Abort(kUnexpectedFallthroughFromCharFromCodeSlowCase); | 4495 __ Abort("Unexpected fallthrough from CharFromCode slow case"); |
4482 } | 4496 } |
4483 | 4497 |
4484 | 4498 |
4485 void StringAddStub::Generate(MacroAssembler* masm) { | 4499 void StringAddStub::Generate(MacroAssembler* masm) { |
4486 Label call_runtime, call_builtin; | 4500 Label call_runtime, call_builtin; |
4487 Builtins::JavaScript builtin_id = Builtins::ADD; | 4501 Builtins::JavaScript builtin_id = Builtins::ADD; |
4488 | 4502 |
4489 // Load the two arguments. | 4503 // Load the two arguments. |
4490 __ movq(rax, Operand(rsp, 2 * kPointerSize)); // First argument (left). | 4504 __ movq(rax, Operand(rsp, 2 * kPointerSize)); // First argument (left). |
4491 __ movq(rdx, Operand(rsp, 1 * kPointerSize)); // Second argument (right). | 4505 __ movq(rdx, Operand(rsp, 1 * kPointerSize)); // Second argument (right). |
(...skipping 309 matching lines...)
4801 | 4815 |
4802 | 4816 |
4803 void StringAddStub::GenerateRegisterArgsPush(MacroAssembler* masm) { | 4817 void StringAddStub::GenerateRegisterArgsPush(MacroAssembler* masm) { |
4804 __ push(rax); | 4818 __ push(rax); |
4805 __ push(rdx); | 4819 __ push(rdx); |
4806 } | 4820 } |
4807 | 4821 |
4808 | 4822 |
4809 void StringAddStub::GenerateRegisterArgsPop(MacroAssembler* masm, | 4823 void StringAddStub::GenerateRegisterArgsPop(MacroAssembler* masm, |
4810 Register temp) { | 4824 Register temp) { |
4811 __ PopReturnAddressTo(temp); | 4825 __ pop(temp); |
4812 __ pop(rdx); | 4826 __ pop(rdx); |
4813 __ pop(rax); | 4827 __ pop(rax); |
4814 __ PushReturnAddressFrom(temp); | 4828 __ push(temp); |
4815 } | 4829 } |
4816 | 4830 |
4817 | 4831 |
4818 void StringAddStub::GenerateConvertArgument(MacroAssembler* masm, | 4832 void StringAddStub::GenerateConvertArgument(MacroAssembler* masm, |
4819 int stack_offset, | 4833 int stack_offset, |
4820 Register arg, | 4834 Register arg, |
4821 Register scratch1, | 4835 Register scratch1, |
4822 Register scratch2, | 4836 Register scratch2, |
4823 Register scratch3, | 4837 Register scratch3, |
4824 Label* slow) { | 4838 Label* slow) { |
(...skipping 194 matching lines...)
5019 Label is_string; | 5033 Label is_string; |
5020 __ CmpObjectType(candidate, ODDBALL_TYPE, map); | 5034 __ CmpObjectType(candidate, ODDBALL_TYPE, map); |
5021 __ j(not_equal, &is_string, Label::kNear); | 5035 __ j(not_equal, &is_string, Label::kNear); |
5022 | 5036 |
5023 __ CompareRoot(candidate, Heap::kUndefinedValueRootIndex); | 5037 __ CompareRoot(candidate, Heap::kUndefinedValueRootIndex); |
5024 __ j(equal, not_found); | 5038 __ j(equal, not_found); |
5025 // Must be the hole (deleted entry). | 5039 // Must be the hole (deleted entry). |
5026 if (FLAG_debug_code) { | 5040 if (FLAG_debug_code) { |
5027 __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex); | 5041 __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex); |
5028 __ cmpq(kScratchRegister, candidate); | 5042 __ cmpq(kScratchRegister, candidate); |
5029 __ Assert(equal, kOddballInStringTableIsNotUndefinedOrTheHole); | 5043 __ Assert(equal, "oddball in string table is not undefined or the hole"); |
5030 } | 5044 } |
5031 __ jmp(&next_probe[i]); | 5045 __ jmp(&next_probe[i]); |
5032 | 5046 |
5033 __ bind(&is_string); | 5047 __ bind(&is_string); |
5034 | 5048 |
5035 // If length is not 2 the string is not a candidate. | 5049 // If length is not 2 the string is not a candidate. |
5036 __ SmiCompare(FieldOperand(candidate, String::kLengthOffset), | 5050 __ SmiCompare(FieldOperand(candidate, String::kLengthOffset), |
5037 Smi::FromInt(2)); | 5051 Smi::FromInt(2)); |
5038 __ j(not_equal, &next_probe[i]); | 5052 __ j(not_equal, &next_probe[i]); |
5039 | 5053 |
(...skipping 468 matching lines...)
5508 __ ret(2 * kPointerSize); | 5522 __ ret(2 * kPointerSize); |
5509 | 5523 |
5510 __ bind(&not_same); | 5524 __ bind(&not_same); |
5511 | 5525 |
5512 // Check that both are sequential ASCII strings. | 5526 // Check that both are sequential ASCII strings. |
5513 __ JumpIfNotBothSequentialAsciiStrings(rdx, rax, rcx, rbx, &runtime); | 5527 __ JumpIfNotBothSequentialAsciiStrings(rdx, rax, rcx, rbx, &runtime); |
5514 | 5528 |
5515 // Inline comparison of ASCII strings. | 5529 // Inline comparison of ASCII strings. |
5516 __ IncrementCounter(counters->string_compare_native(), 1); | 5530 __ IncrementCounter(counters->string_compare_native(), 1); |
5517 // Drop arguments from the stack | 5531 // Drop arguments from the stack |
5518 __ PopReturnAddressTo(rcx); | 5532 __ pop(rcx); |
5519 __ addq(rsp, Immediate(2 * kPointerSize)); | 5533 __ addq(rsp, Immediate(2 * kPointerSize)); |
5520 __ PushReturnAddressFrom(rcx); | 5534 __ push(rcx); |
5521 GenerateCompareFlatAsciiStrings(masm, rdx, rax, rcx, rbx, rdi, r8); | 5535 GenerateCompareFlatAsciiStrings(masm, rdx, rax, rcx, rbx, rdi, r8); |
5522 | 5536 |
5523 // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater) | 5537 // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater) |
5524 // tagged as a small integer. | 5538 // tagged as a small integer. |
5525 __ bind(&runtime); | 5539 __ bind(&runtime); |
5526 __ TailCallRuntime(Runtime::kStringCompare, 2, 1); | 5540 __ TailCallRuntime(Runtime::kStringCompare, 2, 1); |
5527 } | 5541 } |
5528 | 5542 |
5529 | 5543 |
5530 void ICCompareStub::GenerateSmis(MacroAssembler* masm) { | 5544 void ICCompareStub::GenerateSmis(MacroAssembler* masm) { |
(...skipping 248 matching lines...)
5779 if (equality) { | 5793 if (equality) { |
5780 StringCompareStub::GenerateFlatAsciiStringEquals( | 5794 StringCompareStub::GenerateFlatAsciiStringEquals( |
5781 masm, left, right, tmp1, tmp2); | 5795 masm, left, right, tmp1, tmp2); |
5782 } else { | 5796 } else { |
5783 StringCompareStub::GenerateCompareFlatAsciiStrings( | 5797 StringCompareStub::GenerateCompareFlatAsciiStrings( |
5784 masm, left, right, tmp1, tmp2, tmp3, kScratchRegister); | 5798 masm, left, right, tmp1, tmp2, tmp3, kScratchRegister); |
5785 } | 5799 } |
5786 | 5800 |
5787 // Handle more complex cases in runtime. | 5801 // Handle more complex cases in runtime. |
5788 __ bind(&runtime); | 5802 __ bind(&runtime); |
5789 __ PopReturnAddressTo(tmp1); | 5803 __ pop(tmp1); // Return address. |
5790 __ push(left); | 5804 __ push(left); |
5791 __ push(right); | 5805 __ push(right); |
5792 __ PushReturnAddressFrom(tmp1); | 5806 __ push(tmp1); |
5793 if (equality) { | 5807 if (equality) { |
5794 __ TailCallRuntime(Runtime::kStringEquals, 2, 1); | 5808 __ TailCallRuntime(Runtime::kStringEquals, 2, 1); |
5795 } else { | 5809 } else { |
5796 __ TailCallRuntime(Runtime::kStringCompare, 2, 1); | 5810 __ TailCallRuntime(Runtime::kStringCompare, 2, 1); |
5797 } | 5811 } |
5798 | 5812 |
5799 __ bind(&miss); | 5813 __ bind(&miss); |
5800 GenerateMiss(masm); | 5814 GenerateMiss(masm); |
5801 } | 5815 } |
5802 | 5816 |
(...skipping 587 matching lines...)
6390 __ CheckFastElements(rdi, &double_elements); | 6404 __ CheckFastElements(rdi, &double_elements); |
6391 | 6405 |
6392 // FAST_*_SMI_ELEMENTS or FAST_*_ELEMENTS | 6406 // FAST_*_SMI_ELEMENTS or FAST_*_ELEMENTS |
6393 __ JumpIfSmi(rax, &smi_element); | 6407 __ JumpIfSmi(rax, &smi_element); |
6394 __ CheckFastSmiElements(rdi, &fast_elements); | 6408 __ CheckFastSmiElements(rdi, &fast_elements); |
6395 | 6409 |
6396 // Store into the array literal requires a elements transition. Call into | 6410 // Store into the array literal requires a elements transition. Call into |
6397 // the runtime. | 6411 // the runtime. |
6398 | 6412 |
6399 __ bind(&slow_elements); | 6413 __ bind(&slow_elements); |
6400 __ PopReturnAddressTo(rdi); | 6414 __ pop(rdi); // Pop the return address; it is pushed back below for the |
| 6415 // tail call. |
6401 __ push(rbx); | 6416 __ push(rbx); |
6402 __ push(rcx); | 6417 __ push(rcx); |
6403 __ push(rax); | 6418 __ push(rax); |
6404 __ movq(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset)); | 6419 __ movq(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset)); |
6405 __ push(FieldOperand(rbx, JSFunction::kLiteralsOffset)); | 6420 __ push(FieldOperand(rbx, JSFunction::kLiteralsOffset)); |
6406 __ push(rdx); | 6421 __ push(rdx); |
6407 __ PushReturnAddressFrom(rdi); | 6422 __ push(rdi); // Push the return address back so the tail call returns |
| 6423 // to the right place. |
6408 __ TailCallRuntime(Runtime::kStoreArrayLiteralElement, 5, 1); | 6424 __ TailCallRuntime(Runtime::kStoreArrayLiteralElement, 5, 1); |
6409 | 6425 |
6410 // Array literal has ElementsKind of FAST_*_ELEMENTS and value is an object. | 6426 // Array literal has ElementsKind of FAST_*_ELEMENTS and value is an object. |
6411 __ bind(&fast_elements); | 6427 __ bind(&fast_elements); |
6412 __ SmiToInteger32(kScratchRegister, rcx); | 6428 __ SmiToInteger32(kScratchRegister, rcx); |
6413 __ movq(rbx, FieldOperand(rbx, JSObject::kElementsOffset)); | 6429 __ movq(rbx, FieldOperand(rbx, JSObject::kElementsOffset)); |
6414 __ lea(rcx, FieldOperand(rbx, kScratchRegister, times_pointer_size, | 6430 __ lea(rcx, FieldOperand(rbx, kScratchRegister, times_pointer_size, |
6415 FixedArrayBase::kHeaderSize)); | 6431 FixedArrayBase::kHeaderSize)); |
6416 __ movq(Operand(rcx, 0), rax); | 6432 __ movq(Operand(rcx, 0), rax); |
6417 // Update the write barrier for the array store. | 6433 // Update the write barrier for the array store. |
(...skipping 26 matching lines...)
6444 } | 6460 } |
6445 | 6461 |
6446 | 6462 |
6447 void StubFailureTrampolineStub::Generate(MacroAssembler* masm) { | 6463 void StubFailureTrampolineStub::Generate(MacroAssembler* masm) { |
6448 CEntryStub ces(1, fp_registers_ ? kSaveFPRegs : kDontSaveFPRegs); | 6464 CEntryStub ces(1, fp_registers_ ? kSaveFPRegs : kDontSaveFPRegs); |
6449 __ Call(ces.GetCode(masm->isolate()), RelocInfo::CODE_TARGET); | 6465 __ Call(ces.GetCode(masm->isolate()), RelocInfo::CODE_TARGET); |
6450 int parameter_count_offset = | 6466 int parameter_count_offset = |
6451 StubFailureTrampolineFrame::kCallerStackParameterCountFrameOffset; | 6467 StubFailureTrampolineFrame::kCallerStackParameterCountFrameOffset; |
6452 __ movq(rbx, MemOperand(rbp, parameter_count_offset)); | 6468 __ movq(rbx, MemOperand(rbp, parameter_count_offset)); |
6453 masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE); | 6469 masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE); |
6454 __ PopReturnAddressTo(rcx); | 6470 __ pop(rcx); |
6455 int additional_offset = function_mode_ == JS_FUNCTION_STUB_MODE | 6471 int additional_offset = function_mode_ == JS_FUNCTION_STUB_MODE |
6456 ? kPointerSize | 6472 ? kPointerSize |
6457 : 0; | 6473 : 0; |
6458 __ lea(rsp, MemOperand(rsp, rbx, times_pointer_size, additional_offset)); | 6474 __ lea(rsp, MemOperand(rsp, rbx, times_pointer_size, additional_offset)); |
6459 __ jmp(rcx); // Return to IC Miss stub, continuation still on stack. | 6475 __ jmp(rcx); // Return to IC Miss stub, continuation still on stack. |
6460 } | 6476 } |
6461 | 6477 |
6462 | 6478 |
6463 void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) { | 6479 void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) { |
6464 if (masm->isolate()->function_entry_hook() != NULL) { | 6480 if (masm->isolate()->function_entry_hook() != NULL) { |
(...skipping 51 matching lines...)
6516 Label next; | 6532 Label next; |
6517 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i); | 6533 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i); |
6518 __ cmpl(rdx, Immediate(kind)); | 6534 __ cmpl(rdx, Immediate(kind)); |
6519 __ j(not_equal, &next); | 6535 __ j(not_equal, &next); |
6520 T stub(kind); | 6536 T stub(kind); |
6521 __ TailCallStub(&stub); | 6537 __ TailCallStub(&stub); |
6522 __ bind(&next); | 6538 __ bind(&next); |
6523 } | 6539 } |
6524 | 6540 |
6525 // If we reached this point there is a problem. | 6541 // If we reached this point there is a problem. |
6526 __ Abort(kUnexpectedElementsKindInArrayConstructor); | 6542 __ Abort("Unexpected ElementsKind in array constructor"); |
6527 } | 6543 } |
6528 | 6544 |
6529 | 6545 |
6530 static void CreateArrayDispatchOneArgument(MacroAssembler* masm) { | 6546 static void CreateArrayDispatchOneArgument(MacroAssembler* masm) { |
6531 // rbx - type info cell | 6547 // rbx - type info cell |
6532 // rdx - kind | 6548 // rdx - kind |
6533 // rax - number of arguments | 6549 // rax - number of arguments |
6534 // rdi - constructor? | 6550 // rdi - constructor? |
6535 // rsp[0] - return address | 6551 // rsp[0] - return address |
6536 // rsp[8] - last argument | 6552 // rsp[8] - last argument |
(...skipping 42 matching lines...)
6579 Label next; | 6595 Label next; |
6580 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i); | 6596 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i); |
6581 __ cmpl(rdx, Immediate(kind)); | 6597 __ cmpl(rdx, Immediate(kind)); |
6582 __ j(not_equal, &next); | 6598 __ j(not_equal, &next); |
6583 ArraySingleArgumentConstructorStub stub(kind); | 6599 ArraySingleArgumentConstructorStub stub(kind); |
6584 __ TailCallStub(&stub); | 6600 __ TailCallStub(&stub); |
6585 __ bind(&next); | 6601 __ bind(&next); |
6586 } | 6602 } |
6587 | 6603 |
6588 // If we reached this point there is a problem. | 6604 // If we reached this point there is a problem. |
6589 __ Abort(kUnexpectedElementsKindInArrayConstructor); | 6605 __ Abort("Unexpected ElementsKind in array constructor"); |
6590 } | 6606 } |
6591 | 6607 |
6592 | 6608 |
6593 template<class T> | 6609 template<class T> |
6594 static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) { | 6610 static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) { |
6595 int to_index = GetSequenceIndexFromFastElementsKind( | 6611 int to_index = GetSequenceIndexFromFastElementsKind( |
6596 TERMINAL_FAST_ELEMENTS_KIND); | 6612 TERMINAL_FAST_ELEMENTS_KIND); |
6597 for (int i = 0; i <= to_index; ++i) { | 6613 for (int i = 0; i <= to_index; ++i) { |
6598 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i); | 6614 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i); |
6599 T stub(kind); | 6615 T stub(kind); |
(...skipping 45 matching lines...)
6645 | 6661 |
6646 if (FLAG_debug_code) { | 6662 if (FLAG_debug_code) { |
6647 // The array construct code is only set for the global and natives | 6663 // The array construct code is only set for the global and natives |
6648 // builtin Array functions which always have maps. | 6664 // builtin Array functions which always have maps. |
6649 | 6665 |
6650 // Initial map for the builtin Array function should be a map. | 6666 // Initial map for the builtin Array function should be a map. |
6651 __ movq(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset)); | 6667 __ movq(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset)); |
6652 // Will both indicate a NULL and a Smi. | 6668 // Will both indicate a NULL and a Smi. |
6653 STATIC_ASSERT(kSmiTag == 0); | 6669 STATIC_ASSERT(kSmiTag == 0); |
6654 Condition not_smi = NegateCondition(masm->CheckSmi(rcx)); | 6670 Condition not_smi = NegateCondition(masm->CheckSmi(rcx)); |
6655 __ Check(not_smi, kUnexpectedInitialMapForArrayFunction); | 6671 __ Check(not_smi, "Unexpected initial map for Array function"); |
6656 __ CmpObjectType(rcx, MAP_TYPE, rcx); | 6672 __ CmpObjectType(rcx, MAP_TYPE, rcx); |
6657 __ Check(equal, kUnexpectedInitialMapForArrayFunction); | 6673 __ Check(equal, "Unexpected initial map for Array function"); |
6658 | 6674 |
6659 // We should either have undefined in rbx or a valid cell | 6675 // We should either have undefined in rbx or a valid cell |
6660 Label okay_here; | 6676 Label okay_here; |
6661 Handle<Map> cell_map = masm->isolate()->factory()->cell_map(); | 6677 Handle<Map> cell_map = masm->isolate()->factory()->cell_map(); |
6662 __ Cmp(rbx, undefined_sentinel); | 6678 __ Cmp(rbx, undefined_sentinel); |
6663 __ j(equal, &okay_here); | 6679 __ j(equal, &okay_here); |
6664 __ Cmp(FieldOperand(rbx, 0), cell_map); | 6680 __ Cmp(FieldOperand(rbx, 0), cell_map); |
6665 __ Assert(equal, kExpectedPropertyCellInRegisterRbx); | 6681 __ Assert(equal, "Expected property cell in register rbx"); |
6666 __ bind(&okay_here); | 6682 __ bind(&okay_here); |
6667 } | 6683 } |
6668 | 6684 |
6669 Label no_info, switch_ready; | 6685 Label no_info, switch_ready; |
6670 // Get the elements kind and case on that. | 6686 // Get the elements kind and case on that. |
6671 __ Cmp(rbx, undefined_sentinel); | 6687 __ Cmp(rbx, undefined_sentinel); |
6672 __ j(equal, &no_info); | 6688 __ j(equal, &no_info); |
6673 __ movq(rdx, FieldOperand(rbx, Cell::kValueOffset)); | 6689 __ movq(rdx, FieldOperand(rbx, Cell::kValueOffset)); |
6674 | 6690 |
6675 // The type cell may have undefined in its value. | 6691 // The type cell may have undefined in its value. |
(...skipping 84 matching lines...)
6760 | 6776 |
6761 if (FLAG_debug_code) { | 6777 if (FLAG_debug_code) { |
6762 // The array construct code is only set for the global and natives | 6778 // The array construct code is only set for the global and natives |
6763 // builtin Array functions which always have maps. | 6779 // builtin Array functions which always have maps. |
6764 | 6780 |
6765 // Initial map for the builtin Array function should be a map. | 6781 // Initial map for the builtin Array function should be a map. |
6766 __ movq(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset)); | 6782 __ movq(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset)); |
6767 // Will both indicate a NULL and a Smi. | 6783 // Will both indicate a NULL and a Smi. |
6768 STATIC_ASSERT(kSmiTag == 0); | 6784 STATIC_ASSERT(kSmiTag == 0); |
6769 Condition not_smi = NegateCondition(masm->CheckSmi(rcx)); | 6785 Condition not_smi = NegateCondition(masm->CheckSmi(rcx)); |
6770 __ Check(not_smi, kUnexpectedInitialMapForArrayFunction); | 6786 __ Check(not_smi, "Unexpected initial map for Array function"); |
6771 __ CmpObjectType(rcx, MAP_TYPE, rcx); | 6787 __ CmpObjectType(rcx, MAP_TYPE, rcx); |
6772 __ Check(equal, kUnexpectedInitialMapForArrayFunction); | 6788 __ Check(equal, "Unexpected initial map for Array function"); |
6773 } | 6789 } |
6774 | 6790 |
6775 // Figure out the right elements kind | 6791 // Figure out the right elements kind |
6776 __ movq(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset)); | 6792 __ movq(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset)); |
6777 | 6793 |
6778 // Load the map's "bit field 2" into |result|. We only need the first byte, | 6794 // Load the map's "bit field 2" into |result|. We only need the first byte, |
6779 // but the following masking takes care of that anyway. | 6795 // but the following masking takes care of that anyway. |
6780 __ movzxbq(rcx, FieldOperand(rcx, Map::kBitField2Offset)); | 6796 __ movzxbq(rcx, FieldOperand(rcx, Map::kBitField2Offset)); |
6781 // Retrieve elements_kind from bit field 2. | 6797 // Retrieve elements_kind from bit field 2. |
6782 __ and_(rcx, Immediate(Map::kElementsKindMask)); | 6798 __ and_(rcx, Immediate(Map::kElementsKindMask)); |
6783 __ shr(rcx, Immediate(Map::kElementsKindShift)); | 6799 __ shr(rcx, Immediate(Map::kElementsKindShift)); |
6784 | 6800 |
6785 if (FLAG_debug_code) { | 6801 if (FLAG_debug_code) { |
6786 Label done; | 6802 Label done; |
6787 __ cmpl(rcx, Immediate(FAST_ELEMENTS)); | 6803 __ cmpl(rcx, Immediate(FAST_ELEMENTS)); |
6788 __ j(equal, &done); | 6804 __ j(equal, &done); |
6789 __ cmpl(rcx, Immediate(FAST_HOLEY_ELEMENTS)); | 6805 __ cmpl(rcx, Immediate(FAST_HOLEY_ELEMENTS)); |
6790 __ Assert(equal, | 6806 __ Assert(equal, |
6791 kInvalidElementsKindForInternalArrayOrInternalPackedArray); | 6807 "Invalid ElementsKind for InternalArray or InternalPackedArray"); |
6792 __ bind(&done); | 6808 __ bind(&done); |
6793 } | 6809 } |
6794 | 6810 |
6795 Label fast_elements_case; | 6811 Label fast_elements_case; |
6796 __ cmpl(rcx, Immediate(FAST_ELEMENTS)); | 6812 __ cmpl(rcx, Immediate(FAST_ELEMENTS)); |
6797 __ j(equal, &fast_elements_case); | 6813 __ j(equal, &fast_elements_case); |
6798 GenerateCase(masm, FAST_HOLEY_ELEMENTS); | 6814 GenerateCase(masm, FAST_HOLEY_ELEMENTS); |
6799 | 6815 |
6800 __ bind(&fast_elements_case); | 6816 __ bind(&fast_elements_case); |
6801 GenerateCase(masm, FAST_ELEMENTS); | 6817 GenerateCase(masm, FAST_ELEMENTS); |
6802 } | 6818 } |
6803 | 6819 |
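The and/shr pair above decodes the elements kind packed into the map's bit field 2. In C++ terms (mask and shift values assumed purely for illustration):

    #include <cstdint>

    const int kElementsKindShift = 3;                              // assumed position
    const uint8_t kElementsKindMask = 0x1f << kElementsKindShift;  // assumed width

    // Mirrors: and_(rcx, kElementsKindMask); shr(rcx, kElementsKindShift).
    int ElementsKindFromBitField2(uint8_t bit_field2) {
      return (bit_field2 & kElementsKindMask) >> kElementsKindShift;
    }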
6804 | 6820 |
6805 #undef __ | 6821 #undef __ |
6806 | 6822 |
6807 } } // namespace v8::internal | 6823 } } // namespace v8::internal |
6808 | 6824 |
6809 #endif // V8_TARGET_ARCH_X64 | 6825 #endif // V8_TARGET_ARCH_X64 |