Chromium Code Reviews

Side by Side Diff: src/x64/code-stubs-x64.cc

Issue 22267005: Use StackArgumentsAccessor and kPCOnStackSize/kFPOnStackSize to compute stack address/operand (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Rebased with bleeding_edge Created 7 years, 2 months ago
1 // Copyright 2013 the V8 project authors. All rights reserved. 1 // Copyright 2013 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 1968 matching lines...) Expand 10 before | Expand all | Expand 10 after
1979 __ bind(&miss); 1979 __ bind(&miss);
1980 1980
1981 StubCompiler::TailCallBuiltin( 1981 StubCompiler::TailCallBuiltin(
1982 masm, BaseLoadStoreStubCompiler::MissBuiltin(kind())); 1982 masm, BaseLoadStoreStubCompiler::MissBuiltin(kind()));
1983 } 1983 }
1984 1984
1985 1985
1986 void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) { 1986 void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
1987 // The key is in rdx and the parameter count is in rax. 1987 // The key is in rdx and the parameter count is in rax.
1988 1988
1989 // The displacement is used for skipping the frame pointer on the
1990 // stack. It is the offset of the last parameter (if any) relative
1991 // to the frame pointer.
1992 static const int kDisplacement = 1 * kPointerSize;
1993
1994 // Check that the key is a smi. 1989 // Check that the key is a smi.
1995 Label slow; 1990 Label slow;
1996 __ JumpIfNotSmi(rdx, &slow); 1991 __ JumpIfNotSmi(rdx, &slow);
1997 1992
1998 // Check if the calling frame is an arguments adaptor frame. We look at the 1993 // Check if the calling frame is an arguments adaptor frame. We look at the
1999 // context offset, and if the frame is not a regular one, then we find a 1994 // context offset, and if the frame is not a regular one, then we find a
2000 // Smi instead of the context. We can't use SmiCompare here, because that 1995 // Smi instead of the context. We can't use SmiCompare here, because that
2001 // only works for comparing two smis. 1996 // only works for comparing two smis.
2002 Label adaptor; 1997 Label adaptor;
2003 __ movq(rbx, Operand(rbp, StandardFrameConstants::kCallerFPOffset)); 1998 __ movq(rbx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
2004 __ Cmp(Operand(rbx, StandardFrameConstants::kContextOffset), 1999 __ Cmp(Operand(rbx, StandardFrameConstants::kContextOffset),
2005 Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)); 2000 Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
2006 __ j(equal, &adaptor); 2001 __ j(equal, &adaptor);
2007 2002
2008 // Check index against formal parameters count limit passed in 2003 // Check index against formal parameters count limit passed in
2009 // through register rax. Use unsigned comparison to get negative 2004 // through register rax. Use unsigned comparison to get negative
2010 // check for free. 2005 // check for free.
2011 __ cmpq(rdx, rax); 2006 __ cmpq(rdx, rax);
2012 __ j(above_equal, &slow); 2007 __ j(above_equal, &slow);
2013 2008
2014 // Read the argument from the stack and return it. 2009 // Read the argument from the stack and return it.
2015 SmiIndex index = masm->SmiToIndex(rax, rax, kPointerSizeLog2); 2010 __ SmiSub(rax, rax, rdx);
2016 __ lea(rbx, Operand(rbp, index.reg, index.scale, 0)); 2011 __ SmiToInteger32(rax, rax);
2017 index = masm->SmiToNegativeIndex(rdx, rdx, kPointerSizeLog2); 2012 StackArgumentsAccessor args(rbp, rax, ARGUMENTS_DONT_CONTAIN_RECEIVER);
2018 __ movq(rax, Operand(rbx, index.reg, index.scale, kDisplacement)); 2013 __ movq(rax, args.GetArgumentOperand(0));
2019 __ Ret(); 2014 __ Ret();
2020 2015
2021 // Arguments adaptor case: Check index against actual arguments 2016 // Arguments adaptor case: Check index against actual arguments
2022 // limit found in the arguments adaptor frame. Use unsigned 2017 // limit found in the arguments adaptor frame. Use unsigned
2023 // comparison to get negative check for free. 2018 // comparison to get negative check for free.
2024 __ bind(&adaptor); 2019 __ bind(&adaptor);
2025 __ movq(rcx, Operand(rbx, ArgumentsAdaptorFrameConstants::kLengthOffset)); 2020 __ movq(rcx, Operand(rbx, ArgumentsAdaptorFrameConstants::kLengthOffset));
2026 __ cmpq(rdx, rcx); 2021 __ cmpq(rdx, rcx);
2027 __ j(above_equal, &slow); 2022 __ j(above_equal, &slow);
2028 2023
2029 // Read the argument from the stack and return it. 2024 // Read the argument from the stack and return it.
2030 index = masm->SmiToIndex(rax, rcx, kPointerSizeLog2); 2025 __ SmiSub(rcx, rcx, rdx);
2031 __ lea(rbx, Operand(rbx, index.reg, index.scale, 0)); 2026 __ SmiToInteger32(rcx, rcx);
2032 index = masm->SmiToNegativeIndex(rdx, rdx, kPointerSizeLog2); 2027 StackArgumentsAccessor adaptor_args(rbx, rcx,
2033 __ movq(rax, Operand(rbx, index.reg, index.scale, kDisplacement)); 2028 ARGUMENTS_DONT_CONTAIN_RECEIVER);
2029 __ movq(rax, adaptor_args.GetArgumentOperand(0));
2034 __ Ret(); 2030 __ Ret();
2035 2031
2036 // Slow-case: Handle non-smi or out-of-bounds access to arguments 2032 // Slow-case: Handle non-smi or out-of-bounds access to arguments
2037 // by calling the runtime system. 2033 // by calling the runtime system.
2038 __ bind(&slow); 2034 __ bind(&slow);
2039 __ PopReturnAddressTo(rbx); 2035 __ PopReturnAddressTo(rbx);
2040 __ push(rdx); 2036 __ push(rdx);
2041 __ PushReturnAddressFrom(rbx); 2037 __ PushReturnAddressFrom(rbx);
2042 __ TailCallRuntime(Runtime::kGetArgumentsProperty, 1, 1); 2038 __ TailCallRuntime(Runtime::kGetArgumentsProperty, 1, 1);
2043 } 2039 }
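
For context on the hunk above: GenerateReadElement now asks a StackArgumentsAccessor for the operand instead of combining SmiToIndex/SmiToNegativeIndex with the hand-written kDisplacement. A minimal sketch of how such an accessor could resolve GetArgumentOperand(0) when the argument count is held in a register (names and structure here are assumptions for illustration; the real definition added by this CL lives in src/x64/codegen-x64.h):

  // Illustrative sketch only, not the CL's definition.
  // Assumes: base is rsp or rbp, count_reg holds the untagged argument count,
  // and the receiver is not part of the counted arguments
  // (ARGUMENTS_DONT_CONTAIN_RECEIVER).
  Operand GetArgumentOperandSketch(Register base, Register count_reg,
                                   int index) {
    // Skip the saved return address, plus the saved frame pointer when the
    // operand is based off rbp rather than rsp.
    int displacement = base.is(rsp) ? kPCOnStackSize
                                    : kPCOnStackSize + kFPOnStackSize;
    // Argument `index` sits (count - 1 - index) slots above the last argument
    // pushed; the count is scaled via the register operand and the rest folds
    // into the immediate displacement.
    return Operand(base, count_reg, times_pointer_size,
                   displacement + (-1 - index) * kPointerSize);
  }

With rax holding (parameter count - key), as in the non-adaptor path above, this reproduces the old rbp-relative address while the explicit kPCOnStackSize/kFPOnStackSize constants take over the role of the removed 1 * kPointerSize displacement.
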
(...skipping 353 matching lines...) Expand 10 before | Expand all | Expand 10 after
2397 __ TailCallRuntime(Runtime::kRegExpExec, 4, 1); 2393 __ TailCallRuntime(Runtime::kRegExpExec, 4, 1);
2398 #else // V8_INTERPRETED_REGEXP 2394 #else // V8_INTERPRETED_REGEXP
2399 2395
2400 // Stack frame on entry. 2396 // Stack frame on entry.
2401 // rsp[0] : return address 2397 // rsp[0] : return address
2402 // rsp[8] : last_match_info (expected JSArray) 2398 // rsp[8] : last_match_info (expected JSArray)
2403 // rsp[16] : previous index 2399 // rsp[16] : previous index
2404 // rsp[24] : subject string 2400 // rsp[24] : subject string
2405 // rsp[32] : JSRegExp object 2401 // rsp[32] : JSRegExp object
2406 2402
2407 static const int kLastMatchInfoOffset = 1 * kPointerSize; 2403 enum RegExpExecStubArgumentIndices {
2408 static const int kPreviousIndexOffset = 2 * kPointerSize; 2404 JS_REG_EXP_OBJECT_ARGUMENT_INDEX,
2409 static const int kSubjectOffset = 3 * kPointerSize; 2405 SUBJECT_STRING_ARGUMENT_INDEX,
2410 static const int kJSRegExpOffset = 4 * kPointerSize; 2406 PREVIOUS_INDEX_ARGUMENT_INDEX,
2407 LAST_MATCH_INFO_ARGUMENT_INDEX,
2408 REG_EXP_EXEC_ARGUMENT_COUNT
2409 };
2411 2410
2411 StackArgumentsAccessor args(rsp, REG_EXP_EXEC_ARGUMENT_COUNT,
2412 ARGUMENTS_DONT_CONTAIN_RECEIVER);
2412 Label runtime; 2413 Label runtime;
2413 // Ensure that a RegExp stack is allocated. 2414 // Ensure that a RegExp stack is allocated.
2414 Isolate* isolate = masm->isolate(); 2415 Isolate* isolate = masm->isolate();
2415 ExternalReference address_of_regexp_stack_memory_address = 2416 ExternalReference address_of_regexp_stack_memory_address =
2416 ExternalReference::address_of_regexp_stack_memory_address(isolate); 2417 ExternalReference::address_of_regexp_stack_memory_address(isolate);
2417 ExternalReference address_of_regexp_stack_memory_size = 2418 ExternalReference address_of_regexp_stack_memory_size =
2418 ExternalReference::address_of_regexp_stack_memory_size(isolate); 2419 ExternalReference::address_of_regexp_stack_memory_size(isolate);
2419 __ Load(kScratchRegister, address_of_regexp_stack_memory_size); 2420 __ Load(kScratchRegister, address_of_regexp_stack_memory_size);
2420 __ testq(kScratchRegister, kScratchRegister); 2421 __ testq(kScratchRegister, kScratchRegister);
2421 __ j(zero, &runtime); 2422 __ j(zero, &runtime);
2422 2423
2423 // Check that the first argument is a JSRegExp object. 2424 // Check that the first argument is a JSRegExp object.
2424 __ movq(rax, Operand(rsp, kJSRegExpOffset)); 2425 __ movq(rax, args.GetArgumentOperand(JS_REG_EXP_OBJECT_ARGUMENT_INDEX));
2425 __ JumpIfSmi(rax, &runtime); 2426 __ JumpIfSmi(rax, &runtime);
2426 __ CmpObjectType(rax, JS_REGEXP_TYPE, kScratchRegister); 2427 __ CmpObjectType(rax, JS_REGEXP_TYPE, kScratchRegister);
2427 __ j(not_equal, &runtime); 2428 __ j(not_equal, &runtime);
2428 2429
2429 // Check that the RegExp has been compiled (data contains a fixed array). 2430 // Check that the RegExp has been compiled (data contains a fixed array).
2430 __ movq(rax, FieldOperand(rax, JSRegExp::kDataOffset)); 2431 __ movq(rax, FieldOperand(rax, JSRegExp::kDataOffset));
2431 if (FLAG_debug_code) { 2432 if (FLAG_debug_code) {
2432 Condition is_smi = masm->CheckSmi(rax); 2433 Condition is_smi = masm->CheckSmi(rax);
2433 __ Check(NegateCondition(is_smi), 2434 __ Check(NegateCondition(is_smi),
2434 kUnexpectedTypeForRegExpDataFixedArrayExpected); 2435 kUnexpectedTypeForRegExpDataFixedArrayExpected);
(...skipping 12 matching lines...) Expand all
2447 __ SmiToInteger32(rdx, 2448 __ SmiToInteger32(rdx,
2448 FieldOperand(rax, JSRegExp::kIrregexpCaptureCountOffset)); 2449 FieldOperand(rax, JSRegExp::kIrregexpCaptureCountOffset));
2449 // Check (number_of_captures + 1) * 2 <= offsets vector size 2450 // Check (number_of_captures + 1) * 2 <= offsets vector size
2450 // Or number_of_captures <= offsets vector size / 2 - 1 2451 // Or number_of_captures <= offsets vector size / 2 - 1
2451 STATIC_ASSERT(Isolate::kJSRegexpStaticOffsetsVectorSize >= 2); 2452 STATIC_ASSERT(Isolate::kJSRegexpStaticOffsetsVectorSize >= 2);
2452 __ cmpl(rdx, Immediate(Isolate::kJSRegexpStaticOffsetsVectorSize / 2 - 1)); 2453 __ cmpl(rdx, Immediate(Isolate::kJSRegexpStaticOffsetsVectorSize / 2 - 1));
2453 __ j(above, &runtime); 2454 __ j(above, &runtime);
2454 2455
2455 // Reset offset for possibly sliced string. 2456 // Reset offset for possibly sliced string.
2456 __ Set(r14, 0); 2457 __ Set(r14, 0);
2457 __ movq(rdi, Operand(rsp, kSubjectOffset)); 2458 __ movq(rdi, args.GetArgumentOperand(SUBJECT_STRING_ARGUMENT_INDEX));
2458 __ JumpIfSmi(rdi, &runtime); 2459 __ JumpIfSmi(rdi, &runtime);
2459 __ movq(r15, rdi); // Make a copy of the original subject string. 2460 __ movq(r15, rdi); // Make a copy of the original subject string.
2460 __ movq(rbx, FieldOperand(rdi, HeapObject::kMapOffset)); 2461 __ movq(rbx, FieldOperand(rdi, HeapObject::kMapOffset));
2461 __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset)); 2462 __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));
2462 // rax: RegExp data (FixedArray) 2463 // rax: RegExp data (FixedArray)
2463 // rdi: subject string 2464 // rdi: subject string
2464 // r15: subject string 2465 // r15: subject string
2465 // Handle subject string according to its encoding and representation: 2466 // Handle subject string according to its encoding and representation:
2466 // (1) Sequential two byte? If yes, go to (9). 2467 // (1) Sequential two byte? If yes, go to (9).
2467 // (2) Sequential one byte? If yes, go to (6). 2468 // (2) Sequential one byte? If yes, go to (6).
(...skipping 81 matching lines...) Expand 10 before | Expand all | Expand 10 after
2549 2550
2550 // rdi: sequential subject string (or look-alike, external string) 2551 // rdi: sequential subject string (or look-alike, external string)
2551 // r15: original subject string 2552 // r15: original subject string
2552 // rcx: encoding of subject string (1 if ASCII, 0 if two_byte); 2553 // rcx: encoding of subject string (1 if ASCII, 0 if two_byte);
2553 // r11: code 2554 // r11: code
2554 // Load used arguments before starting to push arguments for call to native 2555 // Load used arguments before starting to push arguments for call to native
2555 // RegExp code to avoid handling changing stack height. 2556 // RegExp code to avoid handling changing stack height.
2556 // We have to use r15 instead of rdi to load the length because rdi might 2557 // We have to use r15 instead of rdi to load the length because rdi might
2557 // have been only made to look like a sequential string when it actually 2558 // have been only made to look like a sequential string when it actually
2558 // is an external string. 2559 // is an external string.
2559 __ movq(rbx, Operand(rsp, kPreviousIndexOffset)); 2560 __ movq(rbx, args.GetArgumentOperand(PREVIOUS_INDEX_ARGUMENT_INDEX));
2560 __ JumpIfNotSmi(rbx, &runtime); 2561 __ JumpIfNotSmi(rbx, &runtime);
2561 __ SmiCompare(rbx, FieldOperand(r15, String::kLengthOffset)); 2562 __ SmiCompare(rbx, FieldOperand(r15, String::kLengthOffset));
2562 __ j(above_equal, &runtime); 2563 __ j(above_equal, &runtime);
2563 __ SmiToInteger64(rbx, rbx); 2564 __ SmiToInteger64(rbx, rbx);
2564 2565
2565 // rdi: subject string 2566 // rdi: subject string
2566 // rbx: previous index 2567 // rbx: previous index
2567 // rcx: encoding of subject string (1 if ASCII 0 if two_byte); 2568 // rcx: encoding of subject string (1 if ASCII 0 if two_byte);
2568 // r11: code 2569 // r11: code
2569 // All checks done. Now push arguments for native regexp code. 2570 // All checks done. Now push arguments for native regexp code.
(...skipping 99 matching lines...) Expand 10 before | Expand all | Expand 10 after
2669 __ j(equal, &success, Label::kNear); 2670 __ j(equal, &success, Label::kNear);
2670 __ cmpl(rax, Immediate(NativeRegExpMacroAssembler::EXCEPTION)); 2671 __ cmpl(rax, Immediate(NativeRegExpMacroAssembler::EXCEPTION));
2671 __ j(equal, &exception); 2672 __ j(equal, &exception);
2672 __ cmpl(rax, Immediate(NativeRegExpMacroAssembler::FAILURE)); 2673 __ cmpl(rax, Immediate(NativeRegExpMacroAssembler::FAILURE));
2673 // If none of the above, it can only be retry. 2674 // If none of the above, it can only be retry.
2674 // Handle that in the runtime system. 2675 // Handle that in the runtime system.
2675 __ j(not_equal, &runtime); 2676 __ j(not_equal, &runtime);
2676 2677
2677 // For failure return null. 2678 // For failure return null.
2678 __ LoadRoot(rax, Heap::kNullValueRootIndex); 2679 __ LoadRoot(rax, Heap::kNullValueRootIndex);
2679 __ ret(4 * kPointerSize); 2680 __ ret(REG_EXP_EXEC_ARGUMENT_COUNT * kPointerSize);
2680 2681
2681 // Load RegExp data. 2682 // Load RegExp data.
2682 __ bind(&success); 2683 __ bind(&success);
2683 __ movq(rax, Operand(rsp, kJSRegExpOffset)); 2684 __ movq(rax, args.GetArgumentOperand(JS_REG_EXP_OBJECT_ARGUMENT_INDEX));
2684 __ movq(rcx, FieldOperand(rax, JSRegExp::kDataOffset)); 2685 __ movq(rcx, FieldOperand(rax, JSRegExp::kDataOffset));
2685 __ SmiToInteger32(rax, 2686 __ SmiToInteger32(rax,
2686 FieldOperand(rcx, JSRegExp::kIrregexpCaptureCountOffset)); 2687 FieldOperand(rcx, JSRegExp::kIrregexpCaptureCountOffset));
2687 // Calculate number of capture registers (number_of_captures + 1) * 2. 2688 // Calculate number of capture registers (number_of_captures + 1) * 2.
2688 __ leal(rdx, Operand(rax, rax, times_1, 2)); 2689 __ leal(rdx, Operand(rax, rax, times_1, 2));
2689 2690
2690 // rdx: Number of capture registers 2691 // rdx: Number of capture registers
2691 // Check that the fourth object is a JSArray object. 2692 // Check that the fourth object is a JSArray object.
2692 __ movq(r15, Operand(rsp, kLastMatchInfoOffset)); 2693 __ movq(r15, args.GetArgumentOperand(LAST_MATCH_INFO_ARGUMENT_INDEX));
2693 __ JumpIfSmi(r15, &runtime); 2694 __ JumpIfSmi(r15, &runtime);
2694 __ CmpObjectType(r15, JS_ARRAY_TYPE, kScratchRegister); 2695 __ CmpObjectType(r15, JS_ARRAY_TYPE, kScratchRegister);
2695 __ j(not_equal, &runtime); 2696 __ j(not_equal, &runtime);
2696 // Check that the JSArray is in fast case. 2697 // Check that the JSArray is in fast case.
2697 __ movq(rbx, FieldOperand(r15, JSArray::kElementsOffset)); 2698 __ movq(rbx, FieldOperand(r15, JSArray::kElementsOffset));
2698 __ movq(rax, FieldOperand(rbx, HeapObject::kMapOffset)); 2699 __ movq(rax, FieldOperand(rbx, HeapObject::kMapOffset));
2699 __ CompareRoot(rax, Heap::kFixedArrayMapRootIndex); 2700 __ CompareRoot(rax, Heap::kFixedArrayMapRootIndex);
2700 __ j(not_equal, &runtime); 2701 __ j(not_equal, &runtime);
2701 // Check that the last match info has space for the capture registers and the 2702 // Check that the last match info has space for the capture registers and the
2702 // additional information. Ensure no overflow in add. 2703 // additional information. Ensure no overflow in add.
2703 STATIC_ASSERT(FixedArray::kMaxLength < kMaxInt - FixedArray::kLengthOffset); 2704 STATIC_ASSERT(FixedArray::kMaxLength < kMaxInt - FixedArray::kLengthOffset);
2704 __ SmiToInteger32(rax, FieldOperand(rbx, FixedArray::kLengthOffset)); 2705 __ SmiToInteger32(rax, FieldOperand(rbx, FixedArray::kLengthOffset));
2705 __ subl(rax, Immediate(RegExpImpl::kLastMatchOverhead)); 2706 __ subl(rax, Immediate(RegExpImpl::kLastMatchOverhead));
2706 __ cmpl(rdx, rax); 2707 __ cmpl(rdx, rax);
2707 __ j(greater, &runtime); 2708 __ j(greater, &runtime);
2708 2709
2709 // rbx: last_match_info backing store (FixedArray) 2710 // rbx: last_match_info backing store (FixedArray)
2710 // rdx: number of capture registers 2711 // rdx: number of capture registers
2711 // Store the capture count. 2712 // Store the capture count.
2712 __ Integer32ToSmi(kScratchRegister, rdx); 2713 __ Integer32ToSmi(kScratchRegister, rdx);
2713 __ movq(FieldOperand(rbx, RegExpImpl::kLastCaptureCountOffset), 2714 __ movq(FieldOperand(rbx, RegExpImpl::kLastCaptureCountOffset),
2714 kScratchRegister); 2715 kScratchRegister);
2715 // Store last subject and last input. 2716 // Store last subject and last input.
2716 __ movq(rax, Operand(rsp, kSubjectOffset)); 2717 __ movq(rax, args.GetArgumentOperand(SUBJECT_STRING_ARGUMENT_INDEX));
2717 __ movq(FieldOperand(rbx, RegExpImpl::kLastSubjectOffset), rax); 2718 __ movq(FieldOperand(rbx, RegExpImpl::kLastSubjectOffset), rax);
2718 __ movq(rcx, rax); 2719 __ movq(rcx, rax);
2719 __ RecordWriteField(rbx, 2720 __ RecordWriteField(rbx,
2720 RegExpImpl::kLastSubjectOffset, 2721 RegExpImpl::kLastSubjectOffset,
2721 rax, 2722 rax,
2722 rdi, 2723 rdi,
2723 kDontSaveFPRegs); 2724 kDontSaveFPRegs);
2724 __ movq(rax, rcx); 2725 __ movq(rax, rcx);
2725 __ movq(FieldOperand(rbx, RegExpImpl::kLastInputOffset), rax); 2726 __ movq(FieldOperand(rbx, RegExpImpl::kLastInputOffset), rax);
2726 __ RecordWriteField(rbx, 2727 __ RecordWriteField(rbx,
(...skipping 22 matching lines...) Expand all
2749 __ movq(FieldOperand(rbx, 2750 __ movq(FieldOperand(rbx,
2750 rdx, 2751 rdx,
2751 times_pointer_size, 2752 times_pointer_size,
2752 RegExpImpl::kFirstCaptureOffset), 2753 RegExpImpl::kFirstCaptureOffset),
2753 rdi); 2754 rdi);
2754 __ jmp(&next_capture); 2755 __ jmp(&next_capture);
2755 __ bind(&done); 2756 __ bind(&done);
2756 2757
2757 // Return last match info. 2758 // Return last match info.
2758 __ movq(rax, r15); 2759 __ movq(rax, r15);
2759 __ ret(4 * kPointerSize); 2760 __ ret(REG_EXP_EXEC_ARGUMENT_COUNT * kPointerSize);
2760 2761
2761 __ bind(&exception); 2762 __ bind(&exception);
2762 // Result must now be exception. If there is no pending exception already a 2763 // Result must now be exception. If there is no pending exception already a
2763 // stack overflow (on the backtrack stack) was detected in RegExp code but 2764 // stack overflow (on the backtrack stack) was detected in RegExp code but
2764 // haven't created the exception yet. Handle that in the runtime system. 2765 // haven't created the exception yet. Handle that in the runtime system.
2765 // TODO(592): Rerunning the RegExp to get the stack overflow exception. 2766 // TODO(592): Rerunning the RegExp to get the stack overflow exception.
2766 ExternalReference pending_exception_address( 2767 ExternalReference pending_exception_address(
2767 Isolate::kPendingExceptionAddress, isolate); 2768 Isolate::kPendingExceptionAddress, isolate);
2768 Operand pending_exception_operand = 2769 Operand pending_exception_operand =
2769 masm->ExternalOperand(pending_exception_address, rbx); 2770 masm->ExternalOperand(pending_exception_address, rbx);
(...skipping 2058 matching lines...) Expand 10 before | Expand all | Expand 10 after
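
The RegExpExecStub portion above uses the same accessor with an immediate count, so the enum indices have to reproduce the frame documented at the top of the stub (rsp[8] last_match_info through rsp[32] JSRegExp object). A rough cross-check under the same assumed operand computation (again illustrative, not the CL's code):

  // Illustrative check: with rsp as the base, an immediate count of
  // REG_EXP_EXEC_ARGUMENT_COUNT (4) and no receiver slot, argument `index`
  // would resolve to
  //   Operand(rsp, kPCOnStackSize + (4 - 1 - index) * kPointerSize)
  // which, with 8-byte slots, matches the documented layout:
  //   JS_REG_EXP_OBJECT_ARGUMENT_INDEX (0) -> rsp[32]  JSRegExp object
  //   SUBJECT_STRING_ARGUMENT_INDEX    (1) -> rsp[24]  subject string
  //   PREVIOUS_INDEX_ARGUMENT_INDEX    (2) -> rsp[16]  previous index
  //   LAST_MATCH_INFO_ARGUMENT_INDEX   (3) -> rsp[8]   last_match_info
  // and __ ret(REG_EXP_EXEC_ARGUMENT_COUNT * kPointerSize) pops exactly
  // those four slots, same as the old __ ret(4 * kPointerSize).
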
4828 4829
4829 void SubStringStub::Generate(MacroAssembler* masm) { 4830 void SubStringStub::Generate(MacroAssembler* masm) {
4830 Label runtime; 4831 Label runtime;
4831 4832
4832 // Stack frame on entry. 4833 // Stack frame on entry.
4833 // rsp[0] : return address 4834 // rsp[0] : return address
4834 // rsp[8] : to 4835 // rsp[8] : to
4835 // rsp[16] : from 4836 // rsp[16] : from
4836 // rsp[24] : string 4837 // rsp[24] : string
4837 4838
4838 const int kToOffset = 1 * kPointerSize; 4839 enum SubStringStubArgumentIndices {
4839 const int kFromOffset = kToOffset + kPointerSize; 4840 STRING_ARGUMENT_INDEX,
4840 const int kStringOffset = kFromOffset + kPointerSize; 4841 FROM_ARGUMENT_INDEX,
4841 const int kArgumentsSize = (kStringOffset + kPointerSize) - kToOffset; 4842 TO_ARGUMENT_INDEX,
4843 SUB_STRING_ARGUMENT_COUNT
4844 };
4845
4846 StackArgumentsAccessor args(rsp, SUB_STRING_ARGUMENT_COUNT,
4847 ARGUMENTS_DONT_CONTAIN_RECEIVER);
4842 4848
4843 // Make sure first argument is a string. 4849 // Make sure first argument is a string.
4844 __ movq(rax, Operand(rsp, kStringOffset)); 4850 __ movq(rax, args.GetArgumentOperand(STRING_ARGUMENT_INDEX));
4845 STATIC_ASSERT(kSmiTag == 0); 4851 STATIC_ASSERT(kSmiTag == 0);
4846 __ testl(rax, Immediate(kSmiTagMask)); 4852 __ testl(rax, Immediate(kSmiTagMask));
4847 __ j(zero, &runtime); 4853 __ j(zero, &runtime);
4848 Condition is_string = masm->IsObjectStringType(rax, rbx, rbx); 4854 Condition is_string = masm->IsObjectStringType(rax, rbx, rbx);
4849 __ j(NegateCondition(is_string), &runtime); 4855 __ j(NegateCondition(is_string), &runtime);
4850 4856
4851 // rax: string 4857 // rax: string
4852 // rbx: instance type 4858 // rbx: instance type
4853 // Calculate length of sub string using the smi values. 4859 // Calculate length of sub string using the smi values.
4854 __ movq(rcx, Operand(rsp, kToOffset)); 4860 __ movq(rcx, args.GetArgumentOperand(TO_ARGUMENT_INDEX));
4855 __ movq(rdx, Operand(rsp, kFromOffset)); 4861 __ movq(rdx, args.GetArgumentOperand(FROM_ARGUMENT_INDEX));
4856 __ JumpUnlessBothNonNegativeSmi(rcx, rdx, &runtime); 4862 __ JumpUnlessBothNonNegativeSmi(rcx, rdx, &runtime);
4857 4863
4858 __ SmiSub(rcx, rcx, rdx); // Overflow doesn't happen. 4864 __ SmiSub(rcx, rcx, rdx); // Overflow doesn't happen.
4859 __ cmpq(rcx, FieldOperand(rax, String::kLengthOffset)); 4865 __ cmpq(rcx, FieldOperand(rax, String::kLengthOffset));
4860 Label not_original_string; 4866 Label not_original_string;
4861 // Shorter than original string's length: an actual substring. 4867 // Shorter than original string's length: an actual substring.
4862 __ j(below, &not_original_string, Label::kNear); 4868 __ j(below, &not_original_string, Label::kNear);
4863 // Longer than original string's length or negative: unsafe arguments. 4869 // Longer than original string's length or negative: unsafe arguments.
4864 __ j(above, &runtime); 4870 __ j(above, &runtime);
4865 // Return original string. 4871 // Return original string.
4866 Counters* counters = masm->isolate()->counters(); 4872 Counters* counters = masm->isolate()->counters();
4867 __ IncrementCounter(counters->sub_string_native(), 1); 4873 __ IncrementCounter(counters->sub_string_native(), 1);
4868 __ ret(kArgumentsSize); 4874 __ ret(SUB_STRING_ARGUMENT_COUNT * kPointerSize);
4869 __ bind(&not_original_string); 4875 __ bind(&not_original_string);
4870 4876
4871 Label single_char; 4877 Label single_char;
4872 __ SmiCompare(rcx, Smi::FromInt(1)); 4878 __ SmiCompare(rcx, Smi::FromInt(1));
4873 __ j(equal, &single_char); 4879 __ j(equal, &single_char);
4874 4880
4875 __ SmiToInteger32(rcx, rcx); 4881 __ SmiToInteger32(rcx, rcx);
4876 4882
4877 // rax: string 4883 // rax: string
4878 // rbx: instance type 4884 // rbx: instance type
(...skipping 62 matching lines...) Expand 10 before | Expand all | Expand 10 after
4941 __ bind(&two_byte_slice); 4947 __ bind(&two_byte_slice);
4942 __ AllocateTwoByteSlicedString(rax, rbx, r14, &runtime); 4948 __ AllocateTwoByteSlicedString(rax, rbx, r14, &runtime);
4943 __ bind(&set_slice_header); 4949 __ bind(&set_slice_header);
4944 __ Integer32ToSmi(rcx, rcx); 4950 __ Integer32ToSmi(rcx, rcx);
4945 __ movq(FieldOperand(rax, SlicedString::kLengthOffset), rcx); 4951 __ movq(FieldOperand(rax, SlicedString::kLengthOffset), rcx);
4946 __ movq(FieldOperand(rax, SlicedString::kHashFieldOffset), 4952 __ movq(FieldOperand(rax, SlicedString::kHashFieldOffset),
4947 Immediate(String::kEmptyHashField)); 4953 Immediate(String::kEmptyHashField));
4948 __ movq(FieldOperand(rax, SlicedString::kParentOffset), rdi); 4954 __ movq(FieldOperand(rax, SlicedString::kParentOffset), rdi);
4949 __ movq(FieldOperand(rax, SlicedString::kOffsetOffset), rdx); 4955 __ movq(FieldOperand(rax, SlicedString::kOffsetOffset), rdx);
4950 __ IncrementCounter(counters->sub_string_native(), 1); 4956 __ IncrementCounter(counters->sub_string_native(), 1);
4951 __ ret(kArgumentsSize); 4957 __ ret(3 * kPointerSize);
4952 4958
4953 __ bind(&copy_routine); 4959 __ bind(&copy_routine);
4954 } 4960 }
4955 4961
4956 // rdi: underlying subject string 4962 // rdi: underlying subject string
4957 // rbx: instance type of underlying subject string 4963 // rbx: instance type of underlying subject string
4958 // rdx: adjusted start index (smi) 4964 // rdx: adjusted start index (smi)
4959 // rcx: length 4965 // rcx: length
4960 // The subject string can only be external or sequential string of either 4966 // The subject string can only be external or sequential string of either
4961 // encoding at this point. 4967 // encoding at this point.
(...skipping 33 matching lines...) Expand 10 before | Expand all | Expand 10 after
4995 __ lea(rdi, FieldOperand(rax, SeqOneByteString::kHeaderSize)); 5001 __ lea(rdi, FieldOperand(rax, SeqOneByteString::kHeaderSize));
4996 5002
4997 // rax: result string 5003 // rax: result string
4998 // rcx: result length 5004 // rcx: result length
4999 // rdi: first character of result 5005 // rdi: first character of result
5000 // rsi: character of sub string start 5006 // rsi: character of sub string start
5001 // r14: original value of rsi 5007 // r14: original value of rsi
5002 StringHelper::GenerateCopyCharactersREP(masm, rdi, rsi, rcx, true); 5008 StringHelper::GenerateCopyCharactersREP(masm, rdi, rsi, rcx, true);
5003 __ movq(rsi, r14); // Restore rsi. 5009 __ movq(rsi, r14); // Restore rsi.
5004 __ IncrementCounter(counters->sub_string_native(), 1); 5010 __ IncrementCounter(counters->sub_string_native(), 1);
5005 __ ret(kArgumentsSize); 5011 __ ret(SUB_STRING_ARGUMENT_COUNT * kPointerSize);
5006 5012
5007 __ bind(&two_byte_sequential); 5013 __ bind(&two_byte_sequential);
5008 // Allocate the result. 5014 // Allocate the result.
5009 __ AllocateTwoByteString(rax, rcx, r11, r14, r15, &runtime); 5015 __ AllocateTwoByteString(rax, rcx, r11, r14, r15, &runtime);
5010 5016
5011 // rax: result string 5017 // rax: result string
5012 // rcx: result string length 5018 // rcx: result string length
5013 __ movq(r14, rsi); // esi used by following code. 5019 __ movq(r14, rsi); // esi used by following code.
5014 { // Locate character of sub string start. 5020 { // Locate character of sub string start.
5015 SmiIndex smi_as_index = masm->SmiToIndex(rdx, rdx, times_2); 5021 SmiIndex smi_as_index = masm->SmiToIndex(rdx, rdx, times_2);
5016 __ lea(rsi, Operand(rdi, smi_as_index.reg, smi_as_index.scale, 5022 __ lea(rsi, Operand(rdi, smi_as_index.reg, smi_as_index.scale,
5017 SeqOneByteString::kHeaderSize - kHeapObjectTag)); 5023 SeqOneByteString::kHeaderSize - kHeapObjectTag));
5018 } 5024 }
5019 // Locate first character of result. 5025 // Locate first character of result.
5020 __ lea(rdi, FieldOperand(rax, SeqTwoByteString::kHeaderSize)); 5026 __ lea(rdi, FieldOperand(rax, SeqTwoByteString::kHeaderSize));
5021 5027
5022 // rax: result string 5028 // rax: result string
5023 // rcx: result length 5029 // rcx: result length
5024 // rdi: first character of result 5030 // rdi: first character of result
5025 // rsi: character of sub string start 5031 // rsi: character of sub string start
5026 // r14: original value of rsi 5032 // r14: original value of rsi
5027 StringHelper::GenerateCopyCharactersREP(masm, rdi, rsi, rcx, false); 5033 StringHelper::GenerateCopyCharactersREP(masm, rdi, rsi, rcx, false);
5028 __ movq(rsi, r14); // Restore esi. 5034 __ movq(rsi, r14); // Restore esi.
5029 __ IncrementCounter(counters->sub_string_native(), 1); 5035 __ IncrementCounter(counters->sub_string_native(), 1);
5030 __ ret(kArgumentsSize); 5036 __ ret(SUB_STRING_ARGUMENT_COUNT * kPointerSize);
5031 5037
5032 // Just jump to runtime to create the sub string. 5038 // Just jump to runtime to create the sub string.
5033 __ bind(&runtime); 5039 __ bind(&runtime);
5034 __ TailCallRuntime(Runtime::kSubString, 3, 1); 5040 __ TailCallRuntime(Runtime::kSubString, 3, 1);
5035 5041
5036 __ bind(&single_char); 5042 __ bind(&single_char);
5037 // rax: string 5043 // rax: string
5038 // rbx: instance type 5044 // rbx: instance type
5039 // rcx: sub string length (smi) 5045 // rcx: sub string length (smi)
5040 // rdx: from index (smi) 5046 // rdx: from index (smi)
5041 StringCharAtGenerator generator( 5047 StringCharAtGenerator generator(
5042 rax, rdx, rcx, rax, &runtime, &runtime, &runtime, STRING_INDEX_IS_NUMBER); 5048 rax, rdx, rcx, rax, &runtime, &runtime, &runtime, STRING_INDEX_IS_NUMBER);
5043 generator.GenerateFast(masm); 5049 generator.GenerateFast(masm);
5044 __ ret(kArgumentsSize); 5050 __ ret(SUB_STRING_ARGUMENT_COUNT * kPointerSize);
5045 generator.SkipSlow(masm, &runtime); 5051 generator.SkipSlow(masm, &runtime);
5046 } 5052 }
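
SubStringStub follows the same idiom: the enum's trailing enumerator doubles as the argument count, so the operand indices and the ret immediates come from one definition instead of the hand-maintained kToOffset/kFromOffset/kStringOffset/kArgumentsSize constants. A hypothetical stub skeleton showing the pattern (not from this CL):

  // Hypothetical example of the trailing-count enum idiom used above.
  enum ExampleStubArgumentIndices {
    FIRST_ARGUMENT_INDEX,    // 0: leftmost argument, farthest from the
                             //    return address (pushed first)
    SECOND_ARGUMENT_INDEX,   // 1: next argument
    EXAMPLE_ARGUMENT_COUNT   // one past the last index == number of arguments
  };

  // StackArgumentsAccessor args(rsp, EXAMPLE_ARGUMENT_COUNT,
  //                             ARGUMENTS_DONT_CONTAIN_RECEIVER);
  // __ movq(rax, args.GetArgumentOperand(FIRST_ARGUMENT_INDEX));
  // __ ret(EXAMPLE_ARGUMENT_COUNT * kPointerSize);  // pop all arguments

Note that the sliced-string return in this hunk still uses a literal 3 * kPointerSize rather than SUB_STRING_ARGUMENT_COUNT * kPointerSize, unlike the other return paths.
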
5047 5053
5048 5054
5049 void StringCompareStub::GenerateFlatAsciiStringEquals(MacroAssembler* masm, 5055 void StringCompareStub::GenerateFlatAsciiStringEquals(MacroAssembler* masm,
5050 Register left, 5056 Register left,
5051 Register right, 5057 Register right,
5052 Register scratch1, 5058 Register scratch1,
5053 Register scratch2) { 5059 Register scratch2) {
5054 Register length = scratch1; 5060 Register length = scratch1;
(...skipping 1485 matching lines...) Expand 10 before | Expand all | Expand 10 after
6540 __ bind(&fast_elements_case); 6546 __ bind(&fast_elements_case);
6541 GenerateCase(masm, FAST_ELEMENTS); 6547 GenerateCase(masm, FAST_ELEMENTS);
6542 } 6548 }
6543 6549
6544 6550
6545 #undef __ 6551 #undef __
6546 6552
6547 } } // namespace v8::internal 6553 } } // namespace v8::internal
6548 6554
6549 #endif // V8_TARGET_ARCH_X64 6555 #endif // V8_TARGET_ARCH_X64
