OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 1974 matching lines...)
1985 | 1985 |
1986 void MacroAssembler::PopTryHandler() { | 1986 void MacroAssembler::PopTryHandler() { |
1987 ASSERT_EQ(0, StackHandlerConstants::kNextOffset); | 1987 ASSERT_EQ(0, StackHandlerConstants::kNextOffset); |
1988 pop(a1); | 1988 pop(a1); |
1989 Addu(sp, sp, Operand(StackHandlerConstants::kSize - kPointerSize)); | 1989 Addu(sp, sp, Operand(StackHandlerConstants::kSize - kPointerSize)); |
1990 li(at, Operand(ExternalReference(Isolate::k_handler_address, isolate()))); | 1990 li(at, Operand(ExternalReference(Isolate::k_handler_address, isolate()))); |
1991 sw(a1, MemOperand(at)); | 1991 sw(a1, MemOperand(at)); |
1992 } | 1992 } |
1993 | 1993 |
1994 | 1994 |
| 1995 void MacroAssembler::Throw(Register value) { |
| 1996 // v0 is expected to hold the exception. |
| 1997 Move(v0, value); |
| 1998 |
| 1999 // Adjust this code if the handler layout asserted below changes. |
| 2000 STATIC_ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize); |
| 2001 |
| 2002 // Drop sp to the top stack handler. |
| 2003 li(a3, Operand(ExternalReference(Isolate::k_handler_address, |
| 2004 isolate()))); |
| 2005 lw(sp, MemOperand(a3)); |
| 2006 |
| 2007 // Restore the next handler and frame pointer, discard handler state. |
| 2008 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0); |
| 2009 pop(a2); |
| 2010 sw(a2, MemOperand(a3)); |
| 2011 STATIC_ASSERT(StackHandlerConstants::kFPOffset == 2 * kPointerSize); |
| 2012 MultiPop(a3.bit() | fp.bit()); |
| 2013 |
| 2014 // Before returning we restore the context from the frame pointer if |
| 2015 // not NULL. The frame pointer is NULL in the exception handler of a |
| 2016 // JS entry frame. |
| 2017 // Set cp to NULL if fp is NULL. |
| 2018 Label done; |
| 2019 Branch(USE_DELAY_SLOT, &done, eq, fp, Operand(zero_reg)); |
| 2020 mov(cp, zero_reg); // In branch delay slot. |
| 2021 lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
| 2022 bind(&done); |
| 2023 |
| 2024 #ifdef DEBUG |
| 2025 // When emitting debug_code, set ra as return address for the jump. |
| 2026 // 5 instructions: add: 1, pop: 2, jump: 2. |
| 2027 const int kOffsetRaInstructions = 5; |
| 2028 Label find_ra; |
| 2029 |
| 2030 if (emit_debug_code()) { |
| 2031 // Compute ra for the Jump(t9). |
| 2032 const int kOffsetRaBytes = kOffsetRaInstructions * Assembler::kInstrSize; |
| 2033 |
| 2034 // This branch-and-link sequence is needed to get the current PC on MIPS |
| 2035 // into the ra register; it is then adjusted for the instruction count. |
| 2036 bal(&find_ra); // bal exposes branch delay slot. |
| 2037 nop(); // Branch delay slot nop. |
| 2038 bind(&find_ra); |
| 2039 addiu(ra, ra, kOffsetRaBytes); |
| 2040 } |
| 2041 #endif |
| 2042 |
| 2043 STATIC_ASSERT(StackHandlerConstants::kPCOffset == 3 * kPointerSize); |
| 2044 pop(t9); // 2 instructions: lw, add sp. |
| 2045 Jump(t9); // 2 instructions: jr, nop (in delay slot). |
| 2046 |
| 2047 if (emit_debug_code()) { |
| 2048 // Make sure that the expected number of instructions were generated. |
| 2049 ASSERT_EQ(kOffsetRaInstructions, |
| 2050 InstructionsGeneratedSince(&find_ra)); |
| 2051 } |
| 2052 } |
| 2053 |
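The STATIC_ASSERTs in Throw() pin down a four-word stack handler record: next at offset 0, the saved frame pointer at 2 * kPointerSize, the handler pc at 3 * kPointerSize, four words in total. A minimal C++ mirror of that layout, assuming kPointerSize == 4 and that the state word fills the remaining slot at 1 * kPointerSize (the struct and field names are illustrative, not V8 declarations):

    #include <cstdint>

    // Illustrative mirror of StackHandlerConstants; only the asserted
    // offsets (kNextOffset == 0, kFPOffset == 2 * kPointerSize,
    // kPCOffset == 3 * kPointerSize, kSize == 4 * kPointerSize) come
    // from the code above.
    struct StackHandlerLayout {
      uint32_t next;   // kNextOffset:  link to the next handler in the chain
      uint32_t state;  // kStateOffset: handler kind, e.g. StackHandler::ENTRY
      uint32_t fp;     // kFPOffset:    saved frame pointer
      uint32_t pc;     // kPCOffset:    address Throw() finally jumps to via t9
    };
    static_assert(sizeof(StackHandlerLayout) == 4 * sizeof(uint32_t),
                  "mirrors StackHandlerConstants::kSize");

Throw() pops next to relink the chain, restores fp (discarding the state word into a3), and jumps through pc, which is why the pops above appear in exactly this order.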
| 2054 |
| 2055 void MacroAssembler::ThrowUncatchable(UncatchableExceptionType type, |
| 2056 Register value) { |
| 2057 // Adjust this code if the handler layout asserted below changes. |
| 2058 STATIC_ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize); |
| 2059 |
| 2060 // v0 is expected to hold the exception. |
| 2061 Move(v0, value); |
| 2062 |
| 2063 // Drop sp to the top stack handler. |
| 2064 li(a3, Operand(ExternalReference(Isolate::k_handler_address, isolate()))); |
| 2065 lw(sp, MemOperand(a3)); |
| 2066 |
| 2067 // Unwind the handlers until the ENTRY handler is found. |
| 2068 Label loop, done; |
| 2069 bind(&loop); |
| 2070 // Load the type of the current stack handler. |
| 2071 const int kStateOffset = StackHandlerConstants::kStateOffset; |
| 2072 lw(a2, MemOperand(sp, kStateOffset)); |
| 2073 Branch(&done, eq, a2, Operand(StackHandler::ENTRY)); |
| 2074 // Fetch the next handler in the list. |
| 2075 const int kNextOffset = StackHandlerConstants::kNextOffset; |
| 2076 lw(sp, MemOperand(sp, kNextOffset)); |
| 2077 jmp(&loop); |
| 2078 bind(&done); |
| 2079 |
| 2080 // Set the top handler address to the next handler past the current ENTRY handler. |
| 2081 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0); |
| 2082 pop(a2); |
| 2083 sw(a2, MemOperand(a3)); |
| 2084 |
| 2085 if (type == OUT_OF_MEMORY) { |
| 2086 // Set external caught exception to false. |
| 2087 ExternalReference external_caught( |
| 2088 Isolate::k_external_caught_exception_address, isolate()); |
| 2089 li(a0, Operand(false, RelocInfo::NONE)); |
| 2090 li(a2, Operand(external_caught)); |
| 2091 sw(a0, MemOperand(a2)); |
| 2092 |
| 2093 // Set pending exception and v0 to out of memory exception. |
| 2094 Failure* out_of_memory = Failure::OutOfMemoryException(); |
| 2095 li(v0, Operand(reinterpret_cast<int32_t>(out_of_memory))); |
| 2096 li(a2, Operand(ExternalReference(Isolate::k_pending_exception_address, |
| 2097 isolate()))); |
| 2098 sw(v0, MemOperand(a2)); |
| 2099 } |
| 2100 |
| 2101 // Stack layout at this point. See also StackHandlerConstants. |
| 2102 // sp -> state (ENTRY) |
| 2103 // fp |
| 2104 // ra |
| 2105 |
| 2106 // Discard handler state (a2 is not used) and restore frame pointer. |
| 2107 STATIC_ASSERT(StackHandlerConstants::kFPOffset == 2 * kPointerSize); |
| 2108 MultiPop(a2.bit() | fp.bit()); // a2: discarded state. |
| 2109 // Before returning we restore the context from the frame pointer if |
| 2110 // not NULL. The frame pointer is NULL in the exception handler of a |
| 2111 // JS entry frame. |
| 2112 Label cp_null; |
| 2113 Branch(USE_DELAY_SLOT, &cp_null, eq, fp, Operand(zero_reg)); |
| 2114 mov(cp, zero_reg); // In the branch delay slot. |
| 2115 lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
| 2116 bind(&cp_null); |
| 2117 |
| 2118 #ifdef DEBUG |
| 2119 // When emitting debug_code, set ra as return address for the jump. |
| 2120 // 5 instructions: add: 1, pop: 2, jump: 2. |
| 2121 const int kOffsetRaInstructions = 5; |
| 2122 Label find_ra; |
| 2123 |
| 2124 if (emit_debug_code()) { |
| 2125 // Compute ra for the Jump(t9). |
| 2126 const int kOffsetRaBytes = kOffsetRaInstructions * Assembler::kInstrSize; |
| 2127 |
| 2128 // This branch-and-link sequence is needed to get the current PC on MIPS |
| 2129 // into the ra register; it is then adjusted for the instruction count. |
| 2130 bal(&find_ra); // bal exposes branch-delay slot. |
| 2131 nop(); // Branch delay slot nop. |
| 2132 bind(&find_ra); |
| 2133 addiu(ra, ra, kOffsetRaBytes); |
| 2134 } |
| 2135 #endif |
| 2136 STATIC_ASSERT(StackHandlerConstants::kPCOffset == 3 * kPointerSize); |
| 2137 pop(t9); // 2 instructions: lw, add sp. |
| 2138 Jump(t9); // 2 instructions: jr, nop (in delay slot). |
| 2139 |
| 2140 if (emit_debug_code()) { |
| 2141 // Make sure that the expected number of instructions were generated. |
| 2142 ASSERT_EQ(kOffsetRaInstructions, |
| 2143 InstructionsGeneratedSince(&find_ra)); |
| 2144 } |
| 2145 } |
| 2146 |
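The unwind loop in ThrowUncatchable() treats sp itself as the cursor of a linked-list walk: each lw(sp, MemOperand(sp, kNextOffset)) follows a handler's next field until the state word reads StackHandler::ENTRY. A hedged C++ equivalent, reusing the illustrative StackHandlerLayout above (top_handler_address stands in for the k_handler_address external reference):

    // Walk the handler chain until an ENTRY handler is found; mirrors the
    // code between bind(&loop) and bind(&done) above.
    StackHandlerLayout* UnwindToEntryHandler(
        StackHandlerLayout** top_handler_address, uint32_t entry_state) {
      StackHandlerLayout* handler = *top_handler_address;  // lw(sp, MemOperand(a3))
      while (handler->state != entry_state) {              // Branch(&done, eq, a2, ENTRY)
        handler =                                          // lw(sp, MemOperand(sp, kNextOffset))
            reinterpret_cast<StackHandlerLayout*>(handler->next);
      }
      return handler;  // sp now points at the ENTRY handler's record
    }

This works only because every handler record lives on the same stack being unwound, so following next both walks the list and pops everything in between.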
| 2147 |
1995 void MacroAssembler::AllocateInNewSpace(int object_size, | 2148 void MacroAssembler::AllocateInNewSpace(int object_size, |
1996 Register result, | 2149 Register result, |
1997 Register scratch1, | 2150 Register scratch1, |
1998 Register scratch2, | 2151 Register scratch2, |
1999 Label* gc_required, | 2152 Label* gc_required, |
2000 AllocationFlags flags) { | 2153 AllocationFlags flags) { |
2001 if (!FLAG_inline_new) { | 2154 if (!FLAG_inline_new) { |
2002 if (emit_debug_code()) { | 2155 if (emit_debug_code()) { |
2003 // Trash the registers to simulate an allocation failure. | 2156 // Trash the registers to simulate an allocation failure. |
2004 li(result, 0x7091); | 2157 li(result, 0x7091); |
(...skipping 339 matching lines...)
2344 Branch(&word_loop, eq, scratch, Operand(zero_reg)); | 2497 Branch(&word_loop, eq, scratch, Operand(zero_reg)); |
2345 lbu(scratch, MemOperand(src)); | 2498 lbu(scratch, MemOperand(src)); |
2346 Addu(src, src, 1); | 2499 Addu(src, src, 1); |
2347 sb(scratch, MemOperand(dst)); | 2500 sb(scratch, MemOperand(dst)); |
2348 Addu(dst, dst, 1); | 2501 Addu(dst, dst, 1); |
2349 Subu(length, length, Operand(1)); | 2502 Subu(length, length, Operand(1)); |
2350 Branch(&byte_loop_1, ne, length, Operand(zero_reg)); | 2503 Branch(&byte_loop_1, ne, length, Operand(zero_reg)); |
2351 | 2504 |
2352 // Copy bytes in word size chunks. | 2505 // Copy bytes in word size chunks. |
2353 bind(&word_loop); | 2506 bind(&word_loop); |
2354 if (FLAG_debug_code) { | 2507 if (emit_debug_code()) { |
2355 And(scratch, src, kPointerSize - 1); | 2508 And(scratch, src, kPointerSize - 1); |
2356 Assert(eq, "Expecting alignment for CopyBytes", | 2509 Assert(eq, "Expecting alignment for CopyBytes", |
2357 scratch, Operand(zero_reg)); | 2510 scratch, Operand(zero_reg)); |
2358 } | 2511 } |
2359 Branch(&byte_loop, lt, length, Operand(kPointerSize)); | 2512 Branch(&byte_loop, lt, length, Operand(kPointerSize)); |
2360 lw(scratch, MemOperand(src)); | 2513 lw(scratch, MemOperand(src)); |
2361 Addu(src, src, kPointerSize); | 2514 Addu(src, src, kPointerSize); |
2362 | 2515 |
2363 // TODO(kalmard): check if this can be optimized to use sw in most cases. | 2516 // TODO(kalmard): check if this can be optimized to use sw in most cases. |
2364 // Can't use unaligned access - copy byte by byte. | 2517 // Can't use unaligned access - copy byte by byte. |
(...skipping 308 matching lines...)
2673 // ----------------------------------------------------------------------------- | 2826 // ----------------------------------------------------------------------------- |
2674 // Runtime calls. | 2827 // Runtime calls. |
2675 | 2828 |
2676 void MacroAssembler::CallStub(CodeStub* stub, Condition cond, | 2829 void MacroAssembler::CallStub(CodeStub* stub, Condition cond, |
2677 Register r1, const Operand& r2) { | 2830 Register r1, const Operand& r2) { |
2678 ASSERT(allow_stub_calls()); // Stub calls are not allowed in some stubs. | 2831 ASSERT(allow_stub_calls()); // Stub calls are not allowed in some stubs. |
2679 Call(stub->GetCode(), RelocInfo::CODE_TARGET, cond, r1, r2); | 2832 Call(stub->GetCode(), RelocInfo::CODE_TARGET, cond, r1, r2); |
2680 } | 2833 } |
2681 | 2834 |
2682 | 2835 |
| 2836 MaybeObject* MacroAssembler::TryCallStub(CodeStub* stub, Condition cond, |
| 2837 Register r1, const Operand& r2) { |
| 2838 ASSERT(allow_stub_calls()); // Stub calls are not allowed in some stubs. |
| 2839 Object* result; |
| 2840 { MaybeObject* maybe_result = stub->TryGetCode(); |
| 2841 if (!maybe_result->ToObject(&result)) return maybe_result; |
| 2842 } |
| 2843 Call(Handle<Code>(Code::cast(result)), RelocInfo::CODE_TARGET, cond, r1, r2); |
| 2844 return result; |
| 2845 } |
| 2846 |
| 2847 |
2683 void MacroAssembler::TailCallStub(CodeStub* stub) { | 2849 void MacroAssembler::TailCallStub(CodeStub* stub) { |
2684 ASSERT(allow_stub_calls()); // Stub calls are not allowed in some stubs. | 2850 ASSERT(allow_stub_calls()); // Stub calls are not allowed in some stubs. |
2685 Jump(stub->GetCode(), RelocInfo::CODE_TARGET); | 2851 Jump(stub->GetCode(), RelocInfo::CODE_TARGET); |
2686 } | 2852 } |
2687 | 2853 |
| 2854 MaybeObject* MacroAssembler::TryTailCallStub(CodeStub* stub, |
| 2855 Condition cond, |
| 2856 Register r1, |
| 2857 const Operand& r2) { |
| 2858 ASSERT(allow_stub_calls()); // Stub calls are not allowed in some stubs. |
| 2859 Object* result; |
| 2860 { MaybeObject* maybe_result = stub->TryGetCode(); |
| 2861 if (!maybe_result->ToObject(&result)) return maybe_result; |
| 2862 } |
| 2863 Jump(Handle<Code>(Code::cast(result)), RelocInfo::CODE_TARGET, cond, r1, r2); |
| 2864 return result; |
| 2865 } |
| 2866 |
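Both Try* variants rely on the MaybeObject protocol: stub->TryGetCode() may fail (typically when code-object allocation would require a GC), and the resulting Failure is passed back to the caller instead of being handled here. Because Object derives from MaybeObject, the success path can return the Object* unwrapped. A sketch with a hypothetical caller (TryEmitStubCall is not a V8 function):

    // Hypothetical caller demonstrating the Try* error-propagation idiom.
    MaybeObject* TryEmitStubCall(MacroAssembler* masm, CodeStub* stub) {
      Object* code;
      MaybeObject* maybe_code = stub->TryGetCode();
      if (!maybe_code->ToObject(&code)) return maybe_code;  // propagate the Failure
      masm->Call(Handle<Code>(Code::cast(code)), RelocInfo::CODE_TARGET);
      return code;  // an Object* is-a MaybeObject*, so success needs no wrapping
    }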
| 2867 |
| 2868 static int AddressOffset(ExternalReference ref0, ExternalReference ref1) { |
| 2869 return ref0.address() - ref1.address(); |
| 2870 } |
| 2871 |
| 2872 |
| 2873 MaybeObject* MacroAssembler::TryCallApiFunctionAndReturn( |
| 2874 ExternalReference function, int stack_space) { |
| 2875 ExternalReference next_address = |
| 2876 ExternalReference::handle_scope_next_address(); |
| 2877 const int kNextOffset = 0; |
| 2878 const int kLimitOffset = AddressOffset( |
| 2879 ExternalReference::handle_scope_limit_address(), |
| 2880 next_address); |
| 2881 const int kLevelOffset = AddressOffset( |
| 2882 ExternalReference::handle_scope_level_address(), |
| 2883 next_address); |
| 2884 |
| 2885 // Allocate HandleScope in callee-save registers. |
| 2886 li(s3, Operand(next_address)); |
| 2887 lw(s0, MemOperand(s3, kNextOffset)); |
| 2888 lw(s1, MemOperand(s3, kLimitOffset)); |
| 2889 lw(s2, MemOperand(s3, kLevelOffset)); |
| 2890 Addu(s2, s2, Operand(1)); |
| 2891 sw(s2, MemOperand(s3, kLevelOffset)); |
| 2892 |
| 2893 // The O32 ABI requires us to pass a pointer in a0 where the returned struct |
| 2894 // (4 bytes) will be placed. This is also built into the Simulator. |
| 2895 // Set up the pointer to the returned value (a0). It was allocated in |
| 2896 // EnterExitFrame. |
| 2897 addiu(a0, fp, ExitFrameConstants::kStackSpaceOffset); |
| 2898 |
| 2899 // Native call returns to the DirectCEntry stub which redirects to the |
| 2900 // return address pushed on stack (could have moved after GC). |
| 2901 // DirectCEntry stub itself is generated early and never moves. |
| 2902 DirectCEntryStub stub; |
| 2903 stub.GenerateCall(this, function); |
| 2904 |
| 2905 // As mentioned above, on MIPS a pointer is returned, so we need to dereference |
| 2906 // it to get the actual return value (which is also a pointer). |
| 2907 lw(v0, MemOperand(v0)); |
| 2908 |
| 2909 Label promote_scheduled_exception; |
| 2910 Label delete_allocated_handles; |
| 2911 Label leave_exit_frame; |
| 2912 |
| 2913 // If the result is non-zero, dereference it to get the result value; |
| 2914 // otherwise set it to undefined. |
| 2915 Label skip; |
| 2916 LoadRoot(a0, Heap::kUndefinedValueRootIndex); |
| 2917 Branch(&skip, eq, v0, Operand(zero_reg)); |
| 2918 lw(a0, MemOperand(v0)); |
| 2919 bind(&skip); |
| 2920 mov(v0, a0); |
| 2921 |
| 2922 // No more valid handles (the result handle was the last one). Restore |
| 2923 // previous handle scope. |
| 2924 sw(s0, MemOperand(s3, kNextOffset)); |
| 2925 if (emit_debug_code()) { |
| 2926 lw(a1, MemOperand(s3, kLevelOffset)); |
| 2927 Check(eq, "Unexpected level after return from api call", a1, Operand(s2)); |
| 2928 } |
| 2929 Subu(s2, s2, Operand(1)); |
| 2930 sw(s2, MemOperand(s3, kLevelOffset)); |
| 2931 lw(at, MemOperand(s3, kLimitOffset)); |
| 2932 Branch(&delete_allocated_handles, ne, s1, Operand(at)); |
| 2933 |
| 2934 // Check if the function scheduled an exception. |
| 2935 bind(&leave_exit_frame); |
| 2936 LoadRoot(t0, Heap::kTheHoleValueRootIndex); |
| 2937 li(at, Operand(ExternalReference::scheduled_exception_address(isolate()))); |
| 2938 lw(t1, MemOperand(at)); |
| 2939 Branch(&promote_scheduled_exception, ne, t0, Operand(t1)); |
| 2940 li(s0, Operand(stack_space)); |
| 2941 LeaveExitFrame(false, s0); |
| 2942 Ret(); |
| 2943 |
| 2944 bind(&promote_scheduled_exception); |
| 2945 MaybeObject* result = TryTailCallExternalReference( |
| 2946 ExternalReference(Runtime::kPromoteScheduledException, isolate()), 0, 1); |
| 2947 if (result->IsFailure()) { |
| 2948 return result; |
| 2949 } |
| 2950 |
| 2951 // HandleScope limit has changed. Delete allocated extensions. |
| 2952 bind(&delete_allocated_handles); |
| 2953 sw(s1, MemOperand(s3, kLimitOffset)); |
| 2954 mov(s0, v0); |
| 2955 mov(a0, v0); |
| 2956 PrepareCallCFunction(1, s1); |
| 2957 li(a0, Operand(ExternalReference::isolate_address())); |
| 2958 CallCFunction(ExternalReference::delete_handle_scope_extensions(isolate()), |
| 2959 1); |
| 2960 mov(v0, s0); |
| 2961 jmp(&leave_exit_frame); |
| 2962 |
| 2963 return result; |
| 2964 } |
| 2965 |
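TryCallApiFunctionAndReturn() keeps the handle scope's next, limit, and level fields in callee-saved registers (s0, s1, s2) so they survive the native call, bumps level on entry, and restores next and level on exit; a limit that changed underneath means the callee allocated scope extensions that must be deleted. A C++ sketch of that protocol (the HandleScopeData field names and both helper functions are assumptions for illustration, not V8 API):

    struct HandleScopeData { void** next; void** limit; int level; };  // assumed layout

    void CallApiFunction();   // stand-in for the DirectCEntryStub call
    void DeleteExtensions();  // stand-in for delete_handle_scope_extensions

    void AroundApiCall(HandleScopeData* data) {
      void** saved_next = data->next;    // lw(s0, MemOperand(s3, kNextOffset))
      void** saved_limit = data->limit;  // lw(s1, MemOperand(s3, kLimitOffset))
      data->level++;                     // Addu(s2, s2, 1); sw(s2, kLevelOffset)
      CallApiFunction();
      data->next = saved_next;           // sw(s0, kNextOffset): drop callee handles
      data->level--;                     // Subu(s2, s2, 1); sw(s2, kLevelOffset)
      if (data->limit != saved_limit) {  // Branch(&delete_allocated_handles, ne, ...)
        data->limit = saved_limit;       // sw(s1, kLimitOffset)
        DeleteExtensions();              // CallCFunction(delete_handle_scope_extensions)
      }
    }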
2688 | 2966 |
2689 void MacroAssembler::IllegalOperation(int num_arguments) { | 2967 void MacroAssembler::IllegalOperation(int num_arguments) { |
2690 if (num_arguments > 0) { | 2968 if (num_arguments > 0) { |
2691 addiu(sp, sp, num_arguments * kPointerSize); | 2969 addiu(sp, sp, num_arguments * kPointerSize); |
2692 } | 2970 } |
2693 LoadRoot(v0, Heap::kUndefinedValueRootIndex); | 2971 LoadRoot(v0, Heap::kUndefinedValueRootIndex); |
2694 } | 2972 } |
2695 | 2973 |
2696 | 2974 |
2697 void MacroAssembler::IndexFromHash(Register hash, | 2975 void MacroAssembler::IndexFromHash(Register hash, |
(...skipping 188 matching lines...)
2886 int num_arguments, | 3164 int num_arguments, |
2887 int result_size) { | 3165 int result_size) { |
2888 // TODO(1236192): Most runtime routines don't need the number of | 3166 // TODO(1236192): Most runtime routines don't need the number of |
2889 // arguments passed in because it is constant. At some point we | 3167 // arguments passed in because it is constant. At some point we |
2890 // should remove this need and make the runtime routine entry code | 3168 // should remove this need and make the runtime routine entry code |
2891 // smarter. | 3169 // smarter. |
2892 li(a0, Operand(num_arguments)); | 3170 li(a0, Operand(num_arguments)); |
2893 JumpToExternalReference(ext); | 3171 JumpToExternalReference(ext); |
2894 } | 3172 } |
2895 | 3173 |
| 3174 MaybeObject* MacroAssembler::TryTailCallExternalReference( |
| 3175 const ExternalReference& ext, int num_arguments, int result_size) { |
| 3176 // TODO(1236192): Most runtime routines don't need the number of |
| 3177 // arguments passed in because it is constant. At some point we |
| 3178 // should remove this need and make the runtime routine entry code |
| 3179 // smarter. |
| 3180 li(a0, num_arguments); |
| 3181 return TryJumpToExternalReference(ext); |
| 3182 } |
| 3183 |
2896 | 3184 |
2897 void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid, | 3185 void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid, |
2898 int num_arguments, | 3186 int num_arguments, |
2899 int result_size) { | 3187 int result_size) { |
2900 TailCallExternalReference(ExternalReference(fid, isolate()), | 3188 TailCallExternalReference(ExternalReference(fid, isolate()), |
2901 num_arguments, | 3189 num_arguments, |
2902 result_size); | 3190 result_size); |
2903 } | 3191 } |
2904 | 3192 |
2905 | 3193 |
2906 void MacroAssembler::JumpToExternalReference(const ExternalReference& builtin) { | 3194 void MacroAssembler::JumpToExternalReference(const ExternalReference& builtin) { |
2907 li(a1, Operand(builtin)); | 3195 li(a1, Operand(builtin)); |
2908 CEntryStub stub(1); | 3196 CEntryStub stub(1); |
2909 Jump(stub.GetCode(), RelocInfo::CODE_TARGET); | 3197 Jump(stub.GetCode(), RelocInfo::CODE_TARGET); |
2910 } | 3198 } |
2911 | 3199 |
2912 | 3200 |
| 3201 MaybeObject* MacroAssembler::TryJumpToExternalReference( |
| 3202 const ExternalReference& builtin) { |
| 3203 li(a1, Operand(builtin)); |
| 3204 CEntryStub stub(1); |
| 3205 return TryTailCallStub(&stub); |
| 3206 } |
| 3207 |
| 3208 |
2913 void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id, | 3209 void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id, |
2914 InvokeFlag flag, | 3210 InvokeFlag flag, |
2915 const CallWrapper& call_wrapper) { | 3211 const CallWrapper& call_wrapper) { |
2916 GetBuiltinEntry(t9, id); | 3212 GetBuiltinEntry(t9, id); |
2917 if (flag == CALL_FUNCTION) { | 3213 if (flag == CALL_FUNCTION) { |
2918 call_wrapper.BeforeCall(CallSize(t9)); | 3214 call_wrapper.BeforeCall(CallSize(t9)); |
2919 Call(t9); | 3215 Call(t9); |
2920 call_wrapper.AfterCall(); | 3216 call_wrapper.AfterCall(); |
2921 } else { | 3217 } else { |
2922 ASSERT(flag == JUMP_FUNCTION); | 3218 ASSERT(flag == JUMP_FUNCTION); |
(...skipping 214 matching lines...)
3137 | 3433 |
3138 | 3434 |
3139 void MacroAssembler::LeaveFrame(StackFrame::Type type) { | 3435 void MacroAssembler::LeaveFrame(StackFrame::Type type) { |
3140 mov(sp, fp); | 3436 mov(sp, fp); |
3141 lw(fp, MemOperand(sp, 0 * kPointerSize)); | 3437 lw(fp, MemOperand(sp, 0 * kPointerSize)); |
3142 lw(ra, MemOperand(sp, 1 * kPointerSize)); | 3438 lw(ra, MemOperand(sp, 1 * kPointerSize)); |
3143 addiu(sp, sp, 2 * kPointerSize); | 3439 addiu(sp, sp, 2 * kPointerSize); |
3144 } | 3440 } |
3145 | 3441 |
3146 | 3442 |
3147 void MacroAssembler::EnterExitFrame(Register hold_argc, | 3443 void MacroAssembler::EnterExitFrame(bool save_doubles, |
3148 Register hold_argv, | 3444 int stack_space) { |
3149 Register hold_function, | 3445 // Setup the frame structure on the stack. |
3150 bool save_doubles) { | 3446 STATIC_ASSERT(2 * kPointerSize == ExitFrameConstants::kCallerSPDisplacement); |
3151 // a0 is argc. | 3447 STATIC_ASSERT(1 * kPointerSize == ExitFrameConstants::kCallerPCOffset); |
3152 sll(t8, a0, kPointerSizeLog2); | 3448 STATIC_ASSERT(0 * kPointerSize == ExitFrameConstants::kCallerFPOffset); |
3153 addu(hold_argv, sp, t8); | |
3154 addiu(hold_argv, hold_argv, -kPointerSize); | |
3155 | 3449 |
3156 // Compute callee's stack pointer before making changes and save it as | 3450 // This is how the stack will look: |
3157 // t9 register so that it is restored as sp register on exit, thereby | 3451 // fp + 2 (==kCallerSPDisplacement) - old stack's end |
3158 // popping the args. | 3452 // [fp + 1 (==kCallerPCOffset)] - saved old ra |
3159 // t9 = sp + kPointerSize * #args | 3453 // [fp + 0 (==kCallerFPOffset)] - saved old fp |
3160 addu(t9, sp, t8); | 3454 // [fp - 1 (==kSPOffset)] - sp of the called function |
3161 | 3455 // [fp - 2 (==kCodeOffset)] - CodeObject |
3162 // Align the stack at this point. | 3456 // fp - (2 + stack_space + alignment) == sp == [fp - kSPOffset] - top of the |
3163 AlignStack(0); | 3457 // new stack (will contain saved ra) |
3164 | 3458 |
3165 // Save registers. | 3459 // Save registers. |
3166 addiu(sp, sp, -12); | 3460 addiu(sp, sp, -4 * kPointerSize); |
3167 sw(t9, MemOperand(sp, 8)); | 3461 sw(ra, MemOperand(sp, 3 * kPointerSize)); |
3168 sw(ra, MemOperand(sp, 4)); | 3462 sw(fp, MemOperand(sp, 2 * kPointerSize)); |
3169 sw(fp, MemOperand(sp, 0)); | 3463 addiu(fp, sp, 2 * kPointerSize); // Setup new frame pointer. |
3170 mov(fp, sp); // Setup new frame pointer. | |
3171 | 3464 |
3172 li(t8, Operand(CodeObject())); | 3465 if (emit_debug_code()) { |
3173 push(t8); // Accessed from ExitFrame::code_slot. | 3466 sw(zero_reg, MemOperand(fp, ExitFrameConstants::kSPOffset)); |
| 3467 } |
| 3468 |
| 3469 li(t8, Operand(CodeObject())); // Accessed from ExitFrame::code_slot. |
| 3470 sw(t8, MemOperand(fp, ExitFrameConstants::kCodeOffset)); |
3174 | 3471 |
3175 // Save the frame pointer and the context in top. | 3472 // Save the frame pointer and the context in top. |
3176 li(t8, Operand(ExternalReference(Isolate::k_c_entry_fp_address, isolate()))); | 3473 li(t8, Operand(ExternalReference(Isolate::k_c_entry_fp_address, isolate()))); |
3177 sw(fp, MemOperand(t8)); | 3474 sw(fp, MemOperand(t8)); |
3178 li(t8, Operand(ExternalReference(Isolate::k_context_address, isolate()))); | 3475 li(t8, Operand(ExternalReference(Isolate::k_context_address, isolate()))); |
3179 sw(cp, MemOperand(t8)); | 3476 sw(cp, MemOperand(t8)); |
3180 | 3477 |
3181 // Setup argc and the builtin function in callee-saved registers. | 3478 // Ensure we are not saving doubles, since it's not implemented yet. |
3182 mov(hold_argc, a0); | 3479 ASSERT(save_doubles == 0); |
3183 mov(hold_function, a1); | |
3184 | 3480 |
3185 // Optionally save all double registers. | 3481 // Reserve place for the return address, stack space and an optional slot |
3186 if (save_doubles) { | 3482 // (used by the DirectCEntryStub to hold the return value if a struct is |
3187 #ifdef DEBUG | 3483 // returned) and align the frame preparing for calling the runtime function. |
3188 int frame_alignment = ActivationFrameAlignment(); | 3484 ASSERT(stack_space >= 0); |
3189 #endif | 3485 const int frame_alignment = MacroAssembler::ActivationFrameAlignment(); |
3190 // The stack alignment code above made sp unaligned, so add space for one | 3486 Subu(sp, sp, Operand((stack_space + 2) * kPointerSize)); |
3191 // more double register and use aligned addresses. | 3487 if (frame_alignment > 0) { |
3192 ASSERT(kDoubleSize == frame_alignment); | 3488 ASSERT(IsPowerOf2(frame_alignment)); |
3193 // Mark the frame as containing doubles by pushing a non-valid return | 3489 And(sp, sp, Operand(-frame_alignment)); // Align stack. |
3194 // address, i.e. 0. | |
3195 ASSERT(ExitFrameConstants::kMarkerOffset == -2 * kPointerSize); | |
3196 push(zero_reg); // Marker and alignment word. | |
3197 int space = FPURegister::kNumRegisters * kDoubleSize + kPointerSize; | |
3198 Subu(sp, sp, Operand(space)); | |
3199 // Remember: we only need to save every 2nd double FPU value. | |
3200 for (int i = 0; i < FPURegister::kNumRegisters; i+=2) { | |
3201 FPURegister reg = FPURegister::from_code(i); | |
3202 sdc1(reg, MemOperand(sp, i * kDoubleSize + kPointerSize)); | |
3203 } | |
3204 // Note that f0 will be accessible at fp - 2*kPointerSize - | |
3205 // FPURegister::kNumRegisters * kDoubleSize, since the code slot and the | |
3206 // alignment word were pushed after the fp. | |
3207 } | 3490 } |
| 3491 |
| 3492 // Set the exit frame sp value to point just before the return address |
| 3493 // location. |
| 3494 addiu(at, sp, kPointerSize); |
| 3495 sw(at, MemOperand(fp, ExitFrameConstants::kSPOffset)); |
3208 } | 3496 } |
3209 | 3497 |
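The tail of the new EnterExitFrame() reserves stack_space plus two extra words (one for the return address, one for the DirectCEntryStub struct-return slot) and then rounds sp down to the activation frame alignment. Since -frame_alignment in two's complement has only the high bits set, And(sp, sp, -frame_alignment) is exactly that rounding. A worked sketch of the arithmetic, assuming kPointerSize == 4:

    #include <cassert>
    #include <cstdint>

    uint32_t AlignedExitFrameSp(uint32_t sp, int stack_space, int frame_alignment) {
      sp -= (stack_space + 2) * 4;  // Subu(sp, sp, (stack_space + 2) * kPointerSize)
      if (frame_alignment > 0) {
        assert((frame_alignment & (frame_alignment - 1)) == 0);  // IsPowerOf2
        sp &= static_cast<uint32_t>(-frame_alignment);  // And(sp, sp, -frame_alignment)
      }
      return sp;
    }

    // Example: AlignedExitFrameSp(0x7fff0014, 1, 8) == 0x7fff0008:
    // 12 bytes reserved, and the result is already 8-byte aligned.

The frame's kSPOffset slot then records sp + kPointerSize, the word just before the return-address location, so the stack walker can find the frame top later.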
3210 | 3498 |
3211 void MacroAssembler::LeaveExitFrame(bool save_doubles) { | 3499 void MacroAssembler::LeaveExitFrame(bool save_doubles, |
3212 // Optionally restore all double registers. | 3500 Register argument_count) { |
3213 if (save_doubles) { | 3501 // Ensure we are not restoring doubles, since it's not implemented yet. |
3214 // TODO(regis): Use vldrm instruction. | 3502 ASSERT(save_doubles == 0); |
3215 // Remember: we only need to restore every 2nd double FPU value. | |
3216 for (int i = 0; i < FPURegister::kNumRegisters; i+=2) { | |
3217 FPURegister reg = FPURegister::from_code(i); | |
3218 // Register f30-f31 is just below the marker. | |
3219 const int offset = ExitFrameConstants::kMarkerOffset; | |
3220 ldc1(reg, MemOperand(fp, | |
3221 (i - FPURegister::kNumRegisters) * kDoubleSize + offset)); | |
3222 } | |
3223 } | |
3224 | 3503 |
3225 // Clear top frame. | 3504 // Clear top frame. |
3226 li(t8, Operand(ExternalReference(Isolate::k_c_entry_fp_address, isolate()))); | 3505 li(t8, Operand(ExternalReference(Isolate::k_c_entry_fp_address, isolate()))); |
3227 sw(zero_reg, MemOperand(t8)); | 3506 sw(zero_reg, MemOperand(t8)); |
3228 | 3507 |
3229 // Restore current context from top and clear it in debug mode. | 3508 // Restore current context from top and clear it in debug mode. |
3230 li(t8, Operand(ExternalReference(Isolate::k_context_address, isolate()))); | 3509 li(t8, Operand(ExternalReference(Isolate::k_context_address, isolate()))); |
3231 lw(cp, MemOperand(t8)); | 3510 lw(cp, MemOperand(t8)); |
3232 #ifdef DEBUG | 3511 #ifdef DEBUG |
3233 sw(a3, MemOperand(t8)); | 3512 sw(a3, MemOperand(t8)); |
3234 #endif | 3513 #endif |
3235 | 3514 |
3236 // Pop the arguments, restore registers, and return. | 3515 // Pop the arguments, restore registers, and return. |
3237 mov(sp, fp); // Respect ABI stack constraint. | 3516 mov(sp, fp); // Respect ABI stack constraint. |
3238 lw(fp, MemOperand(sp, 0)); | 3517 lw(fp, MemOperand(sp, ExitFrameConstants::kCallerFPOffset)); |
3239 lw(ra, MemOperand(sp, 4)); | 3518 lw(ra, MemOperand(sp, ExitFrameConstants::kCallerPCOffset)); |
3240 lw(sp, MemOperand(sp, 8)); | 3519 addiu(sp, sp, 8); |
3241 jr(ra); | 3520 if (argument_count.is_valid()) { |
3242 nop(); // Branch delay slot nop. | 3521 sll(t8, argument_count, kPointerSizeLog2); |
| 3522 addu(sp, sp, t8); |
| 3523 } |
3243 } | 3524 } |
3244 | 3525 |
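LeaveExitFrame() undoes this in reverse: sp snaps back to fp, the caller's fp and ra are reloaded from their kCallerFPOffset and kCallerPCOffset slots, those two words are dropped, and, if a valid argument_count register was passed, the arguments are popped too. The pointer arithmetic in sketch form (kPointerSize == 4; a negative argument_count models an invalid register):

    uint32_t LeaveExitFrameSp(uint32_t fp, int argument_count) {
      uint32_t sp = fp;            // mov(sp, fp)
      sp += 2 * 4;                 // addiu(sp, sp, 8): skip saved fp and ra
      if (argument_count >= 0) {   // argument_count.is_valid()
        sp += argument_count * 4;  // sll(t8, argument_count, 2); addu(sp, sp, t8)
      }
      return sp;
    }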
3245 | 3526 |
3246 void MacroAssembler::InitializeNewString(Register string, | 3527 void MacroAssembler::InitializeNewString(Register string, |
3247 Register length, | 3528 Register length, |
3248 Heap::RootListIndex map_index, | 3529 Heap::RootListIndex map_index, |
3249 Register scratch1, | 3530 Register scratch1, |
3250 Register scratch2) { | 3531 Register scratch2) { |
3251 sll(scratch1, length, kSmiTagSize); | 3532 sll(scratch1, length, kSmiTagSize); |
3252 LoadRoot(scratch2, map_index); | 3533 LoadRoot(scratch2, map_index); |
(...skipping 13 matching lines...)
3266 return OS::ActivationFrameAlignment(); | 3547 return OS::ActivationFrameAlignment(); |
3267 #else // defined(V8_HOST_ARCH_MIPS) | 3548 #else // defined(V8_HOST_ARCH_MIPS) |
3268 // If we are using the simulator then we should always align to the expected | 3549 // If we are using the simulator then we should always align to the expected |
3269 // alignment. As the simulator is used to generate snapshots we do not know | 3550 // alignment. As the simulator is used to generate snapshots we do not know |
3270 // if the target platform will need alignment, so this is controlled from a | 3551 // if the target platform will need alignment, so this is controlled from a |
3271 // flag. | 3552 // flag. |
3272 return FLAG_sim_stack_alignment; | 3553 return FLAG_sim_stack_alignment; |
3273 #endif // defined(V8_HOST_ARCH_MIPS) | 3554 #endif // defined(V8_HOST_ARCH_MIPS) |
3274 } | 3555 } |
3275 | 3556 |
| 3557 void MacroAssembler::AssertStackIsAligned() { |
| 3558 if (emit_debug_code()) { |
| 3559 const int frame_alignment = ActivationFrameAlignment(); |
| 3560 const int frame_alignment_mask = frame_alignment - 1; |
3276 | 3561 |
3277 void MacroAssembler::AlignStack(int offset) { | 3562 if (frame_alignment > kPointerSize) { |
3278 // On MIPS an offset of 0 aligns to 0 modulo 8 bytes, | 3563 Label alignment_as_expected; |
3279 // and an offset of 1 aligns to 4 modulo 8 bytes. | 3564 ASSERT(IsPowerOf2(frame_alignment)); |
3280 #if defined(V8_HOST_ARCH_MIPS) | 3565 andi(at, sp, frame_alignment_mask); |
3281 // Running on the real platform. Use the alignment as mandated by the local | 3566 Branch(&alignment_as_expected, eq, at, Operand(zero_reg)); |
3282 // environment. | 3567 // Don't use Check here, as it will call Runtime_Abort re-entering here. |
3283 // Note: This will break if we ever start generating snapshots on one MIPS | 3568 stop("Unexpected stack alignment"); |
3284 // platform for another MIPS platform with a different alignment. | 3569 bind(&alignment_as_expected); |
3285 int activation_frame_alignment = OS::ActivationFrameAlignment(); | 3570 } |
3286 #else // defined(V8_HOST_ARCH_MIPS) | |
3287 // If we are using the simulator then we should always align to the expected | |
3288 // alignment. As the simulator is used to generate snapshots we do not know | |
3289 // if the target platform will need alignment, so we will always align at | |
3290 // this point here. | |
3291 int activation_frame_alignment = 2 * kPointerSize; | |
3292 #endif // defined(V8_HOST_ARCH_MIPS) | |
3293 if (activation_frame_alignment != kPointerSize) { | |
3294 // This code needs to be made more general if this assert doesn't hold. | |
3295 ASSERT(activation_frame_alignment == 2 * kPointerSize); | |
3296 if (offset == 0) { | |
3297 andi(t8, sp, activation_frame_alignment - 1); | |
3298 Push(zero_reg, eq, t8, zero_reg); | |
3299 } else { | |
3300 andi(t8, sp, activation_frame_alignment - 1); | |
3301 addiu(t8, t8, -4); | |
3302 Push(zero_reg, eq, t8, zero_reg); | |
3303 } | 3571 } |
3304 } | |
3305 } | 3572 } |
3306 | 3573 |
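AssertStackIsAligned() turns the old AlignStack() padding into a debug-only check: for a power-of-two alignment, sp is aligned exactly when its bits under the mask frame_alignment - 1 are all zero, which is what the andi tests. An illustrative predicate:

    bool StackIsAligned(uint32_t sp, uint32_t frame_alignment) {
      // andi(at, sp, frame_alignment_mask): aligned iff the masked bits are zero.
      return (sp & (frame_alignment - 1)) == 0;
    }

Note the stop() instead of Check(): aborting through the runtime would re-enter this code on an already misaligned stack.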
3307 | 3574 |
3308 | |
3309 void MacroAssembler::JumpIfNotPowerOfTwoOrZero( | 3575 void MacroAssembler::JumpIfNotPowerOfTwoOrZero( |
3310 Register reg, | 3576 Register reg, |
3311 Register scratch, | 3577 Register scratch, |
3312 Label* not_power_of_two_or_zero) { | 3578 Label* not_power_of_two_or_zero) { |
3313 Subu(scratch, reg, Operand(1)); | 3579 Subu(scratch, reg, Operand(1)); |
3314 Branch(USE_DELAY_SLOT, not_power_of_two_or_zero, lt, | 3580 Branch(USE_DELAY_SLOT, not_power_of_two_or_zero, lt, |
3315 scratch, Operand(zero_reg)); | 3581 scratch, Operand(zero_reg)); |
3316 and_(at, scratch, reg); // In the delay slot. | 3582 and_(at, scratch, reg); // In the delay slot. |
3317 Branch(not_power_of_two_or_zero, ne, at, Operand(zero_reg)); | 3583 Branch(not_power_of_two_or_zero, ne, at, Operand(zero_reg)); |
3318 } | 3584 } |
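JumpIfNotPowerOfTwoOrZero() is the classic x & (x - 1) test: subtracting one borrows through the lowest set bit, so the AND is zero exactly for powers of two, while the signed lt branch on reg - 1 filters out reg == 0 (and negative values) first. The scalar predicate the two branches implement, with wraparound behavior matching Subu:

    bool IsNonZeroPowerOfTwo(int32_t reg) {
      int32_t scratch = reg - 1;      // Subu(scratch, reg, Operand(1))
      if (scratch < 0) return false;  // Branch(..., lt, ...): reg was 0 or negative
      return (scratch & reg) == 0;    // and_(at, scratch, reg); Branch(..., ne, ...)
    }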
(...skipping 287 matching lines...)
3606 opcode == BGTZL); | 3872 opcode == BGTZL); |
3607 opcode = (cond == eq) ? BEQ : BNE; | 3873 opcode = (cond == eq) ? BEQ : BNE; |
3608 instr = (instr & ~kOpcodeMask) | opcode; | 3874 instr = (instr & ~kOpcodeMask) | opcode; |
3609 masm_.emit(instr); | 3875 masm_.emit(instr); |
3610 } | 3876 } |
3611 | 3877 |
3612 | 3878 |
3613 } } // namespace v8::internal | 3879 } } // namespace v8::internal |
3614 | 3880 |
3615 #endif // V8_TARGET_ARCH_MIPS | 3881 #endif // V8_TARGET_ARCH_MIPS |