| OLD | NEW |
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 175 matching lines...) |
| 186 kPageSizeBits - Page::kRegionSizeLog2); | 186 kPageSizeBits - Page::kRegionSizeLog2); |
| 187 | 187 |
| 188 // Mark region dirty. | 188 // Mark region dirty. |
| 189 lw(scratch, MemOperand(object, Page::kDirtyFlagOffset)); | 189 lw(scratch, MemOperand(object, Page::kDirtyFlagOffset)); |
| 190 li(at, Operand(1)); | 190 li(at, Operand(1)); |
| 191 sllv(at, at, address); | 191 sllv(at, at, address); |
| 192 or_(scratch, scratch, at); | 192 or_(scratch, scratch, at); |
| 193 sw(scratch, MemOperand(object, Page::kDirtyFlagOffset)); | 193 sw(scratch, MemOperand(object, Page::kDirtyFlagOffset)); |
| 194 } | 194 } |
| 195 | 195 |
| 196 |
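The hunk above sets a dirty bit for the store's memory region: the region
number (already computed into 'address') is used as a shift amount, and the
resulting one-bit mask is OR'd into the page's dirty-flag word. A minimal C++
sketch of that read-modify-write, with 'Page' and 'region' as illustrative
stand-ins for the real layout:

    #include <cstdint>

    struct Page { uint32_t dirty_flags; };  // stand-in for kDirtyFlagOffset

    // Equivalent of the lw / li / sllv / or_ / sw sequence above.
    void MarkRegionDirty(Page* page, uint32_t region) {
      page->dirty_flags |= 1u << region;  // set this region's dirty bit
    }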
| 196 // Push and pop all registers that can hold pointers. | 197 // Push and pop all registers that can hold pointers. |
| 197 void MacroAssembler::PushSafepointRegisters() { | 198 void MacroAssembler::PushSafepointRegisters() { |
| 198 // Safepoints expect a block of kNumSafepointRegisters values on the | 199 // Safepoints expect a block of kNumSafepointRegisters values on the |
| 199 // stack, so adjust the stack for unsaved registers. | 200 // stack, so adjust the stack for unsaved registers. |
| 200 const int num_unsaved = kNumSafepointRegisters - kNumSafepointSavedRegisters; | 201 const int num_unsaved = kNumSafepointRegisters - kNumSafepointSavedRegisters; |
| 201 ASSERT(num_unsaved >= 0); | 202 ASSERT(num_unsaved >= 0); |
| 202 Subu(sp, sp, Operand(num_unsaved * kPointerSize)); | 203 Subu(sp, sp, Operand(num_unsaved * kPointerSize)); |
| 203 MultiPush(kSafepointSavedRegisters); | 204 MultiPush(kSafepointSavedRegisters); |
| 204 } | 205 } |
| 205 | 206 |
| 207 |
| 206 void MacroAssembler::PopSafepointRegisters() { | 208 void MacroAssembler::PopSafepointRegisters() { |
| 207 const int num_unsaved = kNumSafepointRegisters - kNumSafepointSavedRegisters; | 209 const int num_unsaved = kNumSafepointRegisters - kNumSafepointSavedRegisters; |
| 208 MultiPop(kSafepointSavedRegisters); | 210 MultiPop(kSafepointSavedRegisters); |
| 209 Addu(sp, sp, Operand(num_unsaved * kPointerSize)); | 211 Addu(sp, sp, Operand(num_unsaved * kPointerSize)); |
| 210 } | 212 } |
| 211 | 213 |
| 214 |
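The push/pop pair above keeps the safepoint layout fixed: safepoint tables
expect a block of kNumSafepointRegisters slots, so the gap for registers that
are not actually saved is reserved with a Subu before MultiPush and released
with an Addu after MultiPop, in mirror order. A hedged sketch of the
arithmetic (constants here are illustrative, not the real MIPS values):

    constexpr int kNumSafepointRegisters = 24;       // illustrative
    constexpr int kNumSafepointSavedRegisters = 14;  // illustrative
    constexpr int kPointerSize = 4;

    // Bytes of stack reserved for the unsaved-register gap; sp moves by
    // this amount on both the push (Subu) and pop (Addu) paths.
    constexpr int kUnsavedGapBytes =
        (kNumSafepointRegisters - kNumSafepointSavedRegisters) * kPointerSize;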
| 212 void MacroAssembler::PushSafepointRegistersAndDoubles() { | 215 void MacroAssembler::PushSafepointRegistersAndDoubles() { |
| 213 PushSafepointRegisters(); | 216 PushSafepointRegisters(); |
| 214 Subu(sp, sp, Operand(FPURegister::kNumAllocatableRegisters * kDoubleSize)); | 217 Subu(sp, sp, Operand(FPURegister::kNumAllocatableRegisters * kDoubleSize)); |
| 215 for (int i = 0; i < FPURegister::kNumAllocatableRegisters; i+=2) { | 218 for (int i = 0; i < FPURegister::kNumAllocatableRegisters; i+=2) { |
| 216 FPURegister reg = FPURegister::FromAllocationIndex(i); | 219 FPURegister reg = FPURegister::FromAllocationIndex(i); |
| 217 sdc1(reg, MemOperand(sp, i * kDoubleSize)); | 220 sdc1(reg, MemOperand(sp, i * kDoubleSize)); |
| 218 } | 221 } |
| 219 } | 222 } |
| 220 | 223 |
| 224 |
| 221 void MacroAssembler::PopSafepointRegistersAndDoubles() { | 225 void MacroAssembler::PopSafepointRegistersAndDoubles() { |
| 222 for (int i = 0; i < FPURegister::kNumAllocatableRegisters; i+=2) { | 226 for (int i = 0; i < FPURegister::kNumAllocatableRegisters; i+=2) { |
| 223 FPURegister reg = FPURegister::FromAllocationIndex(i); | 227 FPURegister reg = FPURegister::FromAllocationIndex(i); |
| 224 ldc1(reg, MemOperand(sp, i * kDoubleSize)); | 228 ldc1(reg, MemOperand(sp, i * kDoubleSize)); |
| 225 } | 229 } |
| 226 Addu(sp, sp, Operand(FPURegister::kNumAllocatableRegisters * kDoubleSize)); | 230 Addu(sp, sp, Operand(FPURegister::kNumAllocatableRegisters * kDoubleSize)); |
| 227 PopSafepointRegisters(); | 231 PopSafepointRegisters(); |
| 228 } | 232 } |
| 229 | 233 |
| 234 |
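Note the stride of 2 in both loops: only even-numbered allocatable FPU
registers are stored and reloaded, consistent with MIPS32 doubles occupying
an even/odd register pair, while slots are still indexed by i * kDoubleSize
(so the odd-numbered slots in the block stay unused). A hedged C++ model of
the save loop, with an illustrative register count:

    constexpr int kNumAllocatableFPU = 12;  // illustrative, not V8's value

    // fpu[] models the allocatable double registers, block[] the stack
    // area of kNumAllocatableFPU * kDoubleSize bytes reserved above.
    void SaveDoubles(double* block, const double* fpu) {
      for (int i = 0; i < kNumAllocatableFPU; i += 2) {
        block[i] = fpu[i];  // sdc1(reg, MemOperand(sp, i * kDoubleSize))
      }
    }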
| 230 void MacroAssembler::StoreToSafepointRegistersAndDoublesSlot(Register src, | 235 void MacroAssembler::StoreToSafepointRegistersAndDoublesSlot(Register src, |
| 231 Register dst) { | 236 Register dst) { |
| 232 sw(src, SafepointRegistersAndDoublesSlot(dst)); | 237 sw(src, SafepointRegistersAndDoublesSlot(dst)); |
| 233 } | 238 } |
| 234 | 239 |
| 235 | 240 |
| 236 void MacroAssembler::StoreToSafepointRegisterSlot(Register src, Register dst) { | 241 void MacroAssembler::StoreToSafepointRegisterSlot(Register src, Register dst) { |
| 237 sw(src, SafepointRegisterSlot(dst)); | 242 sw(src, SafepointRegisterSlot(dst)); |
| 238 } | 243 } |
| 239 | 244 |
| (...skipping 797 matching lines...) |
| 1037 // Restore sign if necessary. | 1042 // Restore sign if necessary. |
| 1038 mov(scratch, sign); | 1043 mov(scratch, sign); |
| 1039 result = sign; | 1044 result = sign; |
| 1040 sign = no_reg; | 1045 sign = no_reg; |
| 1041 Subu(result, zero_reg, input_high); | 1046 Subu(result, zero_reg, input_high); |
| 1042 movz(result, input_high, scratch); | 1047 movz(result, input_high, scratch); |
| 1043 bind(&done); | 1048 bind(&done); |
| 1044 } | 1049 } |
| 1045 | 1050 |
| 1046 | 1051 |
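The tail above restores the sign without branching: Subu computes the negated
magnitude, and movz overwrites it with the original value only when the saved
sign word is zero. A C equivalent of the conditional-move idiom:

    #include <cstdint>

    // movz(result, input_high, scratch): result = input_high if the
    // sign word (scratch) is zero, else keep the negated value.
    int32_t RestoreSign(int32_t input_high, uint32_t sign_word) {
      int32_t result = -input_high;             // Subu(result, zero_reg, ...)
      if (sign_word == 0) result = input_high;  // movz
      return result;
    }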
| 1052 void MacroAssembler::EmitECMATruncate(Register result, |
| 1053 FPURegister double_input, |
| 1054 FPURegister single_scratch, |
| 1055 Register scratch, |
| 1056 Register input_high, |
| 1057 Register input_low) { |
| 1058 CpuFeatures::Scope scope(FPU); |
| 1059 ASSERT(!input_high.is(result)); |
| 1060 ASSERT(!input_low.is(result)); |
| 1061 ASSERT(!input_low.is(input_high)); |
| 1062 ASSERT(!scratch.is(result) && |
| 1063 !scratch.is(input_high) && |
| 1064 !scratch.is(input_low)); |
| 1065 ASSERT(!single_scratch.is(double_input)); |
| 1066 |
| 1067 Label done; |
| 1068 Label manual; |
| 1069 |
| 1070 // Clear cumulative exception flags and save the FCSR. |
| 1071 Register scratch2 = input_high; |
| 1072 cfc1(scratch2, FCSR); |
| 1073 ctc1(zero_reg, FCSR); |
| 1074 // Try a conversion to a signed integer. |
| 1075 trunc_w_d(single_scratch, double_input); |
| 1076 mfc1(result, single_scratch); |
| 1077 // Retrieve and restore the FCSR. |
| 1078 cfc1(scratch, FCSR); |
| 1079 ctc1(scratch2, FCSR); |
| 1080 // Check for overflow and NaNs. |
| 1081 And(scratch, |
| 1082 scratch, |
| 1083 kFCSROverflowFlagMask | kFCSRUnderflowFlagMask | kFCSRInvalidOpFlagMask); |
| 1084 // If we had no exceptions we are done. |
| 1085 Branch(&done, eq, scratch, Operand(zero_reg)); |
| 1086 |
| 1087 // Load the double value and perform a manual truncation. |
| 1088 Move(input_low, input_high, double_input); |
| 1089 EmitOutOfInt32RangeTruncate(result, |
| 1090 input_high, |
| 1091 input_low, |
| 1092 scratch); |
| 1093 bind(&done); |
| 1094 } |
| 1095 |
| 1096 |
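EmitECMATruncate first tries the cheap path: clear the FCSR cause bits, run a
hardware trunc_w_d, and restore the caller's FCSR; if the overflow, underflow,
or invalid-operation flag fired, the value did not fit in int32 and the code
falls back to EmitOutOfInt32RangeTruncate on the raw double words. That
fallback implements ECMA-262 ToInt32, i.e. truncate then wrap modulo 2^32. A
hedged, self-contained model of the semantics (SlowToInt32 stands in for the
manual bit-twiddling path and assumes two's-complement wrap on the final cast):

    #include <cmath>
    #include <cstdint>

    int32_t SlowToInt32(double d) {
      if (!std::isfinite(d)) return 0;  // NaN and +/-Inf map to 0
      double m = std::fmod(std::trunc(d), 4294967296.0);  // mod 2^32
      if (m < 0) m += 4294967296.0;
      return static_cast<int32_t>(static_cast<uint32_t>(m));
    }

    int32_t EcmaTruncate(double d) {
      if (d >= -2147483648.0 && d < 2147483648.0) {
        return static_cast<int32_t>(d);  // trunc_w_d path, no FCSR flags
      }
      return SlowToInt32(d);  // flags set: take the out-of-range path
    }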
| 1047 void MacroAssembler::GetLeastBitsFromSmi(Register dst, | 1097 void MacroAssembler::GetLeastBitsFromSmi(Register dst, |
| 1048 Register src, | 1098 Register src, |
| 1049 int num_least_bits) { | 1099 int num_least_bits) { |
| 1050 Ext(dst, src, kSmiTagSize, num_least_bits); | 1100 Ext(dst, src, kSmiTagSize, num_least_bits); |
| 1051 } | 1101 } |
| 1052 | 1102 |
| 1053 | 1103 |
| 1054 void MacroAssembler::GetLeastBitsFromInt32(Register dst, | 1104 void MacroAssembler::GetLeastBitsFromInt32(Register dst, |
| 1055 Register src, | 1105 Register src, |
| 1056 int num_least_bits) { | 1106 int num_least_bits) { |
| (...skipping 1533 matching lines...) |
| 2590 lbu(scratch, MemOperand(src)); | 2640 lbu(scratch, MemOperand(src)); |
| 2591 Addu(src, src, 1); | 2641 Addu(src, src, 1); |
| 2592 sb(scratch, MemOperand(dst)); | 2642 sb(scratch, MemOperand(dst)); |
| 2593 Addu(dst, dst, 1); | 2643 Addu(dst, dst, 1); |
| 2594 Subu(length, length, Operand(1)); | 2644 Subu(length, length, Operand(1)); |
| 2595 Branch(&byte_loop_1, ne, length, Operand(zero_reg)); | 2645 Branch(&byte_loop_1, ne, length, Operand(zero_reg)); |
| 2596 bind(&done); | 2646 bind(&done); |
| 2597 } | 2647 } |
| 2598 | 2648 |
| 2599 | 2649 |
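The tail above is the byte-at-a-time remainder of CopyBytes: load a byte,
advance src, store it, advance dst, decrement length, and loop while length
is nonzero. Its direct C counterpart:

    #include <cstddef>
    #include <cstdint>

    // Equivalent of the lbu/Addu/sb/Addu/Subu/Branch loop above.
    void CopyBytesTail(const uint8_t* src, uint8_t* dst, size_t length) {
      while (length != 0) {
        *dst++ = *src++;
        --length;
      }
    }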
| 2650 void MacroAssembler::CheckFastElements(Register map, |
| 2651 Register scratch, |
| 2652 Label* fail) { |
| 2653 STATIC_ASSERT(JSObject::FAST_ELEMENTS == 0); |
| 2654 lbu(scratch, FieldMemOperand(map, Map::kBitField2Offset)); |
| 2655 And(scratch, scratch, Operand(Map::kMaximumBitField2FastElementValue)); |
| 2656 Branch(fail, hi, scratch, Operand(zero_reg)); |
| 2657 } |
| 2658 |
| 2659 |
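The new CheckFastElements leans on the STATIC_ASSERT that FAST_ELEMENTS is
elements-kind zero: it reads the map's bit field 2 and branches to fail
unless the masked value stays within the fast-element range. As I read the
intent (the constant below is an illustrative stand-in, not V8's value):

    #include <cstdint>

    constexpr uint8_t kMaximumBitField2FastElementValue = 0x07;  // stand-in

    // Fast elements iff the relevant bits of bit field 2 do not exceed
    // the maximum fast-element value.
    bool HasFastElements(uint8_t bit_field2) {
      return bit_field2 <= kMaximumBitField2FastElementValue;
    }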
| 2600 void MacroAssembler::CheckMap(Register obj, | 2660 void MacroAssembler::CheckMap(Register obj, |
| 2601 Register scratch, | 2661 Register scratch, |
| 2602 Handle<Map> map, | 2662 Handle<Map> map, |
| 2603 Label* fail, | 2663 Label* fail, |
| 2604 SmiCheckType smi_check_type) { | 2664 SmiCheckType smi_check_type) { |
| 2605 if (smi_check_type == DO_SMI_CHECK) { | 2665 if (smi_check_type == DO_SMI_CHECK) { |
| 2606 JumpIfSmi(obj, fail); | 2666 JumpIfSmi(obj, fail); |
| 2607 } | 2667 } |
| 2608 lw(scratch, FieldMemOperand(obj, HeapObject::kMapOffset)); | 2668 lw(scratch, FieldMemOperand(obj, HeapObject::kMapOffset)); |
| 2609 li(at, Operand(map)); | 2669 li(at, Operand(map)); |
| (...skipping 236 matching lines...) |
| 2846 sra(expected_reg, expected_reg, kSmiTagSize); | 2906 sra(expected_reg, expected_reg, kSmiTagSize); |
| 2847 lw(code_reg, FieldMemOperand(a1, JSFunction::kCodeEntryOffset)); | 2907 lw(code_reg, FieldMemOperand(a1, JSFunction::kCodeEntryOffset)); |
| 2848 | 2908 |
| 2849 ParameterCount expected(expected_reg); | 2909 ParameterCount expected(expected_reg); |
| 2850 InvokeCode(code_reg, expected, actual, flag, call_wrapper, call_kind); | 2910 InvokeCode(code_reg, expected, actual, flag, call_wrapper, call_kind); |
| 2851 } | 2911 } |
| 2852 | 2912 |
| 2853 | 2913 |
| 2854 void MacroAssembler::InvokeFunction(JSFunction* function, | 2914 void MacroAssembler::InvokeFunction(JSFunction* function, |
| 2855 const ParameterCount& actual, | 2915 const ParameterCount& actual, |
| 2856 InvokeFlag flag) { | 2916 InvokeFlag flag, |
| 2917 CallKind call_kind) { |
| 2857 ASSERT(function->is_compiled()); | 2918 ASSERT(function->is_compiled()); |
| 2858 | 2919 |
| 2859 // Get the function and setup the context. | 2920 // Get the function and setup the context. |
| 2860 li(a1, Operand(Handle<JSFunction>(function))); | 2921 li(a1, Operand(Handle<JSFunction>(function))); |
| 2861 lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset)); | 2922 lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset)); |
| 2862 | 2923 |
| 2863 // Invoke the cached code. | 2924 // Invoke the cached code. |
| 2864 Handle<Code> code(function->code()); | 2925 Handle<Code> code(function->code()); |
| 2865 ParameterCount expected(function->shared()->formal_parameter_count()); | 2926 ParameterCount expected(function->shared()->formal_parameter_count()); |
| 2866 if (V8::UseCrankshaft()) { | 2927 if (V8::UseCrankshaft()) { |
| 2867 UNIMPLEMENTED_MIPS(); | 2928 UNIMPLEMENTED_MIPS(); |
| 2868 } else { | 2929 } else { |
| 2869 InvokeCode(code, expected, actual, RelocInfo::CODE_TARGET, flag); | 2930 InvokeCode(code, expected, actual, RelocInfo::CODE_TARGET, flag, call_kind); |
| 2870 } | 2931 } |
| 2871 } | 2932 } |
| 2872 | 2933 |
| 2873 | 2934 |
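The signature change threads a CallKind from InvokeFunction through to
InvokeCode, so callees can tell whether they were invoked as a method or as a
plain function; on MIPS the kind is materialized into a register via
SetCallKind (see InvokeBuiltin later in this patch). The flag itself is,
roughly, a two-value enum, shown here only for illustration:

    // Sketch of the distinction being plumbed through this patch.
    enum CallKind {
      CALL_AS_METHOD,   // receiver is an object (e.g. o.f())
      CALL_AS_FUNCTION  // plain call (e.g. f())
    };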
| 2874 void MacroAssembler::IsObjectJSObjectType(Register heap_object, | 2935 void MacroAssembler::IsObjectJSObjectType(Register heap_object, |
| 2875 Register map, | 2936 Register map, |
| 2876 Register scratch, | 2937 Register scratch, |
| 2877 Label* fail) { | 2938 Label* fail) { |
| 2878 lw(map, FieldMemOperand(heap_object, HeapObject::kMapOffset)); | 2939 lw(map, FieldMemOperand(heap_object, HeapObject::kMapOffset)); |
| 2879 IsInstanceJSObjectType(map, scratch, fail); | 2940 IsInstanceJSObjectType(map, scratch, fail); |
| 2880 } | 2941 } |
| 2881 | 2942 |
| 2882 | 2943 |
| 2883 void MacroAssembler::IsInstanceJSObjectType(Register map, | 2944 void MacroAssembler::IsInstanceJSObjectType(Register map, |
| 2884 Register scratch, | 2945 Register scratch, |
| 2885 Label* fail) { | 2946 Label* fail) { |
| 2886 lbu(scratch, FieldMemOperand(map, Map::kInstanceTypeOffset)); | 2947 lbu(scratch, FieldMemOperand(map, Map::kInstanceTypeOffset)); |
| 2887 Branch(fail, lt, scratch, Operand(FIRST_JS_OBJECT_TYPE)); | 2948 Branch(fail, lt, scratch, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE)); |
| 2888 Branch(fail, gt, scratch, Operand(LAST_JS_OBJECT_TYPE)); | 2949 Branch(fail, gt, scratch, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE)); |
| 2889 } | 2950 } |
| 2890 | 2951 |
| 2891 | 2952 |
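The retyped bounds retarget the check from JS objects proper to the
non-callable spec-object window; the lt/gt branch pair together forms an
inclusive range test on the instance-type byte:

    #include <cstdint>

    // The two branches above, as a predicate; bounds are symbolic.
    bool InRange(uint8_t instance_type, uint8_t first, uint8_t last) {
      return instance_type >= first && instance_type <= last;
    }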
| 2892 void MacroAssembler::IsObjectJSStringType(Register object, | 2953 void MacroAssembler::IsObjectJSStringType(Register object, |
| 2893 Register scratch, | 2954 Register scratch, |
| 2894 Label* fail) { | 2955 Label* fail) { |
| 2895 ASSERT(kNotStringTag != 0); | 2956 ASSERT(kNotStringTag != 0); |
| 2896 | 2957 |
| 2897 lw(scratch, FieldMemOperand(object, HeapObject::kMapOffset)); | 2958 lw(scratch, FieldMemOperand(object, HeapObject::kMapOffset)); |
| 2898 lbu(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset)); | 2959 lbu(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset)); |
| (...skipping 75 matching lines...) |
| 2974 ASSERT(allow_stub_calls()); // Stub calls are not allowed in some stubs. | 3035 ASSERT(allow_stub_calls()); // Stub calls are not allowed in some stubs. |
| 2975 Object* result; | 3036 Object* result; |
| 2976 { MaybeObject* maybe_result = stub->TryGetCode(); | 3037 { MaybeObject* maybe_result = stub->TryGetCode(); |
| 2977 if (!maybe_result->ToObject(&result)) return maybe_result; | 3038 if (!maybe_result->ToObject(&result)) return maybe_result; |
| 2978 } | 3039 } |
| 2979 Call(Handle<Code>(Code::cast(result)), RelocInfo::CODE_TARGET, cond, r1, r2); | 3040 Call(Handle<Code>(Code::cast(result)), RelocInfo::CODE_TARGET, cond, r1, r2); |
| 2980 return result; | 3041 return result; |
| 2981 } | 3042 } |
| 2982 | 3043 |
| 2983 | 3044 |
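TryCallStub above (and TryTailCallStub below) follow the MaybeObject
propagation idiom: TryGetCode can fail to allocate the code object, and the
failure is handed back to the caller instead of aborting, which is why the
Try* variants return MaybeObject*. A simplified, self-contained model of the
pattern (in V8, Object derives from MaybeObject, so the success value
converts to the return type; the types below are stand-ins):

    struct Object;

    struct MaybeObject {
      virtual bool ToObject(Object** out) = 0;  // false on failure
      virtual ~MaybeObject() {}
    };

    struct Object : MaybeObject {
      bool ToObject(Object** out) override { *out = this; return true; }
    };

    MaybeObject* TryPattern(MaybeObject* maybe_result) {
      Object* result;
      if (!maybe_result->ToObject(&result)) return maybe_result;  // propagate
      // ... emit the Call/Jump to the unwrapped code object ...
      return result;  // success
    }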
| 2984 | |
| 2985 void MacroAssembler::TailCallStub(CodeStub* stub) { | 3045 void MacroAssembler::TailCallStub(CodeStub* stub) { |
| 2986 ASSERT(allow_stub_calls()); // Stub calls are not allowed in some stubs. | 3046 ASSERT(allow_stub_calls()); // Stub calls are not allowed in some stubs. |
| 2987 Jump(stub->GetCode(), RelocInfo::CODE_TARGET); | 3047 Jump(stub->GetCode(), RelocInfo::CODE_TARGET); |
| 2988 } | 3048 } |
| 2989 | 3049 |
| 3050 |
| 2990 MaybeObject* MacroAssembler::TryTailCallStub(CodeStub* stub, | 3051 MaybeObject* MacroAssembler::TryTailCallStub(CodeStub* stub, |
| 2991 Condition cond, | 3052 Condition cond, |
| 2992 Register r1, | 3053 Register r1, |
| 2993 const Operand& r2) { | 3054 const Operand& r2) { |
| 2994 ASSERT(allow_stub_calls()); // Stub calls are not allowed in some stubs. | 3055 ASSERT(allow_stub_calls()); // Stub calls are not allowed in some stubs. |
| 2995 Object* result; | 3056 Object* result; |
| 2996 { MaybeObject* maybe_result = stub->TryGetCode(); | 3057 { MaybeObject* maybe_result = stub->TryGetCode(); |
| 2997 if (!maybe_result->ToObject(&result)) return maybe_result; | 3058 if (!maybe_result->ToObject(&result)) return maybe_result; |
| 2998 } | 3059 } |
| 2999 Jump(Handle<Code>(Code::cast(result)), RelocInfo::CODE_TARGET, cond, r1, r2); | 3060 Jump(Handle<Code>(Code::cast(result)), RelocInfo::CODE_TARGET, cond, r1, r2); |
| (...skipping 300 matching lines...) |
| 3300 int num_arguments, | 3361 int num_arguments, |
| 3301 int result_size) { | 3362 int result_size) { |
| 3302 // TODO(1236192): Most runtime routines don't need the number of | 3363 // TODO(1236192): Most runtime routines don't need the number of |
| 3303 // arguments passed in because it is constant. At some point we | 3364 // arguments passed in because it is constant. At some point we |
| 3304 // should remove this need and make the runtime routine entry code | 3365 // should remove this need and make the runtime routine entry code |
| 3305 // smarter. | 3366 // smarter. |
| 3306 li(a0, Operand(num_arguments)); | 3367 li(a0, Operand(num_arguments)); |
| 3307 JumpToExternalReference(ext); | 3368 JumpToExternalReference(ext); |
| 3308 } | 3369 } |
| 3309 | 3370 |
| 3371 |
| 3310 MaybeObject* MacroAssembler::TryTailCallExternalReference( | 3372 MaybeObject* MacroAssembler::TryTailCallExternalReference( |
| 3311 const ExternalReference& ext, int num_arguments, int result_size) { | 3373 const ExternalReference& ext, int num_arguments, int result_size) { |
| 3312 // TODO(1236192): Most runtime routines don't need the number of | 3374 // TODO(1236192): Most runtime routines don't need the number of |
| 3313 // arguments passed in because it is constant. At some point we | 3375 // arguments passed in because it is constant. At some point we |
| 3314 // should remove this need and make the runtime routine entry code | 3376 // should remove this need and make the runtime routine entry code |
| 3315 // smarter. | 3377 // smarter. |
| 3316 li(a0, num_arguments); | 3378 li(a0, num_arguments); |
| 3317 return TryJumpToExternalReference(ext); | 3379 return TryJumpToExternalReference(ext); |
| 3318 } | 3380 } |
| 3319 | 3381 |
| (...skipping 21 matching lines...) |
| 3341 return TryTailCallStub(&stub); | 3403 return TryTailCallStub(&stub); |
| 3342 } | 3404 } |
| 3343 | 3405 |
| 3344 | 3406 |
| 3345 void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id, | 3407 void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id, |
| 3346 InvokeFlag flag, | 3408 InvokeFlag flag, |
| 3347 const CallWrapper& call_wrapper) { | 3409 const CallWrapper& call_wrapper) { |
| 3348 GetBuiltinEntry(t9, id); | 3410 GetBuiltinEntry(t9, id); |
| 3349 if (flag == CALL_FUNCTION) { | 3411 if (flag == CALL_FUNCTION) { |
| 3350 call_wrapper.BeforeCall(CallSize(t9)); | 3412 call_wrapper.BeforeCall(CallSize(t9)); |
| 3413 SetCallKind(t1, CALL_AS_METHOD); |
| 3351 Call(t9); | 3414 Call(t9); |
| 3352 call_wrapper.AfterCall(); | 3415 call_wrapper.AfterCall(); |
| 3353 } else { | 3416 } else { |
| 3354 ASSERT(flag == JUMP_FUNCTION); | 3417 ASSERT(flag == JUMP_FUNCTION); |
| 3418 SetCallKind(t1, CALL_AS_METHOD); |
| 3355 Jump(t9); | 3419 Jump(t9); |
| 3356 } | 3420 } |
| 3357 } | 3421 } |
| 3358 | 3422 |
| 3359 | 3423 |
| 3360 void MacroAssembler::GetBuiltinFunction(Register target, | 3424 void MacroAssembler::GetBuiltinFunction(Register target, |
| 3361 Builtins::JavaScript id) { | 3425 Builtins::JavaScript id) { |
| 3362 // Load the builtins object into target register. | 3426 // Load the builtins object into target register. |
| 3363 lw(target, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX))); | 3427 lw(target, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX))); |
| 3364 lw(target, FieldMemOperand(target, GlobalObject::kBuiltinsOffset)); | 3428 lw(target, FieldMemOperand(target, GlobalObject::kBuiltinsOffset)); |
| (...skipping 338 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 3703 return OS::ActivationFrameAlignment(); | 3767 return OS::ActivationFrameAlignment(); |
| 3704 #else // defined(V8_HOST_ARCH_MIPS) | 3768 #else // defined(V8_HOST_ARCH_MIPS) |
| 3705 // If we are using the simulator then we should always align to the expected | 3769 // If we are using the simulator then we should always align to the expected |
| 3706 // alignment. As the simulator is used to generate snapshots we do not know | 3770 // alignment. As the simulator is used to generate snapshots we do not know |
| 3707 // if the target platform will need alignment, so this is controlled from a | 3771 // if the target platform will need alignment, so this is controlled from a |
| 3708 // flag. | 3772 // flag. |
| 3709 return FLAG_sim_stack_alignment; | 3773 return FLAG_sim_stack_alignment; |
| 3710 #endif // defined(V8_HOST_ARCH_MIPS) | 3774 #endif // defined(V8_HOST_ARCH_MIPS) |
| 3711 } | 3775 } |
| 3712 | 3776 |
| 3777 |
| 3713 void MacroAssembler::AssertStackIsAligned() { | 3778 void MacroAssembler::AssertStackIsAligned() { |
| 3714 if (emit_debug_code()) { | 3779 if (emit_debug_code()) { |
| 3715 const int frame_alignment = ActivationFrameAlignment(); | 3780 const int frame_alignment = ActivationFrameAlignment(); |
| 3716 const int frame_alignment_mask = frame_alignment - 1; | 3781 const int frame_alignment_mask = frame_alignment - 1; |
| 3717 | 3782 |
| 3718 if (frame_alignment > kPointerSize) { | 3783 if (frame_alignment > kPointerSize) { |
| 3719 Label alignment_as_expected; | 3784 Label alignment_as_expected; |
| 3720 ASSERT(IsPowerOf2(frame_alignment)); | 3785 ASSERT(IsPowerOf2(frame_alignment)); |
| 3721 andi(at, sp, frame_alignment_mask); | 3786 andi(at, sp, frame_alignment_mask); |
| 3722 Branch(&alignment_as_expected, eq, at, Operand(zero_reg)); | 3787 Branch(&alignment_as_expected, eq, at, Operand(zero_reg)); |
| (...skipping 316 matching lines...) |
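AssertStackIsAligned (above) uses the standard power-of-two trick: sp is
aligned to frame_alignment exactly when its low bits under the mask
frame_alignment - 1 are all zero, which is what the andi/Branch pair tests:

    #include <cstdint>

    // The andi(at, sp, frame_alignment_mask) check as a predicate;
    // frame_alignment must be a power of two (per the ASSERT above).
    bool IsAligned(uintptr_t sp, uintptr_t frame_alignment) {
      return (sp & (frame_alignment - 1)) == 0;
    }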
| 4039 opcode == BGTZL); | 4104 opcode == BGTZL); |
| 4040 opcode = (cond == eq) ? BEQ : BNE; | 4105 opcode = (cond == eq) ? BEQ : BNE; |
| 4041 instr = (instr & ~kOpcodeMask) | opcode; | 4106 instr = (instr & ~kOpcodeMask) | opcode; |
| 4042 masm_.emit(instr); | 4107 masm_.emit(instr); |
| 4043 } | 4108 } |
| 4044 | 4109 |
| 4045 | 4110 |
| 4046 } } // namespace v8::internal | 4111 } } // namespace v8::internal |
| 4047 | 4112 |
| 4048 #endif // V8_TARGET_ARCH_MIPS | 4113 #endif // V8_TARGET_ARCH_MIPS |