Chromium Code Reviews

Side by Side Diff: src/mips/macro-assembler-mips.cc

Issue 6993054: MIPS: Fixed FPU rounding checks and related errors in the Simulator. (Closed)
Patch Set: Created 9 years, 6 months ago
 // Copyright 2011 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 175 matching lines...)
       kPageSizeBits - Page::kRegionSizeLog2);
 
   // Mark region dirty.
   lw(scratch, MemOperand(object, Page::kDirtyFlagOffset));
   li(at, Operand(1));
   sllv(at, at, address);
   or_(scratch, scratch, at);
   sw(scratch, MemOperand(object, Page::kDirtyFlagOffset));
 }
 
+
 // Push and pop all registers that can hold pointers.
 void MacroAssembler::PushSafepointRegisters() {
   // Safepoints expect a block of kNumSafepointRegisters values on the
   // stack, so adjust the stack for unsaved registers.
   const int num_unsaved = kNumSafepointRegisters - kNumSafepointSavedRegisters;
   ASSERT(num_unsaved >= 0);
   Subu(sp, sp, Operand(num_unsaved * kPointerSize));
   MultiPush(kSafepointSavedRegisters);
 }
 
+
 void MacroAssembler::PopSafepointRegisters() {
   const int num_unsaved = kNumSafepointRegisters - kNumSafepointSavedRegisters;
   MultiPop(kSafepointSavedRegisters);
   Addu(sp, sp, Operand(num_unsaved * kPointerSize));
 }
 
+
 void MacroAssembler::PushSafepointRegistersAndDoubles() {
   PushSafepointRegisters();
   Subu(sp, sp, Operand(FPURegister::kNumAllocatableRegisters * kDoubleSize));
   for (int i = 0; i < FPURegister::kNumAllocatableRegisters; i+=2) {
     FPURegister reg = FPURegister::FromAllocationIndex(i);
     sdc1(reg, MemOperand(sp, i * kDoubleSize));
   }
 }
 
+
 void MacroAssembler::PopSafepointRegistersAndDoubles() {
   for (int i = 0; i < FPURegister::kNumAllocatableRegisters; i+=2) {
     FPURegister reg = FPURegister::FromAllocationIndex(i);
     ldc1(reg, MemOperand(sp, i * kDoubleSize));
   }
   Addu(sp, sp, Operand(FPURegister::kNumAllocatableRegisters * kDoubleSize));
   PopSafepointRegisters();
 }
 
+
 void MacroAssembler::StoreToSafepointRegistersAndDoublesSlot(Register src,
                                                              Register dst) {
   sw(src, SafepointRegistersAndDoublesSlot(dst));
 }
 
 
 void MacroAssembler::StoreToSafepointRegisterSlot(Register src, Register dst) {
   sw(src, SafepointRegisterSlot(dst));
 }
 
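A note on the stack arithmetic in the hunk above: PushSafepointRegisters leaves a gap of num_unsaved * kPointerSize bytes so that the safepoint machinery still sees a full block of kNumSafepointRegisters slots, and PushSafepointRegistersAndDoubles reserves a further kNumAllocatableRegisters * kDoubleSize bytes below that block, storing every second allocatable FPU register at offset i * kDoubleSize. The following is a minimal host-side sketch of that arithmetic; the constant values are assumptions for illustration, not the values defined by the V8 MIPS port.

#include <cstdio>

// Illustrative constants only; the real values come from the V8 MIPS port headers.
const int kPointerSize = 4;
const int kDoubleSize = 8;
const int kNumSafepointRegisters = 24;          // assumed block size expected by safepoints
const int kNumSafepointSavedRegisters = 14;     // assumed number actually pushed
const int kNumAllocatableDoubleRegisters = 12;  // assumed; MIPS32 doubles use even/odd FPU pairs

int main() {
  // PushSafepointRegisters: leave a gap for the unsaved registers, then push the rest.
  int num_unsaved = kNumSafepointRegisters - kNumSafepointSavedRegisters;
  printf("gap for unsaved registers: %d bytes\n", num_unsaved * kPointerSize);
  printf("total GPR safepoint area:  %d bytes\n", kNumSafepointRegisters * kPointerSize);

  // PushSafepointRegistersAndDoubles: a doubles area sits below the GPR block;
  // every second allocatable FPU register is stored at offset i * kDoubleSize.
  for (int i = 0; i < kNumAllocatableDoubleRegisters; i += 2) {
    printf("double reg %2d -> sp + %d\n", i, i * kDoubleSize);
  }
  printf("total FPU safepoint area:  %d bytes\n",
         kNumAllocatableDoubleRegisters * kDoubleSize);
  return 0;
}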
(...skipping 2780 matching lines...)
   ASSERT(allow_stub_calls());  // Stub calls are not allowed in some stubs.
   Object* result;
   { MaybeObject* maybe_result = stub->TryGetCode();
     if (!maybe_result->ToObject(&result)) return maybe_result;
   }
   Call(Handle<Code>(Code::cast(result)), RelocInfo::CODE_TARGET, cond, r1, r2);
   return result;
 }
 
 
-
 void MacroAssembler::TailCallStub(CodeStub* stub) {
   ASSERT(allow_stub_calls());  // Stub calls are not allowed in some stubs.
   Jump(stub->GetCode(), RelocInfo::CODE_TARGET);
 }
 
+
 MaybeObject* MacroAssembler::TryTailCallStub(CodeStub* stub,
                                              Condition cond,
                                              Register r1,
                                              const Operand& r2) {
   ASSERT(allow_stub_calls());  // Stub calls are not allowed in some stubs.
   Object* result;
   { MaybeObject* maybe_result = stub->TryGetCode();
     if (!maybe_result->ToObject(&result)) return maybe_result;
   }
   Jump(Handle<Code>(Code::cast(result)), RelocInfo::CODE_TARGET, cond, r1, r2);
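The Try* variants shown above follow the MaybeObject convention: stub->TryGetCode() can fail (for example when code allocation would need a GC), ToObject() extracts the code object only on success, and on failure the MaybeObject is returned immediately so the caller can retry. A stripped-down sketch of that propagation pattern follows; the types here are hypothetical stand-ins, not V8's real classes.

#include <cstdio>

// Hypothetical stand-ins for V8's Object / MaybeObject machinery.
struct Object {
  int value;
};

struct MaybeObject {
  bool is_failure;
  Object object;
  // Mirrors MaybeObject::ToObject: yields the object only when not a failure.
  bool ToObject(Object** out) {
    if (is_failure) return false;
    *out = &object;
    return true;
  }
};

MaybeObject TryGetCode(bool fail) {
  if (fail) return MaybeObject{true, {0}};
  return MaybeObject{false, {42}};
}

// Mirrors TryCallStub/TryTailCallStub: bail out early, handing the failure up.
MaybeObject TryCallStub(bool fail) {
  Object* result;
  MaybeObject maybe_result = TryGetCode(fail);
  if (!maybe_result.ToObject(&result)) return maybe_result;  // propagate failure
  printf("calling code object %d\n", result->value);         // stand-in for Call(...)
  return maybe_result;
}

int main() {
  TryCallStub(false);  // succeeds and "calls" the stub
  TryCallStub(true);   // fails; the failure is handed back without calling
  return 0;
}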
(...skipping 300 matching lines...)
                                              int num_arguments,
                                              int result_size) {
   // TODO(1236192): Most runtime routines don't need the number of
   // arguments passed in because it is constant. At some point we
   // should remove this need and make the runtime routine entry code
   // smarter.
   li(a0, Operand(num_arguments));
   JumpToExternalReference(ext);
 }
 
+
 MaybeObject* MacroAssembler::TryTailCallExternalReference(
     const ExternalReference& ext, int num_arguments, int result_size) {
   // TODO(1236192): Most runtime routines don't need the number of
   // arguments passed in because it is constant. At some point we
   // should remove this need and make the runtime routine entry code
   // smarter.
   li(a0, num_arguments);
   return TryJumpToExternalReference(ext);
 }
 
(...skipping 385 matching lines...)
   return OS::ActivationFrameAlignment();
 #else  // defined(V8_HOST_ARCH_MIPS)
   // If we are using the simulator then we should always align to the expected
   // alignment. As the simulator is used to generate snapshots we do not know
   // if the target platform will need alignment, so this is controlled from a
   // flag.
   return FLAG_sim_stack_alignment;
 #endif  // defined(V8_HOST_ARCH_MIPS)
 }
 
+
 void MacroAssembler::AssertStackIsAligned() {
   if (emit_debug_code()) {
     const int frame_alignment = ActivationFrameAlignment();
     const int frame_alignment_mask = frame_alignment - 1;
 
     if (frame_alignment > kPointerSize) {
       Label alignment_as_expected;
       ASSERT(IsPowerOf2(frame_alignment));
       andi(at, sp, frame_alignment_mask);
       Branch(&alignment_as_expected, eq, at, Operand(zero_reg));
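AssertStackIsAligned relies on the usual power-of-two trick: for an alignment that is a power of two, an address is aligned exactly when its low bits, addr & (alignment - 1), are zero, which is what andi(at, sp, frame_alignment_mask) computes before the branch. A small host-side C++ sketch of the same check, assuming nothing beyond standard headers:

#include <cassert>
#include <cstdint>

// Returns true when addr is aligned to `alignment`, which must be a power of two.
bool IsAligned(uintptr_t addr, uintptr_t alignment) {
  assert((alignment & (alignment - 1)) == 0);  // power-of-two check, like IsPowerOf2()
  uintptr_t mask = alignment - 1;              // analogous to frame_alignment_mask
  return (addr & mask) == 0;                   // analogous to andi(at, sp, mask) == 0
}

int main() {
  assert(IsAligned(0x7fff0010u, 8));   // 8-byte-aligned stack pointer passes
  assert(!IsAligned(0x7fff0014u, 8));  // misaligned by 4 bytes fails
  return 0;
}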
(...skipping 316 matching lines...)
          opcode == BGTZL);
   opcode = (cond == eq) ? BEQ : BNE;
   instr = (instr & ~kOpcodeMask) | opcode;
   masm_.emit(instr);
 }
 
 
 } }  // namespace v8::internal
 
 #endif  // V8_TARGET_ARCH_MIPS
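The final hunk rewrites the opcode field of an already-encoded branch: clearing the bits selected by kOpcodeMask and OR-ing in the new opcode turns a branch-likely instruction (BEQL, BNEL, ...) into the corresponding ordinary branch while preserving the register and offset fields. A sketch of that bit surgery follows; the field layout (opcode in bits 31..26) matches the MIPS encoding, but the named constants are illustrative rather than V8's.

#include <cassert>
#include <cstdint>

// MIPS instructions are 32 bits; the primary opcode sits in bits 31..26.
const uint32_t kOpcodeShift = 26;
const uint32_t kOpcodeMask = 0x3Fu << kOpcodeShift;

// Illustrative opcode values for the branches involved.
const uint32_t BEQ  = 0x04u << kOpcodeShift;
const uint32_t BEQL = 0x14u << kOpcodeShift;

// Replace the opcode field, preserving registers and branch offset.
uint32_t PatchOpcode(uint32_t instr, uint32_t new_opcode) {
  return (instr & ~kOpcodeMask) | new_opcode;
}

int main() {
  uint32_t beql = BEQL | 0x00851234u;     // BEQL with some rs/rt/offset bits
  uint32_t beq = PatchOpcode(beql, BEQ);  // same operands, ordinary BEQ
  assert((beq & kOpcodeMask) == BEQ);
  assert((beq & ~kOpcodeMask) == (beql & ~kOpcodeMask));
  return 0;
}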
