OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include <limits.h> // For LONG_MIN, LONG_MAX. | 5 #include <limits.h> // For LONG_MIN, LONG_MAX. |
6 | 6 |
7 #include "src/v8.h" | 7 #include "src/v8.h" |
8 | 8 |
9 #if V8_TARGET_ARCH_MIPS | 9 #if V8_TARGET_ARCH_MIPS |
10 | 10 |
(...skipping 4395 matching lines...) |
4406 | 4406 |
4407 void MacroAssembler::TailCallStub(CodeStub* stub, | 4407 void MacroAssembler::TailCallStub(CodeStub* stub, |
4408 Condition cond, | 4408 Condition cond, |
4409 Register r1, | 4409 Register r1, |
4410 const Operand& r2, | 4410 const Operand& r2, |
4411 BranchDelaySlot bd) { | 4411 BranchDelaySlot bd) { |
4412 Jump(stub->GetCode(), RelocInfo::CODE_TARGET, cond, r1, r2, bd); | 4412 Jump(stub->GetCode(), RelocInfo::CODE_TARGET, cond, r1, r2, bd); |
4413 } | 4413 } |
4414 | 4414 |
4415 | 4415 |
4416 static int AddressOffset(ExternalReference ref0, ExternalReference ref1) { | |
4417 return ref0.address() - ref1.address(); | |
4418 } | |
4419 | |
4420 | |
4421 void MacroAssembler::CallApiFunctionAndReturn( | |
4422 Register function_address, ExternalReference thunk_ref, int stack_space, | |
4423 MemOperand* stack_space_operand, MemOperand return_value_operand, | |
4424 MemOperand* context_restore_operand) { | |
4425 ExternalReference next_address = | |
4426 ExternalReference::handle_scope_next_address(isolate()); | |
4427 const int kNextOffset = 0; | |
4428 const int kLimitOffset = AddressOffset( | |
4429 ExternalReference::handle_scope_limit_address(isolate()), | |
4430 next_address); | |
4431 const int kLevelOffset = AddressOffset( | |
4432 ExternalReference::handle_scope_level_address(isolate()), | |
4433 next_address); | |
4434 | |
4435 DCHECK(function_address.is(a1) || function_address.is(a2)); | |
4436 | |
4437 Label profiler_disabled; | |
4438 Label end_profiler_check; | |
4439 li(t9, Operand(ExternalReference::is_profiling_address(isolate()))); | |
4440 lb(t9, MemOperand(t9, 0)); | |
4441 Branch(&profiler_disabled, eq, t9, Operand(zero_reg)); | |
4442 | |
4443 // Additional parameter is the address of the actual callback. | |
4444 li(t9, Operand(thunk_ref)); | |
4445 jmp(&end_profiler_check); | |
4446 | |
4447 bind(&profiler_disabled); | |
4448 mov(t9, function_address); | |
4449 bind(&end_profiler_check); | |
4450 | |
4451 // Allocate HandleScope in callee-save registers. | |
4452 li(s3, Operand(next_address)); | |
4453 lw(s0, MemOperand(s3, kNextOffset)); | |
4454 lw(s1, MemOperand(s3, kLimitOffset)); | |
4455 lw(s2, MemOperand(s3, kLevelOffset)); | |
4456 Addu(s2, s2, Operand(1)); | |
4457 sw(s2, MemOperand(s3, kLevelOffset)); | |
4458 | |
4459 if (FLAG_log_timer_events) { | |
4460 FrameScope frame(this, StackFrame::MANUAL); | |
4461 PushSafepointRegisters(); | |
4462 PrepareCallCFunction(1, a0); | |
4463 li(a0, Operand(ExternalReference::isolate_address(isolate()))); | |
4464 CallCFunction(ExternalReference::log_enter_external_function(isolate()), 1); | |
4465 PopSafepointRegisters(); | |
4466 } | |
4467 | |
4468 // Native call returns to the DirectCEntry stub which redirects to the | |
4469 // return address pushed on stack (could have moved after GC). | |
4470 // DirectCEntry stub itself is generated early and never moves. | |
4471 DirectCEntryStub stub(isolate()); | |
4472 stub.GenerateCall(this, t9); | |
4473 | |
4474 if (FLAG_log_timer_events) { | |
4475 FrameScope frame(this, StackFrame::MANUAL); | |
4476 PushSafepointRegisters(); | |
4477 PrepareCallCFunction(1, a0); | |
4478 li(a0, Operand(ExternalReference::isolate_address(isolate()))); | |
4479 CallCFunction(ExternalReference::log_leave_external_function(isolate()), 1); | |
4480 PopSafepointRegisters(); | |
4481 } | |
4482 | |
4483 Label promote_scheduled_exception; | |
4484 Label exception_handled; | |
4485 Label delete_allocated_handles; | |
4486 Label leave_exit_frame; | |
4487 Label return_value_loaded; | |
4488 | |
4489 // Load value from ReturnValue. | |
4490 lw(v0, return_value_operand); | |
4491 bind(&return_value_loaded); | |
4492 | |
4493 // No more valid handles (the result handle was the last one). Restore | |
4494 // previous handle scope. | |
4495 sw(s0, MemOperand(s3, kNextOffset)); | |
4496 if (emit_debug_code()) { | |
4497 lw(a1, MemOperand(s3, kLevelOffset)); | |
4498 Check(eq, kUnexpectedLevelAfterReturnFromApiCall, a1, Operand(s2)); | |
4499 } | |
4500 Subu(s2, s2, Operand(1)); | |
4501 sw(s2, MemOperand(s3, kLevelOffset)); | |
4502 lw(at, MemOperand(s3, kLimitOffset)); | |
4503 Branch(&delete_allocated_handles, ne, s1, Operand(at)); | |
4504 | |
4505 // Check if the function scheduled an exception. | |
4506 bind(&leave_exit_frame); | |
4507 LoadRoot(t0, Heap::kTheHoleValueRootIndex); | |
4508 li(at, Operand(ExternalReference::scheduled_exception_address(isolate()))); | |
4509 lw(t1, MemOperand(at)); | |
4510 Branch(&promote_scheduled_exception, ne, t0, Operand(t1)); | |
4511 bind(&exception_handled); | |
4512 | |
4513 bool restore_context = context_restore_operand != NULL; | |
4514 if (restore_context) { | |
4515 lw(cp, *context_restore_operand); | |
4516 } | |
4517 if (stack_space_operand != NULL) { | |
4518 lw(s0, *stack_space_operand); | |
4519 } else { | |
4520 li(s0, Operand(stack_space)); | |
4521 } | |
4522 LeaveExitFrame(false, s0, !restore_context, EMIT_RETURN, | |
4523 stack_space_operand != NULL); | |
4524 | |
4525 bind(&promote_scheduled_exception); | |
4526 { | |
4527 FrameScope frame(this, StackFrame::INTERNAL); | |
4528 CallExternalReference( | |
4529 ExternalReference(Runtime::kPromoteScheduledException, isolate()), | |
4530 0); | |
4531 } | |
4532 jmp(&exception_handled); | |
4533 | |
4534 // HandleScope limit has changed. Delete allocated extensions. | |
4535 bind(&delete_allocated_handles); | |
4536 sw(s1, MemOperand(s3, kLimitOffset)); | |
4537 mov(s0, v0); | |
4538 mov(a0, v0); | |
4539 PrepareCallCFunction(1, s1); | |
4540 li(a0, Operand(ExternalReference::isolate_address(isolate()))); | |
4541 CallCFunction(ExternalReference::delete_handle_scope_extensions(isolate()), | |
4542 1); | |
4543 mov(v0, s0); | |
4544 jmp(&leave_exit_frame); | |
4545 } | |
4546 | |
4547 | |
4548 bool MacroAssembler::AllowThisStubCall(CodeStub* stub) { | 4416 bool MacroAssembler::AllowThisStubCall(CodeStub* stub) { |
4549 return has_frame_ || !stub->SometimesSetsUpAFrame(); | 4417 return has_frame_ || !stub->SometimesSetsUpAFrame(); |
4550 } | 4418 } |
4551 | 4419 |
4552 | 4420 |
4553 void MacroAssembler::IndexFromHash(Register hash, Register index) { | 4421 void MacroAssembler::IndexFromHash(Register hash, Register index) { |
4554 // If the hash field contains an array index pick it out. The assert checks | 4422 // If the hash field contains an array index pick it out. The assert checks |
4555 // that the constants for the maximum number of digits for an array index | 4423 // that the constants for the maximum number of digits for an array index |
4556 // cached in the hash field and the number of bits reserved for it do not | 4424 // cached in the hash field and the number of bits reserved for it do not |
4557 // conflict. | 4425 // conflict. |
(...skipping 1718 matching lines...) |
6276 } | 6144 } |
6277 if (mag.shift > 0) sra(result, result, mag.shift); | 6145 if (mag.shift > 0) sra(result, result, mag.shift); |
6278 srl(at, dividend, 31); | 6146 srl(at, dividend, 31); |
6279 Addu(result, result, Operand(at)); | 6147 Addu(result, result, Operand(at)); |
6280 } | 6148 } |
6281 | 6149 |
6282 | 6150 |
6283 } } // namespace v8::internal | 6151 } } // namespace v8::internal |
6284 | 6152 |
6285 #endif // V8_TARGET_ARCH_MIPS | 6153 #endif // V8_TARGET_ARCH_MIPS |