OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 189 matching lines...)
200 // Possibly allocate a local context. | 200 // Possibly allocate a local context. |
201 int heap_slots = info()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS; | 201 int heap_slots = info()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS; |
202 if (heap_slots > 0) { | 202 if (heap_slots > 0) { |
203 Comment(";;; Allocate local context"); | 203 Comment(";;; Allocate local context"); |
204 // Argument to NewContext is the function, which is in a1. | 204 // Argument to NewContext is the function, which is in a1. |
205 if (heap_slots <= FastNewContextStub::kMaximumSlots) { | 205 if (heap_slots <= FastNewContextStub::kMaximumSlots) { |
206 FastNewContextStub stub(heap_slots); | 206 FastNewContextStub stub(heap_slots); |
207 __ CallStub(&stub); | 207 __ CallStub(&stub); |
208 } else { | 208 } else { |
209 __ push(a1); | 209 __ push(a1); |
210 __ CallRuntime(Runtime::kNewFunctionContext, 1); | 210 __ CallRuntime(Runtime::kHiddenNewFunctionContext, 1); |
211 } | 211 } |
212 RecordSafepoint(Safepoint::kNoLazyDeopt); | 212 RecordSafepoint(Safepoint::kNoLazyDeopt); |
213 // Context is returned in v0. It replaces the context passed to us. | 213 // Context is returned in v0. It replaces the context passed to us. |
214 // It's saved in the stack and kept live in cp. | 214 // It's saved in the stack and kept live in cp. |
215 __ mov(cp, v0); | 215 __ mov(cp, v0); |
216 __ sw(v0, MemOperand(fp, StandardFrameConstants::kContextOffset)); | 216 __ sw(v0, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
217 // Copy any necessary parameters into the context. | 217 // Copy any necessary parameters into the context. |
218 int num_parameters = scope()->num_parameters(); | 218 int num_parameters = scope()->num_parameters(); |
219 for (int i = 0; i < num_parameters; i++) { | 219 for (int i = 0; i < num_parameters; i++) { |
220 Variable* var = scope()->parameter(i); | 220 Variable* var = scope()->parameter(i); |
(...skipping 3271 matching lines...)
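The prologue hunk above allocates a local context only when the function actually has context-allocated slots, choosing between FastNewContextStub and the (renamed) Runtime::kHiddenNewFunctionContext entry purely by slot count. A minimal sketch of that decision, with the threshold passed in as a parameter since the value of FastNewContextStub::kMaximumSlots is not visible in this diff:

    // Sketch of the prologue's choice; the threshold value is an assumption
    // (FastNewContextStub::kMaximumSlots is not shown in the diff).
    enum class ContextAllocation { kNone, kStub, kRuntimeCall };

    ContextAllocation ChooseContextAllocation(int num_heap_slots,
                                              int min_context_slots,
                                              int max_stub_slots) {
      // Mirrors: heap_slots = info()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS
      int heap_slots = num_heap_slots - min_context_slots;
      if (heap_slots <= 0) return ContextAllocation::kNone;               // no local context
      if (heap_slots <= max_stub_slots) return ContextAllocation::kStub;  // FastNewContextStub
      return ContextAllocation::kRuntimeCall;  // Runtime::kHiddenNewFunctionContext
    }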
3492 } | 3492 } |
3493 } | 3493 } |
3494 | 3494 |
3495 | 3495 |
3496 void LCodeGen::DoDeclareGlobals(LDeclareGlobals* instr) { | 3496 void LCodeGen::DoDeclareGlobals(LDeclareGlobals* instr) { |
3497 ASSERT(ToRegister(instr->context()).is(cp)); | 3497 ASSERT(ToRegister(instr->context()).is(cp)); |
3498 __ li(scratch0(), instr->hydrogen()->pairs()); | 3498 __ li(scratch0(), instr->hydrogen()->pairs()); |
3499 __ li(scratch1(), Operand(Smi::FromInt(instr->hydrogen()->flags()))); | 3499 __ li(scratch1(), Operand(Smi::FromInt(instr->hydrogen()->flags()))); |
3500 // The context is the first argument. | 3500 // The context is the first argument. |
3501 __ Push(cp, scratch0(), scratch1()); | 3501 __ Push(cp, scratch0(), scratch1()); |
3502 CallRuntime(Runtime::kDeclareGlobals, 3, instr); | 3502 CallRuntime(Runtime::kHiddenDeclareGlobals, 3, instr); |
3503 } | 3503 } |
3504 | 3504 |
3505 | 3505 |
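Throughout this diff the change is mechanical: call sites keep the same push-arguments-then-call-by-id shape and only the Runtime:: constant gains the kHidden prefix (kDeclareGlobals becomes kHiddenDeclareGlobals above). A runnable stand-in illustrating that shape, where sketch::CallRuntime is a hypothetical placeholder and not V8's actual helper:

    #include <cstdio>

    namespace sketch {

    // Hypothetical ids standing in for the renamed Runtime:: constants.
    enum FunctionId { kHiddenDeclareGlobals, kHiddenNewFunctionContext, kHiddenStackGuard };

    // Placeholder for LCodeGen::CallRuntime(id, num_arguments, instr): arguments
    // are pushed first (cp, pairs, flags in DoDeclareGlobals above), then the
    // entry is invoked by id with an explicit argument count.
    void CallRuntime(FunctionId id, int num_arguments) {
      std::printf("runtime entry %d called with %d pushed arguments\n",
                  static_cast<int>(id), num_arguments);
    }

    }  // namespace sketch

    int main() {
      sketch::CallRuntime(sketch::kHiddenDeclareGlobals, 3);  // mirrors DoDeclareGlobals
      return 0;
    }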
3506 void LCodeGen::CallKnownFunction(Handle<JSFunction> function, | 3506 void LCodeGen::CallKnownFunction(Handle<JSFunction> function, |
3507 int formal_parameter_count, | 3507 int formal_parameter_count, |
3508 int arity, | 3508 int arity, |
3509 LInstruction* instr, | 3509 LInstruction* instr, |
3510 A1State a1_state) { | 3510 A1State a1_state) { |
3511 bool dont_adapt_arguments = | 3511 bool dont_adapt_arguments = |
3512 formal_parameter_count == SharedFunctionInfo::kDontAdaptArgumentsSentinel; | 3512 formal_parameter_count == SharedFunctionInfo::kDontAdaptArgumentsSentinel; |
(...skipping 68 matching lines...)
3581 // exponent: floating point exponent value. | 3581 // exponent: floating point exponent value. |
3582 | 3582 |
3583 Label allocated, slow; | 3583 Label allocated, slow; |
3584 __ LoadRoot(tmp4, Heap::kHeapNumberMapRootIndex); | 3584 __ LoadRoot(tmp4, Heap::kHeapNumberMapRootIndex); |
3585 __ AllocateHeapNumber(tmp1, tmp2, tmp3, tmp4, &slow); | 3585 __ AllocateHeapNumber(tmp1, tmp2, tmp3, tmp4, &slow); |
3586 __ Branch(&allocated); | 3586 __ Branch(&allocated); |
3587 | 3587 |
3588 // Slow case: Call the runtime system to do the number allocation. | 3588 // Slow case: Call the runtime system to do the number allocation. |
3589 __ bind(&slow); | 3589 __ bind(&slow); |
3590 | 3590 |
3591 CallRuntimeFromDeferred(Runtime::kAllocateHeapNumber, 0, instr, | 3591 CallRuntimeFromDeferred(Runtime::kHiddenAllocateHeapNumber, 0, instr, |
3592 instr->context()); | 3592 instr->context()); |
3593 // Set the pointer to the new heap number in tmp. | 3593 // Set the pointer to the new heap number in tmp. |
3594 if (!tmp1.is(v0)) | 3594 if (!tmp1.is(v0)) |
3595 __ mov(tmp1, v0); | 3595 __ mov(tmp1, v0); |
3596 // Restore input_reg after call to runtime. | 3596 // Restore input_reg after call to runtime. |
3597 __ LoadFromSafepointRegisterSlot(input, input); | 3597 __ LoadFromSafepointRegisterSlot(input, input); |
3598 __ lw(exponent, FieldMemOperand(input, HeapNumber::kExponentOffset)); | 3598 __ lw(exponent, FieldMemOperand(input, HeapNumber::kExponentOffset)); |
3599 | 3599 |
3600 __ bind(&allocated); | 3600 __ bind(&allocated); |
3601 // exponent: floating point exponent value. | 3601 // exponent: floating point exponent value. |
(...skipping 1063 matching lines...)
4665 // TODO(3095996): Put a valid pointer value in the stack slot where the | 4665 // TODO(3095996): Put a valid pointer value in the stack slot where the |
4666 // result register is stored, as this register is in the pointer map, but | 4666 // result register is stored, as this register is in the pointer map, but |
4667 // contains an integer value. | 4667 // contains an integer value. |
4668 __ mov(dst, zero_reg); | 4668 __ mov(dst, zero_reg); |
4669 | 4669 |
4670 // Preserve the value of all registers. | 4670 // Preserve the value of all registers. |
4671 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters); | 4671 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters); |
4672 | 4672 |
4673 // NumberTagI and NumberTagD use the context from the frame, rather than | 4673 // NumberTagI and NumberTagD use the context from the frame, rather than |
4674 // the environment's HContext or HInlinedContext value. | 4674 // the environment's HContext or HInlinedContext value. |
4675 // They only call Runtime::kAllocateHeapNumber. | 4675 // They only call Runtime::kHiddenAllocateHeapNumber. |
4676 // The corresponding HChange instructions are added in a phase that does | 4676 // The corresponding HChange instructions are added in a phase that does |
4677 // not have easy access to the local context. | 4677 // not have easy access to the local context. |
4678 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); | 4678 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
4679 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber); | 4679 __ CallRuntimeSaveDoubles(Runtime::kHiddenAllocateHeapNumber); |
4680 RecordSafepointWithRegisters( | 4680 RecordSafepointWithRegisters( |
4681 instr->pointer_map(), 0, Safepoint::kNoLazyDeopt); | 4681 instr->pointer_map(), 0, Safepoint::kNoLazyDeopt); |
4682 __ Subu(v0, v0, kHeapObjectTag); | 4682 __ Subu(v0, v0, kHeapObjectTag); |
4683 __ StoreToSafepointRegisterSlot(v0, dst); | 4683 __ StoreToSafepointRegisterSlot(v0, dst); |
4684 } | 4684 } |
4685 | 4685 |
4686 | 4686 |
4687 // Done. Put the value in dbl_scratch into the value field of the | 4687 // Done. Put the value in dbl_scratch into the value field of the |
4688 // allocated heap number. | 4688 // allocated heap number. |
4689 __ bind(&done); | 4689 __ bind(&done); |
(...skipping 40 matching lines...)
4730 void LCodeGen::DoDeferredNumberTagD(LNumberTagD* instr) { | 4730 void LCodeGen::DoDeferredNumberTagD(LNumberTagD* instr) { |
4731 // TODO(3095996): Get rid of this. For now, we need to make the | 4731 // TODO(3095996): Get rid of this. For now, we need to make the |
4732 // result register contain a valid pointer because it is already | 4732 // result register contain a valid pointer because it is already |
4733 // contained in the register pointer map. | 4733 // contained in the register pointer map. |
4734 Register reg = ToRegister(instr->result()); | 4734 Register reg = ToRegister(instr->result()); |
4735 __ mov(reg, zero_reg); | 4735 __ mov(reg, zero_reg); |
4736 | 4736 |
4737 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters); | 4737 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters); |
4738 // NumberTagI and NumberTagD use the context from the frame, rather than | 4738 // NumberTagI and NumberTagD use the context from the frame, rather than |
4739 // the environment's HContext or HInlinedContext value. | 4739 // the environment's HContext or HInlinedContext value. |
4740 // They only call Runtime::kAllocateHeapNumber. | 4740 // They only call Runtime::kHiddenAllocateHeapNumber. |
4741 // The corresponding HChange instructions are added in a phase that does | 4741 // The corresponding HChange instructions are added in a phase that does |
4742 // not have easy access to the local context. | 4742 // not have easy access to the local context. |
4743 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); | 4743 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
4744 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber); | 4744 __ CallRuntimeSaveDoubles(Runtime::kHiddenAllocateHeapNumber); |
4745 RecordSafepointWithRegisters( | 4745 RecordSafepointWithRegisters( |
4746 instr->pointer_map(), 0, Safepoint::kNoLazyDeopt); | 4746 instr->pointer_map(), 0, Safepoint::kNoLazyDeopt); |
4747 __ Subu(v0, v0, kHeapObjectTag); | 4747 __ Subu(v0, v0, kHeapObjectTag); |
4748 __ StoreToSafepointRegisterSlot(v0, reg); | 4748 __ StoreToSafepointRegisterSlot(v0, reg); |
4749 } | 4749 } |
4750 | 4750 |
4751 | 4751 |
4752 void LCodeGen::DoSmiTag(LSmiTag* instr) { | 4752 void LCodeGen::DoSmiTag(LSmiTag* instr) { |
4753 HChange* hchange = instr->hydrogen(); | 4753 HChange* hchange = instr->hydrogen(); |
4754 Register input = ToRegister(instr->value()); | 4754 Register input = ToRegister(instr->value()); |
(...skipping 589 matching lines...)
5344 flags = AllocateTargetSpace::update(flags, OLD_POINTER_SPACE); | 5344 flags = AllocateTargetSpace::update(flags, OLD_POINTER_SPACE); |
5345 } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) { | 5345 } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) { |
5346 ASSERT(!instr->hydrogen()->IsNewSpaceAllocation()); | 5346 ASSERT(!instr->hydrogen()->IsNewSpaceAllocation()); |
5347 flags = AllocateTargetSpace::update(flags, OLD_DATA_SPACE); | 5347 flags = AllocateTargetSpace::update(flags, OLD_DATA_SPACE); |
5348 } else { | 5348 } else { |
5349 flags = AllocateTargetSpace::update(flags, NEW_SPACE); | 5349 flags = AllocateTargetSpace::update(flags, NEW_SPACE); |
5350 } | 5350 } |
5351 __ Push(Smi::FromInt(flags)); | 5351 __ Push(Smi::FromInt(flags)); |
5352 | 5352 |
5353 CallRuntimeFromDeferred( | 5353 CallRuntimeFromDeferred( |
5354 Runtime::kAllocateInTargetSpace, 2, instr, instr->context()); | 5354 Runtime::kHiddenAllocateInTargetSpace, 2, instr, instr->context()); |
5355 __ StoreToSafepointRegisterSlot(v0, result); | 5355 __ StoreToSafepointRegisterSlot(v0, result); |
5356 } | 5356 } |
5357 | 5357 |
5358 | 5358 |
5359 void LCodeGen::DoToFastProperties(LToFastProperties* instr) { | 5359 void LCodeGen::DoToFastProperties(LToFastProperties* instr) { |
5360 ASSERT(ToRegister(instr->value()).is(a0)); | 5360 ASSERT(ToRegister(instr->value()).is(a0)); |
5361 ASSERT(ToRegister(instr->result()).is(v0)); | 5361 ASSERT(ToRegister(instr->result()).is(v0)); |
5362 __ push(a0); | 5362 __ push(a0); |
5363 CallRuntime(Runtime::kToFastProperties, 1, instr); | 5363 CallRuntime(Runtime::kToFastProperties, 1, instr); |
5364 } | 5364 } |
(...skipping 13 matching lines...)
5378 __ lw(a1, FieldMemOperand(t3, literal_offset)); | 5378 __ lw(a1, FieldMemOperand(t3, literal_offset)); |
5379 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); | 5379 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); |
5380 __ Branch(&materialized, ne, a1, Operand(at)); | 5380 __ Branch(&materialized, ne, a1, Operand(at)); |
5381 | 5381 |
5382 // Create regexp literal using runtime function | 5382 // Create regexp literal using runtime function |
5383 // Result will be in v0. | 5383 // Result will be in v0. |
5384 __ li(t2, Operand(Smi::FromInt(instr->hydrogen()->literal_index()))); | 5384 __ li(t2, Operand(Smi::FromInt(instr->hydrogen()->literal_index()))); |
5385 __ li(t1, Operand(instr->hydrogen()->pattern())); | 5385 __ li(t1, Operand(instr->hydrogen()->pattern())); |
5386 __ li(t0, Operand(instr->hydrogen()->flags())); | 5386 __ li(t0, Operand(instr->hydrogen()->flags())); |
5387 __ Push(t3, t2, t1, t0); | 5387 __ Push(t3, t2, t1, t0); |
5388 CallRuntime(Runtime::kMaterializeRegExpLiteral, 4, instr); | 5388 CallRuntime(Runtime::kHiddenMaterializeRegExpLiteral, 4, instr); |
5389 __ mov(a1, v0); | 5389 __ mov(a1, v0); |
5390 | 5390 |
5391 __ bind(&materialized); | 5391 __ bind(&materialized); |
5392 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize; | 5392 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize; |
5393 Label allocated, runtime_allocate; | 5393 Label allocated, runtime_allocate; |
5394 | 5394 |
5395 __ Allocate(size, v0, a2, a3, &runtime_allocate, TAG_OBJECT); | 5395 __ Allocate(size, v0, a2, a3, &runtime_allocate, TAG_OBJECT); |
5396 __ jmp(&allocated); | 5396 __ jmp(&allocated); |
5397 | 5397 |
5398 __ bind(&runtime_allocate); | 5398 __ bind(&runtime_allocate); |
5399 __ li(a0, Operand(Smi::FromInt(size))); | 5399 __ li(a0, Operand(Smi::FromInt(size))); |
5400 __ Push(a1, a0); | 5400 __ Push(a1, a0); |
5401 CallRuntime(Runtime::kAllocateInNewSpace, 1, instr); | 5401 CallRuntime(Runtime::kHiddenAllocateInNewSpace, 1, instr); |
5402 __ pop(a1); | 5402 __ pop(a1); |
5403 | 5403 |
5404 __ bind(&allocated); | 5404 __ bind(&allocated); |
5405 // Copy the content into the newly allocated memory. | 5405 // Copy the content into the newly allocated memory. |
5406 // (Unroll copy loop once for better throughput). | 5406 // (Unroll copy loop once for better throughput). |
5407 for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) { | 5407 for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) { |
5408 __ lw(a3, FieldMemOperand(a1, i)); | 5408 __ lw(a3, FieldMemOperand(a1, i)); |
5409 __ lw(a2, FieldMemOperand(a1, i + kPointerSize)); | 5409 __ lw(a2, FieldMemOperand(a1, i + kPointerSize)); |
5410 __ sw(a3, FieldMemOperand(v0, i)); | 5410 __ sw(a3, FieldMemOperand(v0, i)); |
5411 __ sw(a2, FieldMemOperand(v0, i + kPointerSize)); | 5411 __ sw(a2, FieldMemOperand(v0, i + kPointerSize)); |
(...skipping 13 matching lines...)
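The loop in the hunk above copies two pointer-sized words per iteration ("unroll copy loop once"), stopping at size - kPointerSize; the elided lines presumably handle any final odd word. A minimal C++ sketch of that strategy, assuming 32-bit words as on MIPS32 and ignoring the heap-object tag adjustment that FieldMemOperand performs:

    #include <cstdint>
    #include <cstring>

    // Sketch of the unrolled copy above (kPointerSize assumed to be 4 on MIPS32).
    // Two words move per loop trip; any leftover word is copied after the loop.
    void CopyUnrolledOnce(const uint8_t* src, uint8_t* dst, int size) {
      const int kPointerSize = 4;
      int i = 0;
      for (; i < size - kPointerSize; i += 2 * kPointerSize) {
        std::memcpy(dst + i, src + i, 2 * kPointerSize);  // words i and i + kPointerSize
      }
      if (i < size) {
        std::memcpy(dst + i, src + i, kPointerSize);  // final word when the count is odd
      }
    }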
5425 if (!pretenure && instr->hydrogen()->has_no_literals()) { | 5425 if (!pretenure && instr->hydrogen()->has_no_literals()) { |
5426 FastNewClosureStub stub(instr->hydrogen()->strict_mode(), | 5426 FastNewClosureStub stub(instr->hydrogen()->strict_mode(), |
5427 instr->hydrogen()->is_generator()); | 5427 instr->hydrogen()->is_generator()); |
5428 __ li(a2, Operand(instr->hydrogen()->shared_info())); | 5428 __ li(a2, Operand(instr->hydrogen()->shared_info())); |
5429 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); | 5429 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); |
5430 } else { | 5430 } else { |
5431 __ li(a2, Operand(instr->hydrogen()->shared_info())); | 5431 __ li(a2, Operand(instr->hydrogen()->shared_info())); |
5432 __ li(a1, Operand(pretenure ? factory()->true_value() | 5432 __ li(a1, Operand(pretenure ? factory()->true_value() |
5433 : factory()->false_value())); | 5433 : factory()->false_value())); |
5434 __ Push(cp, a2, a1); | 5434 __ Push(cp, a2, a1); |
5435 CallRuntime(Runtime::kNewClosure, 3, instr); | 5435 CallRuntime(Runtime::kHiddenNewClosure, 3, instr); |
5436 } | 5436 } |
5437 } | 5437 } |
5438 | 5438 |
5439 | 5439 |
5440 void LCodeGen::DoTypeof(LTypeof* instr) { | 5440 void LCodeGen::DoTypeof(LTypeof* instr) { |
5441 ASSERT(ToRegister(instr->result()).is(v0)); | 5441 ASSERT(ToRegister(instr->result()).is(v0)); |
5442 Register input = ToRegister(instr->value()); | 5442 Register input = ToRegister(instr->value()); |
5443 __ push(input); | 5443 __ push(input); |
5444 CallRuntime(Runtime::kTypeof, 1, instr); | 5444 CallRuntime(Runtime::kTypeof, 1, instr); |
5445 } | 5445 } |
(...skipping 203 matching lines...)
5649 | 5649 |
5650 | 5650 |
5651 void LCodeGen::DoDummyUse(LDummyUse* instr) { | 5651 void LCodeGen::DoDummyUse(LDummyUse* instr) { |
5652 // Nothing to see here, move on! | 5652 // Nothing to see here, move on! |
5653 } | 5653 } |
5654 | 5654 |
5655 | 5655 |
5656 void LCodeGen::DoDeferredStackCheck(LStackCheck* instr) { | 5656 void LCodeGen::DoDeferredStackCheck(LStackCheck* instr) { |
5657 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters); | 5657 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters); |
5658 LoadContextFromDeferred(instr->context()); | 5658 LoadContextFromDeferred(instr->context()); |
5659 __ CallRuntimeSaveDoubles(Runtime::kStackGuard); | 5659 __ CallRuntimeSaveDoubles(Runtime::kHiddenStackGuard); |
5660 RecordSafepointWithLazyDeopt( | 5660 RecordSafepointWithLazyDeopt( |
5661 instr, RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS); | 5661 instr, RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS); |
5662 ASSERT(instr->HasEnvironment()); | 5662 ASSERT(instr->HasEnvironment()); |
5663 LEnvironment* env = instr->environment(); | 5663 LEnvironment* env = instr->environment(); |
5664 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); | 5664 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); |
5665 } | 5665 } |
5666 | 5666 |
5667 | 5667 |
5668 void LCodeGen::DoStackCheck(LStackCheck* instr) { | 5668 void LCodeGen::DoStackCheck(LStackCheck* instr) { |
5669 class DeferredStackCheck V8_FINAL : public LDeferredCode { | 5669 class DeferredStackCheck V8_FINAL : public LDeferredCode { |
(...skipping 144 matching lines...)
5814 __ Subu(scratch, result, scratch); | 5814 __ Subu(scratch, result, scratch); |
5815 __ lw(result, FieldMemOperand(scratch, | 5815 __ lw(result, FieldMemOperand(scratch, |
5816 FixedArray::kHeaderSize - kPointerSize)); | 5816 FixedArray::kHeaderSize - kPointerSize)); |
5817 __ bind(&done); | 5817 __ bind(&done); |
5818 } | 5818 } |
5819 | 5819 |
5820 | 5820 |
5821 #undef __ | 5821 #undef __ |
5822 | 5822 |
5823 } } // namespace v8::internal | 5823 } } // namespace v8::internal |