OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 2529 matching lines...)
2540 Register map = temp; | 2540 Register map = temp; |
2541 __ ldr(map, FieldMemOperand(object, HeapObject::kMapOffset)); | 2541 __ ldr(map, FieldMemOperand(object, HeapObject::kMapOffset)); |
2542 { | 2542 { |
2543 // Block constant pool emission to ensure the positions of instructions are | 2543 // Block constant pool emission to ensure the positions of instructions are |
2544 // as expected by the patcher. See InstanceofStub::Generate(). | 2544 // as expected by the patcher. See InstanceofStub::Generate(). |
2545 Assembler::BlockConstPoolScope block_const_pool(masm()); | 2545 Assembler::BlockConstPoolScope block_const_pool(masm()); |
2546 __ bind(deferred->map_check()); // Label for calculating code patching. | 2546 __ bind(deferred->map_check()); // Label for calculating code patching. |
2547 // We use Factory::the_hole_value() on purpose instead of loading from the | 2547 // We use Factory::the_hole_value() on purpose instead of loading from the |
2548 // root array, to force a relocation entry that lets us later patch in | 2548 // root array, to force a relocation entry that lets us later patch in |
2549 // the cached map. | 2549 // the cached map. |
2550 PredictableCodeSizeScope predictable(masm_); | 2550 PredictableCodeSizeScope predictable(masm_, 5 * Assembler::kInstrSize); |
2551 Handle<JSGlobalPropertyCell> cell = | 2551 Handle<JSGlobalPropertyCell> cell = |
2552 factory()->NewJSGlobalPropertyCell(factory()->the_hole_value()); | 2552 factory()->NewJSGlobalPropertyCell(factory()->the_hole_value()); |
2553 __ mov(ip, Operand(Handle<Object>(cell))); | 2553 __ mov(ip, Operand(Handle<Object>(cell))); |
2554 __ ldr(ip, FieldMemOperand(ip, JSGlobalPropertyCell::kValueOffset)); | 2554 __ ldr(ip, FieldMemOperand(ip, JSGlobalPropertyCell::kValueOffset)); |
2555 __ cmp(map, Operand(ip)); | 2555 __ cmp(map, Operand(ip)); |
2556 __ b(ne, &cache_miss); | 2556 __ b(ne, &cache_miss); |
2557 // We use Factory::the_hole_value() on purpose instead of loading from the | 2557 // We use Factory::the_hole_value() on purpose instead of loading from the |
2558 // root array, to force a relocation entry that lets us later patch in | 2558 // root array, to force a relocation entry that lets us later patch in |
2559 // true or false. | 2559 // true or false. |
2560 __ mov(result, Operand(factory()->the_hole_value())); | 2560 __ mov(result, Operand(factory()->the_hole_value())); |
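Note on this hunk: the two-argument PredictableCodeSizeScope states the expected size of the patchable sequence up front. Here 5 * Assembler::kInstrSize covers the mov/ldr/cmp/b/mov run from the map_check label to the end of the scope, so the assembler can check the patcher's assumption rather than merely honor it. A minimal sketch of what such a size-asserting scope can look like, assuming an assembler that exposes a pc_offset() byte counter as V8's AssemblerBase does (the class name is illustrative, not V8's):

    // Sketch only: asserts that a scope emitted exactly the promised bytes.
    class SizeAssertingScope {
     public:
      SizeAssertingScope(Assembler* assembler, int expected_size)
          : assembler_(assembler),
            expected_size_(expected_size),
            start_offset_(assembler->pc_offset()) {}
      ~SizeAssertingScope() {
        // The patcher relies on fixed instruction positions, so emitting
        // more or fewer bytes than promised is a bug worth catching here.
        ASSERT(assembler_->pc_offset() - start_offset_ == expected_size_);
      }
     private:
      Assembler* assembler_;
      int expected_size_;
      int start_offset_;
    };

Used the same way as the scope in this hunk: SizeAssertingScope predictable(masm_, 5 * Assembler::kInstrSize);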
(...skipping 43 matching lines...)
2604 | 2604 |
2605 // Get the temp register reserved by the instruction. This needs to be r4, | 2605 // Get the temp register reserved by the instruction. This needs to be r4, |
2606 // as its slot in the pushed safepoint register area is used to communicate | 2606 // as its slot in the pushed safepoint register area is used to communicate |
2607 // the offset to the location of the map check. | 2607 // the offset to the location of the map check. |
2608 Register temp = ToRegister(instr->temp()); | 2608 Register temp = ToRegister(instr->temp()); |
2609 ASSERT(temp.is(r4)); | 2609 ASSERT(temp.is(r4)); |
2610 __ LoadHeapObject(InstanceofStub::right(), instr->function()); | 2610 __ LoadHeapObject(InstanceofStub::right(), instr->function()); |
2611 static const int kAdditionalDelta = 5; | 2611 static const int kAdditionalDelta = 5; |
2612 // Make sure that code size is predictable, since we use specific constant | 2612 // Make sure that code size is predictable, since we use specific constant |
2613 // offsets in the code to find embedded values. | 2613 // offsets in the code to find embedded values. |
2614 PredictableCodeSizeScope predictable(masm_); | 2614 PredictableCodeSizeScope predictable(masm_, 6 * Assembler::kInstrSize); |
2615 int delta = masm_->InstructionsGeneratedSince(map_check) + kAdditionalDelta; | 2615 int delta = masm_->InstructionsGeneratedSince(map_check) + kAdditionalDelta; |
2616 Label before_push_delta; | 2616 Label before_push_delta; |
2617 __ bind(&before_push_delta); | 2617 __ bind(&before_push_delta); |
2618 __ BlockConstPoolFor(kAdditionalDelta); | 2618 __ BlockConstPoolFor(kAdditionalDelta); |
2619 __ mov(temp, Operand(delta * kPointerSize)); | 2619 __ mov(temp, Operand(delta * kPointerSize)); |
2620 // The mov above can generate one or two instructions. The delta was computed | 2620 // The mov above can generate one or two instructions. The delta was computed |
2621 // for two instructions, so we need to pad here in case of one instruction. | 2621 // for two instructions, so we need to pad here in case of one instruction. |
2622 if (masm_->InstructionsGeneratedSince(&before_push_delta) != 2) { | 2622 if (masm_->InstructionsGeneratedSince(&before_push_delta) != 2) { |
2623 ASSERT_EQ(1, masm_->InstructionsGeneratedSince(&before_push_delta)); | 2623 ASSERT_EQ(1, masm_->InstructionsGeneratedSince(&before_push_delta)); |
2624 __ nop(); | 2624 __ nop(); |
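The padding above exists because on ARM a mov with an immediate operand assembles to one instruction when the value fits an 8-bit rotated encoding and to two otherwise (on ARMv7, a movw/movt pair), while the delta was computed assuming two; the nop keeps everything after it at a fixed offset. Since kPointerSize and Assembler::kInstrSize are both 4 on ARM, delta * kPointerSize is the byte distance from the map check back to the point the stub returns to. A hypothetical helper showing how the patching side of this contract can locate the map-check site from that value (names are illustrative, not V8's API):

    #include <cstdint>

    namespace sketch {
    const int kInstrSize = 4;  // ARM instructions are 4 bytes wide.

    // delta = InstructionsGeneratedSince(map_check) + kAdditionalDelta,
    // i.e. the instruction count from the map check to the return point,
    // read back out of r4's safepoint slot by the stub.
    inline uintptr_t MapCheckSite(uintptr_t return_address, int delta) {
      return return_address - static_cast<uintptr_t>(delta) * kInstrSize;
    }
    }  // namespace sketch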
(...skipping 3008 matching lines...)
5633 LEnvironment* env = instr->environment(); | 5633 LEnvironment* env = instr->environment(); |
5634 // There is no LLazyBailout instruction for stack-checks. We have to | 5634 // There is no LLazyBailout instruction for stack-checks. We have to |
5635 // prepare for lazy deoptimization explicitly here. | 5635 // prepare for lazy deoptimization explicitly here. |
5636 if (instr->hydrogen()->is_function_entry()) { | 5636 if (instr->hydrogen()->is_function_entry()) { |
5637 // Perform stack overflow check. | 5637 // Perform stack overflow check. |
5638 Label done; | 5638 Label done; |
5639 __ LoadRoot(ip, Heap::kStackLimitRootIndex); | 5639 __ LoadRoot(ip, Heap::kStackLimitRootIndex); |
5640 __ cmp(sp, Operand(ip)); | 5640 __ cmp(sp, Operand(ip)); |
5641 __ b(hs, &done); | 5641 __ b(hs, &done); |
5642 StackCheckStub stub; | 5642 StackCheckStub stub; |
5643 PredictableCodeSizeScope predictable(masm_); | 5643 PredictableCodeSizeScope predictable(masm_, 2 * Assembler::kInstrSize); |
5644 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); | 5644 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); |
5645 EnsureSpaceForLazyDeopt(); | 5645 EnsureSpaceForLazyDeopt(); |
5646 __ bind(&done); | 5646 __ bind(&done); |
5647 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); | 5647 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); |
5648 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); | 5648 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); |
5649 } else { | 5649 } else { |
5650 ASSERT(instr->hydrogen()->is_backwards_branch()); | 5650 ASSERT(instr->hydrogen()->is_backwards_branch()); |
5651 // Perform stack overflow check if this goto needs it before jumping. | 5651 // Perform stack overflow check if this goto needs it before jumping. |
5652 DeferredStackCheck* deferred_stack_check = | 5652 DeferredStackCheck* deferred_stack_check = |
5653 new(zone()) DeferredStackCheck(this, instr); | 5653 new(zone()) DeferredStackCheck(this, instr); |
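The function-entry path above is the standard limit check: compare sp against the stack limit from the root list and call the stub only when the stack has overflowed, with the call wrapped in a PredictableCodeSizeScope of 2 * Assembler::kInstrSize because lazy-deopt patching needs the call sequence to have a fixed size. In C-like terms the emitted guard amounts to the sketch below; StackCheckStubCall is a hypothetical stand-in, and the real code of course keeps everything in registers:

    #include <cstdint>

    void StackCheckStubCall();  // Hypothetical stand-in for the stub call.

    // Sketch of the guard's logic; b(hs, ...) is an unsigned >= branch.
    void StackGuardSketch(uintptr_t sp, uintptr_t stack_limit) {
      if (sp >= stack_limit) return;  // __ b(hs, &done);
      StackCheckStubCall();           // May handle an interrupt or raise a
                                      // stack overflow; also a lazy-deopt
                                      // point, hence the safepoint record.
    }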
(...skipping 118 matching lines...)
5772 __ sub(scratch, result, Operand(index, LSL, kPointerSizeLog2 - kSmiTagSize)); | 5772 __ sub(scratch, result, Operand(index, LSL, kPointerSizeLog2 - kSmiTagSize)); |
5773 __ ldr(result, FieldMemOperand(scratch, | 5773 __ ldr(result, FieldMemOperand(scratch, |
5774 FixedArray::kHeaderSize - kPointerSize)); | 5774 FixedArray::kHeaderSize - kPointerSize)); |
5775 __ bind(&done); | 5775 __ bind(&done); |
5776 } | 5776 } |
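For the out-of-object load ending here, result holds the properties backing store at this point, and the index register holds a smi which, in V8's encoding, is the negated out-of-object property index plus one; that is why the scaled index is subtracted and the header offset is biased back by one pointer. Worked through under the assumed 32-bit layout (kPointerSize == 4, so kPointerSizeLog2 == 2, and kSmiTagSize == 1), a property in slot p resolves as:

    // index (smi)            = -(p + 1) << 1   = -(p + 1) * 2
    // Operand(index, LSL #1) = -(p + 1) * 4    (smi -> byte offset)
    // sub(scratch, result, ...)            -> properties + (p + 1) * 4
    // ldr at FixedArray::kHeaderSize - kPointerSize
    //                   -> properties + kHeaderSize + p * 4 == element p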
5777 | 5777 |
5778 | 5778 |
5779 #undef __ | 5779 #undef __ |
5780 | 5780 |
5781 } } // namespace v8::internal | 5781 } } // namespace v8::internal |