| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 553 matching lines...) |
| 564 LInstruction* instr) { | 564 LInstruction* instr) { |
| 565 CallCodeGeneric(code, mode, instr, RECORD_SIMPLE_SAFEPOINT); | 565 CallCodeGeneric(code, mode, instr, RECORD_SIMPLE_SAFEPOINT); |
| 566 } | 566 } |
| 567 | 567 |
| 568 | 568 |
| 569 void LCodeGen::CallCodeGeneric(Handle<Code> code, | 569 void LCodeGen::CallCodeGeneric(Handle<Code> code, |
| 570 RelocInfo::Mode mode, | 570 RelocInfo::Mode mode, |
| 571 LInstruction* instr, | 571 LInstruction* instr, |
| 572 SafepointMode safepoint_mode) { | 572 SafepointMode safepoint_mode) { |
| 573 ASSERT(instr != NULL); | 573 ASSERT(instr != NULL); |
| 574 // Block literal pool emission to ensure nop indicating no inlined smi code |
| 575 // is in the correct position. |
| 576 Assembler::BlockConstPoolScope block_const_pool(masm()); |
| 574 LPointerMap* pointers = instr->pointer_map(); | 577 LPointerMap* pointers = instr->pointer_map(); |
| 575 RecordPosition(pointers->position()); | 578 RecordPosition(pointers->position()); |
| 576 __ Call(code, mode); | 579 __ Call(code, mode); |
| 577 RecordSafepointWithLazyDeopt(instr, safepoint_mode); | 580 RecordSafepointWithLazyDeopt(instr, safepoint_mode); |
| 578 | 581 |
| 579 // Signal that we don't inline smi code before these stubs in the | 582 // Signal that we don't inline smi code before these stubs in the |
| 580 // optimizing code generator. | 583 // optimizing code generator. |
| 581 if (code->kind() == Code::BINARY_OP_IC || | 584 if (code->kind() == Code::BINARY_OP_IC || |
| 582 code->kind() == Code::COMPARE_IC) { | 585 code->kind() == Code::COMPARE_IC) { |
| 583 __ nop(); | 586 __ nop(); |
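The new `Assembler::BlockConstPoolScope` guard keeps the ARM assembler from dumping its constant pool between the `Call` and the marker `nop`, so the nop that signals "no inlined smi code" stays at the offset the patcher expects. The following is a minimal, self-contained sketch of the RAII idea; the toy `Assembler` class and its printf output are illustrative stand-ins for V8's real assembler, which uses a similar nesting counter internally.

```cpp
#include <cstdio>

// Toy stand-in for the ARM Assembler; hypothetical, for illustration only.
class Assembler {
 public:
  void StartBlockConstPool() { ++const_pool_blocked_nesting_; }
  void EndBlockConstPool() { --const_pool_blocked_nesting_; }
  bool is_const_pool_blocked() const { return const_pool_blocked_nesting_ > 0; }

  // Called between instructions; emits the pool only when not blocked.
  void CheckConstPool() {
    if (is_const_pool_blocked()) return;
    std::printf("constant pool emitted here\n");
  }

  // RAII guard: while it is alive the pool cannot be emitted, so the
  // instructions inside the scope keep their exact relative offsets.
  class BlockConstPoolScope {
   public:
    explicit BlockConstPoolScope(Assembler* assem) : assem_(assem) {
      assem_->StartBlockConstPool();
    }
    ~BlockConstPoolScope() { assem_->EndBlockConstPool(); }
   private:
    Assembler* assem_;
  };

 private:
  int const_pool_blocked_nesting_ = 0;
};

int main() {
  Assembler masm;
  {
    Assembler::BlockConstPoolScope block_const_pool(&masm);
    masm.CheckConstPool();  // suppressed: the marker nop stays next to the call
  }
  masm.CheckConstPool();    // allowed again once the scope closes
  return 0;
}
```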
| (...skipping 971 matching lines...) |
| 1555 } | 1558 } |
| 1556 } | 1559 } |
| 1557 | 1560 |
| 1558 | 1561 |
| 1559 void LCodeGen::DoArithmeticT(LArithmeticT* instr) { | 1562 void LCodeGen::DoArithmeticT(LArithmeticT* instr) { |
| 1560 ASSERT(ToRegister(instr->InputAt(0)).is(r1)); | 1563 ASSERT(ToRegister(instr->InputAt(0)).is(r1)); |
| 1561 ASSERT(ToRegister(instr->InputAt(1)).is(r0)); | 1564 ASSERT(ToRegister(instr->InputAt(1)).is(r0)); |
| 1562 ASSERT(ToRegister(instr->result()).is(r0)); | 1565 ASSERT(ToRegister(instr->result()).is(r0)); |
| 1563 | 1566 |
| 1564 BinaryOpStub stub(instr->op(), NO_OVERWRITE); | 1567 BinaryOpStub stub(instr->op(), NO_OVERWRITE); |
| 1568 // Block literal pool emission to ensure nop indicating no inlined smi code |
| 1569 // is in the correct position. |
| 1570 Assembler::BlockConstPoolScope block_const_pool(masm()); |
| 1565 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); | 1571 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); |
| 1566 __ nop(); // Signals no inlined code. | 1572 __ nop(); // Signals no inlined code. |
| 1567 } | 1573 } |
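In `DoArithmeticT` the scope is opened before `CallCode`, so it stays alive for the rest of the function and covers both the call and the trailing marker `nop`. The sketch below illustrates why the adjacency matters: a patcher that probes the slot immediately after the call only finds the marker if nothing (such as a constant pool) was emitted in between. The encodings, pool word, and probe are hypothetical, for illustration only.

```cpp
#include <cstdint>
#include <cstdio>
#include <vector>

int main() {
  // Hypothetical 4-byte instruction slots; encodings are placeholders.
  const uint32_t kCall = 0xEB000000;      // a "bl <target>"-style call
  const uint32_t kNop = 0xE1A00000;       // mov r0, r0, used as the marker nop
  const uint32_t kPoolWord = 0x12345678;  // a stray constant-pool entry

  // With the pool blocked, the marker nop is the very next slot after the call.
  std::vector<uint32_t> blocked = {kCall, kNop};
  // If a pool had been emitted in between, the probe would miss the marker.
  std::vector<uint32_t> unblocked = {kCall, kPoolWord, kNop};

  auto marker_follows_call = [&](const std::vector<uint32_t>& code) {
    return code.size() > 1 && code[1] == kNop;
  };

  std::printf("blocked pool:   marker found = %d\n", marker_follows_call(blocked));
  std::printf("unblocked pool: marker found = %d\n", marker_follows_call(unblocked));
  return 0;
}
```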
| 1568 | 1574 |
| 1569 | 1575 |
| 1570 int LCodeGen::GetNextEmittedBlock(int block) { | 1576 int LCodeGen::GetNextEmittedBlock(int block) { |
| 1571 for (int i = block + 1; i < graph()->blocks()->length(); ++i) { | 1577 for (int i = block + 1; i < graph()->blocks()->length(); ++i) { |
| 1572 LLabel* label = chunk_->GetLabel(i); | 1578 LLabel* label = chunk_->GetLabel(i); |
| 1573 if (!label->HasReplacement()) return i; | 1579 if (!label->HasReplacement()) return i; |
| 1574 } | 1580 } |
| (...skipping 611 matching lines...) |
| 2186 | 2192 |
| 2187 // A Smi is not instance of anything. | 2193 // A Smi is not instance of anything. |
| 2188 __ JumpIfSmi(object, &false_result); | 2194 __ JumpIfSmi(object, &false_result); |
| 2189 | 2195 |
| 2190 // This is the inlined call site instanceof cache. The two occurrences of the | 2196 // This is the inlined call site instanceof cache. The two occurrences of the |
| 2191 // hole value will be patched to the last map/result pair generated by the | 2197 // hole value will be patched to the last map/result pair generated by the |
| 2192 // instanceof stub. | 2198 // instanceof stub. |
| 2193 Label cache_miss; | 2199 Label cache_miss; |
| 2194 Register map = temp; | 2200 Register map = temp; |
| 2195 __ ldr(map, FieldMemOperand(object, HeapObject::kMapOffset)); | 2201 __ ldr(map, FieldMemOperand(object, HeapObject::kMapOffset)); |
| 2196 __ bind(deferred->map_check()); // Label for calculating code patching. | 2202 { |
| 2197 // We use Factory::the_hole_value() on purpose instead of loading from the | 2203 // Block constant pool emission to ensure the positions of instructions are |
| 2198 // root array to force relocation to be able to later patch with | 2204 // as expected by the patcher. See InstanceofStub::Generate(). |
| 2199 // the cached map. | 2205 Assembler::BlockConstPoolScope block_const_pool(masm()); |
| 2200 Handle<JSGlobalPropertyCell> cell = | 2206 __ bind(deferred->map_check()); // Label for calculating code patching. |
| 2201 factory()->NewJSGlobalPropertyCell(factory()->the_hole_value()); | 2207 // We use Factory::the_hole_value() on purpose instead of loading from the |
| 2202 __ mov(ip, Operand(Handle<Object>(cell))); | 2208 // root array to force relocation to be able to later patch with |
| 2203 __ ldr(ip, FieldMemOperand(ip, JSGlobalPropertyCell::kValueOffset)); | 2209 // the cached map. |
| 2204 __ cmp(map, Operand(ip)); | 2210 Handle<JSGlobalPropertyCell> cell = |
| 2205 __ b(ne, &cache_miss); | 2211 factory()->NewJSGlobalPropertyCell(factory()->the_hole_value()); |
| 2206 // We use Factory::the_hole_value() on purpose instead of loading from the | 2212 __ mov(ip, Operand(Handle<Object>(cell))); |
| 2207 // root array to force relocation to be able to later patch | 2213 __ ldr(ip, FieldMemOperand(ip, JSGlobalPropertyCell::kValueOffset)); |
| 2208 // with true or false. | 2214 __ cmp(map, Operand(ip)); |
| 2209 __ mov(result, Operand(factory()->the_hole_value())); | 2215 __ b(ne, &cache_miss); |
| 2216 // We use Factory::the_hole_value() on purpose instead of loading from the |
| 2217 // root array to force relocation to be able to later patch |
| 2218 // with true or false. |
| 2219 __ mov(result, Operand(factory()->the_hole_value())); |
| 2220 } |
| 2210 __ b(&done); | 2221 __ b(&done); |
| 2211 | 2222 |
| 2212 // The inlined call site cache did not match. Check null and string before | 2223 // The inlined call site cache did not match. Check null and string before |
| 2213 // calling the deferred code. | 2224 // calling the deferred code. |
| 2214 __ bind(&cache_miss); | 2225 __ bind(&cache_miss); |
| 2215 // Null is not instance of anything. | 2226 // Null is not instance of anything. |
| 2216 __ LoadRoot(ip, Heap::kNullValueRootIndex); | 2227 __ LoadRoot(ip, Heap::kNullValueRootIndex); |
| 2217 __ cmp(object, Operand(ip)); | 2228 __ cmp(object, Operand(ip)); |
| 2218 __ b(eq, &false_result); | 2229 __ b(eq, &false_result); |
| 2219 | 2230 |
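Per the new comment, the inlined instanceof cache is patched in place by `InstanceofStub::Generate()`, which rewrites the hole-value literals with the cached map/result pair and therefore expects the guarded instructions at fixed positions relative to the `map_check()` label. Wrapping the sequence in a `BlockConstPoolScope` preserves those positions. Below is a small sketch of the fixed-offset assumption; the layout, indices, and addresses are made up for illustration and do not reflect the stub's actual patching mechanism.

```cpp
#include <cstdint>
#include <cstdio>

int main() {
  const int kInstrSize = 4;  // ARM instructions are 4 bytes wide
  // Hypothetical layout of the guarded sequence, counted from map_check():
  //   [0] mov ip, #<the-hole cell>   <- literal the stub later repatches
  //   [1] ldr ip, [ip, #value]
  //   [2] cmp map, ip
  const int kMapLoadIndex = 0;             // made-up index of the patchable mov
  uintptr_t map_check_label_pc = 0x10000;  // pretend address of map_check()

  // The patcher only needs the label address plus a known delta. A constant
  // pool emitted inside the sequence would shift every index and corrupt
  // whatever instruction happened to land at the expected offset.
  uintptr_t patch_address = map_check_label_pc + kMapLoadIndex * kInstrSize;
  std::printf("cached map literal would be patched at %#llx\n",
              static_cast<unsigned long long>(patch_address));
  return 0;
}
```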
| (...skipping 2696 matching lines...) |
| 4916 __ cmp(temp1, Operand(Smi::FromInt(StackFrame::CONSTRUCT))); | 4927 __ cmp(temp1, Operand(Smi::FromInt(StackFrame::CONSTRUCT))); |
| 4917 } | 4928 } |
| 4918 | 4929 |
| 4919 | 4930 |
| 4920 void LCodeGen::EnsureSpaceForLazyDeopt() { | 4931 void LCodeGen::EnsureSpaceForLazyDeopt() { |
| 4921 // Ensure that we have enough space after the previous lazy-bailout | 4932 // Ensure that we have enough space after the previous lazy-bailout |
| 4922 // instruction for patching the code here. | 4933 // instruction for patching the code here. |
| 4923 int current_pc = masm()->pc_offset(); | 4934 int current_pc = masm()->pc_offset(); |
| 4924 int patch_size = Deoptimizer::patch_size(); | 4935 int patch_size = Deoptimizer::patch_size(); |
| 4925 if (current_pc < last_lazy_deopt_pc_ + patch_size) { | 4936 if (current_pc < last_lazy_deopt_pc_ + patch_size) { |
| 4937 // Block literal pool emission for duration of padding. |
| 4938 Assembler::BlockConstPoolScope block_const_pool(masm()); |
| 4926 int padding_size = last_lazy_deopt_pc_ + patch_size - current_pc; | 4939 int padding_size = last_lazy_deopt_pc_ + patch_size - current_pc; |
| 4927 ASSERT_EQ(0, padding_size % Assembler::kInstrSize); | 4940 ASSERT_EQ(0, padding_size % Assembler::kInstrSize); |
| 4928 while (padding_size > 0) { | 4941 while (padding_size > 0) { |
| 4929 __ nop(); | 4942 __ nop(); |
| 4930 padding_size -= Assembler::kInstrSize; | 4943 padding_size -= Assembler::kInstrSize; |
| 4931 } | 4944 } |
| 4932 } | 4945 } |
| 4933 last_lazy_deopt_pc_ = masm()->pc_offset(); | 4946 last_lazy_deopt_pc_ = masm()->pc_offset(); |
| 4934 } | 4947 } |
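`EnsureSpaceForLazyDeopt` pads with nops until at least `patch_size` bytes separate this point from the previous lazy-deopt call site, so the deoptimizer can later overwrite the gap with its call sequence. Blocking the constant pool for the duration of the padding keeps the gap made of real instructions rather than pool data. A toy model of the padding arithmetic, with invented sizes and program-counter offsets:

```cpp
#include <cassert>
#include <cstdio>

int main() {
  const int kInstrSize = 4;               // bytes per ARM instruction
  const int patch_size = 3 * kInstrSize;  // hypothetical patched-call-sequence size
  int last_lazy_deopt_pc = 100;           // made-up code offsets
  int current_pc = 104;                   // only one instruction emitted since then

  if (current_pc < last_lazy_deopt_pc + patch_size) {
    int padding_size = last_lazy_deopt_pc + patch_size - current_pc;
    assert(padding_size % kInstrSize == 0);
    // With the constant pool blocked, the gap is guaranteed to be pure nops,
    // which the deoptimizer can safely overwrite later.
    while (padding_size > 0) {
      std::printf("nop\n");
      padding_size -= kInstrSize;
    }
  }
  return 0;
}
```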
| 4935 | 4948 |
| (...skipping 193 matching lines...) |
| 5129 __ sub(scratch, result, Operand(index, LSL, kPointerSizeLog2 - kSmiTagSize)); | 5142 __ sub(scratch, result, Operand(index, LSL, kPointerSizeLog2 - kSmiTagSize)); |
| 5130 __ ldr(result, FieldMemOperand(scratch, | 5143 __ ldr(result, FieldMemOperand(scratch, |
| 5131 FixedArray::kHeaderSize - kPointerSize)); | 5144 FixedArray::kHeaderSize - kPointerSize)); |
| 5132 __ bind(&done); | 5145 __ bind(&done); |
| 5133 } | 5146 } |
| 5134 | 5147 |
| 5135 | 5148 |
| 5136 #undef __ | 5149 #undef __ |
| 5137 | 5150 |
| 5138 } } // namespace v8::internal | 5151 } } // namespace v8::internal |