OLD | NEW |
1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/v8.h" | 5 #include "src/v8.h" |
6 | 6 |
7 #include "src/base/bits.h" | 7 #include "src/base/bits.h" |
8 #include "src/code-factory.h" | 8 #include "src/code-factory.h" |
9 #include "src/code-stubs.h" | 9 #include "src/code-stubs.h" |
10 #include "src/cpu-profiler.h" | 10 #include "src/cpu-profiler.h" |
(...skipping 32 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
43 bool LCodeGen::GenerateCode() { | 43 bool LCodeGen::GenerateCode() { |
44 LPhase phase("Z_Code generation", chunk()); | 44 LPhase phase("Z_Code generation", chunk()); |
45 DCHECK(is_unused()); | 45 DCHECK(is_unused()); |
46 status_ = GENERATING; | 46 status_ = GENERATING; |
47 | 47 |
48 // Open a frame scope to indicate that there is a frame on the stack. The | 48 // Open a frame scope to indicate that there is a frame on the stack. The |
49 // NONE indicates that the scope shouldn't actually generate code to set up | 49 // NONE indicates that the scope shouldn't actually generate code to set up |
50 // the frame (that is done in GeneratePrologue). | 50 // the frame (that is done in GeneratePrologue). |
51 FrameScope frame_scope(masm_, StackFrame::NONE); | 51 FrameScope frame_scope(masm_, StackFrame::NONE); |
52 | 52 |
53 return GeneratePrologue() && GenerateBody() && GenerateDeferredCode() && | 53 bool rc = GeneratePrologue() && GenerateBody() && GenerateDeferredCode() && |
54 GenerateJumpTable() && GenerateSafepointTable(); | 54 GenerateJumpTable() && GenerateSafepointTable(); |
| 55 #ifdef DEBUG |
| 56 if (!rc) { |
| 57 // Avoid DCHECK(!is_linked()) failure in ~Label() |
| 58 masm()->EmitConstantPool(); |
| 59 } |
| 60 #endif |
| 61 return rc; |
55 } | 62 } |
56 | 63 |
57 | 64 |
58 void LCodeGen::FinishCode(Handle<Code> code) { | 65 void LCodeGen::FinishCode(Handle<Code> code) { |
59 DCHECK(is_done()); | 66 DCHECK(is_done()); |
60 code->set_stack_slots(GetStackSlotCount()); | 67 code->set_stack_slots(GetStackSlotCount()); |
61 code->set_safepoint_table_offset(safepoints_.GetCodeOffset()); | 68 code->set_safepoint_table_offset(safepoints_.GetCodeOffset()); |
62 PopulateDeoptimizationData(code); | 69 PopulateDeoptimizationData(code); |
63 } | 70 } |
64 | 71 |
(...skipping 300 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
365 DCHECK(info()->IsStub()); | 372 DCHECK(info()->IsStub()); |
366 RestoreCallerDoubles(); | 373 RestoreCallerDoubles(); |
367 } | 374 } |
368 | 375 |
369 // Add the base address to the offset previously loaded in entry_offset. | 376 // Add the base address to the offset previously loaded in entry_offset. |
370 __ mov(ip, Operand(ExternalReference::ForDeoptEntry(base))); | 377 __ mov(ip, Operand(ExternalReference::ForDeoptEntry(base))); |
371 __ add(ip, entry_offset, ip); | 378 __ add(ip, entry_offset, ip); |
372 __ Jump(ip); | 379 __ Jump(ip); |
373 } | 380 } |
374 | 381 |
| 382 masm()->EmitConstantPool(); |
| 383 |
375 // The deoptimization jump table is the last part of the instruction | 384 // The deoptimization jump table is the last part of the instruction |
376 // sequence. Mark the generated code as done unless we bailed out. | 385 // sequence. Mark the generated code as done unless we bailed out. |
377 if (!is_aborted()) status_ = DONE; | 386 if (!is_aborted()) status_ = DONE; |
378 return !is_aborted(); | 387 return !is_aborted(); |
379 } | 388 } |
380 | 389 |
381 | 390 |
382 bool LCodeGen::GenerateSafepointTable() { | 391 bool LCodeGen::GenerateSafepointTable() { |
383 DCHECK(is_done()); | 392 DCHECK(is_done()); |
384 safepoints_.Emit(masm(), GetStackSlotCount()); | 393 safepoints_.Emit(masm(), GetStackSlotCount()); |
(...skipping 2493 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2878 // A Smi is not instance of anything. | 2887 // A Smi is not instance of anything. |
2879 __ JumpIfSmi(object, &false_result); | 2888 __ JumpIfSmi(object, &false_result); |
2880 | 2889 |
2881 // This is the inlined call site instanceof cache. The two occurrences of the | 2890 // This is the inlined call site instanceof cache. The two occurrences of the |
2882 // hole value will be patched to the last map/result pair generated by the | 2891 // hole value will be patched to the last map/result pair generated by the |
2883 // instanceof stub. | 2892 // instanceof stub. |
2884 Label cache_miss; | 2893 Label cache_miss; |
2885 Register map = temp; | 2894 Register map = temp; |
2886 __ LoadP(map, FieldMemOperand(object, HeapObject::kMapOffset)); | 2895 __ LoadP(map, FieldMemOperand(object, HeapObject::kMapOffset)); |
2887 { | 2896 { |
2888 // Block constant pool emission to ensure the positions of instructions are | 2897 // Block trampoline emission to ensure the positions of instructions are |
2889 // as expected by the patcher. See InstanceofStub::Generate(). | 2898 // as expected by the patcher. See InstanceofStub::Generate(). |
2890 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_); | 2899 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_); |
2891 __ bind(deferred->map_check()); // Label for calculating code patching. | 2900 __ bind(deferred->map_check()); // Label for calculating code patching. |
2892 // We use Factory::the_hole_value() on purpose instead of loading from the | 2901 // We use Factory::the_hole_value() on purpose instead of loading from the |
2893 // root array to force relocation to be able to later patch with | 2902 // root array to force relocation to be able to later patch with |
2894 // the cached map. | 2903 // the cached map. |
2895 Handle<Cell> cell = factory()->NewCell(factory()->the_hole_value()); | 2904 Handle<Cell> cell = factory()->NewCell(factory()->the_hole_value()); |
2896 __ mov(ip, Operand(cell)); | 2905 __ mov(ip, Operand(cell)); |
2897 __ LoadP(ip, FieldMemOperand(ip, Cell::kValueOffset)); | 2906 __ LoadP(ip, FieldMemOperand(ip, Cell::kValueOffset)); |
2898 __ cmp(map, ip); | 2907 __ cmp(map, ip); |
2899 __ bne(&cache_miss); | 2908 __ bc_short(ne, &cache_miss); |
2900 // We use Factory::the_hole_value() on purpose instead of loading from the | 2909 // We use Factory::the_hole_value() on purpose instead of loading from the |
2901 // root array to force relocation to be able to later patch | 2910 // root array to force relocation to be able to later patch |
2902 // with true or false. | 2911 // with true or false. |
2903 __ mov(result, Operand(factory()->the_hole_value())); | 2912 __ mov(result, Operand(factory()->the_hole_value())); |
2904 } | 2913 } |
2905 __ b(&done); | 2914 __ b(&done); |
2906 | 2915 |
2907 // The inlined call site cache did not match. Check null and string before | 2916 // The inlined call site cache did not match. Check null and string before |
2908 // calling the deferred code. | 2917 // calling the deferred code. |
2909 __ bind(&cache_miss); | 2918 __ bind(&cache_miss); |
(...skipping 3291 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
6201 __ Push(scope_info); | 6210 __ Push(scope_info); |
6202 __ push(ToRegister(instr->function())); | 6211 __ push(ToRegister(instr->function())); |
6203 CallRuntime(Runtime::kPushBlockContext, 2, instr); | 6212 CallRuntime(Runtime::kPushBlockContext, 2, instr); |
6204 RecordSafepoint(Safepoint::kNoLazyDeopt); | 6213 RecordSafepoint(Safepoint::kNoLazyDeopt); |
6205 } | 6214 } |
6206 | 6215 |
6207 | 6216 |
6208 #undef __ | 6217 #undef __ |
6209 } | 6218 } |
6210 } // namespace v8::internal | 6219 } // namespace v8::internal |
OLD | NEW |