| OLD | NEW |
|---|---|
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. | 
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without | 
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are | 
| 4 // met: | 4 // met: | 
| 5 // | 5 // | 
| 6 //     * Redistributions of source code must retain the above copyright | 6 //     * Redistributions of source code must retain the above copyright | 
| 7 //       notice, this list of conditions and the following disclaimer. | 7 //       notice, this list of conditions and the following disclaimer. | 
| 8 //     * Redistributions in binary form must reproduce the above | 8 //     * Redistributions in binary form must reproduce the above | 
| 9 //       copyright notice, this list of conditions and the following | 9 //       copyright notice, this list of conditions and the following | 
| 10 //       disclaimer in the documentation and/or other materials provided | 10 //       disclaimer in the documentation and/or other materials provided | 
| (...skipping 251 matching lines...) | |
| 262 | 262 | 
| 263   // Adjust the frame size, subsuming the unoptimized frame into the | 263   // Adjust the frame size, subsuming the unoptimized frame into the | 
| 264   // optimized frame. | 264   // optimized frame. | 
| 265   int slots = GetStackSlotCount() - graph()->osr()->UnoptimizedFrameSlots(); | 265   int slots = GetStackSlotCount() - graph()->osr()->UnoptimizedFrameSlots(); | 
| 266   ASSERT(slots >= 0); | 266   ASSERT(slots >= 0); | 
| 267   __ sub(sp, sp, Operand(slots * kPointerSize)); | 267   __ sub(sp, sp, Operand(slots * kPointerSize)); | 
| 268 } | 268 } | 
| 269 | 269 | 
| 270 | 270 | 
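A note on the OSR hunk above: on-stack-replacement entry reuses the slots the unoptimized frame has already pushed, so only the difference is allocated. For example (figures illustrative), if `GetStackSlotCount()` is 10 and `UnoptimizedFrameSlots()` is 6, the `sub` lowers `sp` by just `4 * kPointerSize` bytes.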
| 271 void LCodeGen::GenerateBodyInstructionPre(LInstruction* instr) { | 271 void LCodeGen::GenerateBodyInstructionPre(LInstruction* instr) { | 
|  | 272   if (instr->IsCall()) { | 
|  | 273     EnsureSpaceForLazyDeopt(Deoptimizer::patch_size()); | 
|  | 274   } | 
| 272   if (!instr->IsLazyBailout() && !instr->IsGap()) { | 275   if (!instr->IsLazyBailout() && !instr->IsGap()) { | 
| 273     safepoints_.BumpLastLazySafepointIndex(); | 276     safepoints_.BumpLastLazySafepointIndex(); | 
| 274   } | 277   } | 
| 275 } | 278 } | 
| 276 | 279 | 
| 277 | 280 | 
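The `GenerateBodyInstructionPre` hunk above is the core of this change: instead of padding at each lazy bailout site, `Deoptimizer::patch_size()` bytes of patchable space are now reserved before every call instruction. Below is a minimal sketch of what `EnsureSpaceForLazyDeopt` does, reconstructed from the nop loop visible in the hunk near new line 5529; the free-function signature is illustrative (in V8 it is a member of `LCodeGen` that uses `masm()` and the `last_lazy_deopt_pc_` field):

```cpp
// Sketch only: pad with nops until at least space_needed bytes separate the
// current pc from the last pc reserved for lazy deoptimization, so the
// deoptimizer can later patch a call sequence over this site.
void EnsureSpaceForLazyDeoptSketch(MacroAssembler* masm,
                                   int* last_lazy_deopt_pc,
                                   int space_needed) {
  int current_pc = masm->pc_offset();
  if (current_pc < *last_lazy_deopt_pc + space_needed) {
    int padding_size = *last_lazy_deopt_pc + space_needed - current_pc;
    ASSERT(padding_size % Assembler::kInstrSize == 0);  // ARM: 4-byte units
    while (padding_size > 0) {
      masm->nop();
      padding_size -= Assembler::kInstrSize;
    }
  }
  // Record where the reserved region ends (last_lazy_deopt_pc_ in V8).
  *last_lazy_deopt_pc = masm->pc_offset();
}
```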
| 278 bool LCodeGen::GenerateDeferredCode() { | 281 bool LCodeGen::GenerateDeferredCode() { | 
| 279   ASSERT(is_generating()); | 282   ASSERT(is_generating()); | 
| 280   if (deferred_.length() > 0) { | 283   if (deferred_.length() > 0) { | 
| 281     for (int i = 0; !is_aborted() && i < deferred_.length(); i++) { | 284     for (int i = 0; !is_aborted() && i < deferred_.length(); i++) { | 
| (...skipping 5244 matching lines...) | |
| 5526         __ nop(); | 5529         __ nop(); | 
| 5527         padding_size -= Assembler::kInstrSize; | 5530         padding_size -= Assembler::kInstrSize; | 
| 5528       } | 5531       } | 
| 5529     } | 5532     } | 
| 5530   } | 5533   } | 
| 5531   last_lazy_deopt_pc_ = masm()->pc_offset(); | 5534   last_lazy_deopt_pc_ = masm()->pc_offset(); | 
| 5532 } | 5535 } | 
| 5533 | 5536 | 
| 5534 | 5537 | 
| 5535 void LCodeGen::DoLazyBailout(LLazyBailout* instr) { | 5538 void LCodeGen::DoLazyBailout(LLazyBailout* instr) { | 
| 5536   EnsureSpaceForLazyDeopt(Deoptimizer::patch_size()); | 5539   last_lazy_deopt_pc_ = masm()->pc_offset(); | 
| 5537   ASSERT(instr->HasEnvironment()); | 5540   ASSERT(instr->HasEnvironment()); | 
| 5538   LEnvironment* env = instr->environment(); | 5541   LEnvironment* env = instr->environment(); | 
| 5539   RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); | 5542   RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); | 
| 5540   safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); | 5543   safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); | 
| 5541 } | 5544 } | 
| 5542 | 5545 | 
| 5543 | 5546 | 
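With the padding handled per call site, `DoLazyBailout` above shrinks accordingly: it now only records `masm()->pc_offset()` into `last_lazy_deopt_pc_` and registers the environment for lazy deoptimization, while the nop emission sketched earlier runs in `GenerateBodyInstructionPre`.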
| 5544 void LCodeGen::DoDeoptimize(LDeoptimize* instr) { | 5547 void LCodeGen::DoDeoptimize(LDeoptimize* instr) { | 
| 5545   Deoptimizer::BailoutType type = instr->hydrogen()->type(); | 5548   Deoptimizer::BailoutType type = instr->hydrogen()->type(); | 
| 5546   // TODO(danno): Stubs expect all deopts to be lazy for historical reasons (the | 5549   // TODO(danno): Stubs expect all deopts to be lazy for historical reasons (the | 
| (...skipping 53 matching lines...) | |
| 5600     Label done; | 5603     Label done; | 
| 5601     __ LoadRoot(ip, Heap::kStackLimitRootIndex); | 5604     __ LoadRoot(ip, Heap::kStackLimitRootIndex); | 
| 5602     __ cmp(sp, Operand(ip)); | 5605     __ cmp(sp, Operand(ip)); | 
| 5603     __ b(hs, &done); | 5606     __ b(hs, &done); | 
| 5604     PredictableCodeSizeScope predictable(masm_, 2 * Assembler::kInstrSize); | 5607     PredictableCodeSizeScope predictable(masm_, 2 * Assembler::kInstrSize); | 
| 5605     ASSERT(instr->context()->IsRegister()); | 5608     ASSERT(instr->context()->IsRegister()); | 
| 5606     ASSERT(ToRegister(instr->context()).is(cp)); | 5609     ASSERT(ToRegister(instr->context()).is(cp)); | 
| 5607     CallCode(isolate()->builtins()->StackCheck(), | 5610     CallCode(isolate()->builtins()->StackCheck(), | 
| 5608               RelocInfo::CODE_TARGET, | 5611               RelocInfo::CODE_TARGET, | 
| 5609               instr); | 5612               instr); | 
| 5610     EnsureSpaceForLazyDeopt(Deoptimizer::patch_size()); |  | 
| 5611     __ bind(&done); | 5613     __ bind(&done); | 
| 5612     RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); |  | 
| 5613     safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); |  | 
| 5614   } else { | 5614   } else { | 
| 5615     ASSERT(instr->hydrogen()->is_backwards_branch()); | 5615     ASSERT(instr->hydrogen()->is_backwards_branch()); | 
| 5616     // Perform stack overflow check if this goto needs it before jumping. | 5616     // Perform stack overflow check if this goto needs it before jumping. | 
| 5617     DeferredStackCheck* deferred_stack_check = | 5617     DeferredStackCheck* deferred_stack_check = | 
| 5618         new(zone()) DeferredStackCheck(this, instr); | 5618         new(zone()) DeferredStackCheck(this, instr); | 
| 5619     __ LoadRoot(ip, Heap::kStackLimitRootIndex); | 5619     __ LoadRoot(ip, Heap::kStackLimitRootIndex); | 
| 5620     __ cmp(sp, Operand(ip)); | 5620     __ cmp(sp, Operand(ip)); | 
| 5621     __ b(lo, deferred_stack_check->entry()); | 5621     __ b(lo, deferred_stack_check->entry()); | 
| 5622     EnsureSpaceForLazyDeopt(Deoptimizer::patch_size()); | 5622     EnsureSpaceForLazyDeopt(Deoptimizer::patch_size()); | 
| 5623     __ bind(instr->done_label()); | 5623     __ bind(instr->done_label()); | 
| (...skipping 111 matching lines...) | |
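The `DoStackCheck` hunk above follows the same pattern: the `StackCheck` builtin call is a call instruction, so `GenerateBodyInstructionPre` already reserves the patchable space before it, and the in-place `EnsureSpaceForLazyDeopt` after the call is deleted. The explicit `RegisterEnvironmentForDeoptimization` and `RecordLazyDeoptimizationIndex` calls go with it, the lazy-deopt bookkeeping for this call presumably moving to the shared lazy-bailout path. Note the backwards-branch arm keeps its explicit `EnsureSpaceForLazyDeopt` at line 5622, plausibly because the deferred stack check's call is emitted out of line, where the per-instruction hook does not run.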
| 5735   __ sub(scratch, result, Operand::PointerOffsetFromSmiKey(index)); | 5735   __ sub(scratch, result, Operand::PointerOffsetFromSmiKey(index)); | 
| 5736   __ ldr(result, FieldMemOperand(scratch, | 5736   __ ldr(result, FieldMemOperand(scratch, | 
| 5737                                  FixedArray::kHeaderSize - kPointerSize)); | 5737                                  FixedArray::kHeaderSize - kPointerSize)); | 
| 5738   __ bind(&done); | 5738   __ bind(&done); | 
| 5739 } | 5739 } | 
| 5740 | 5740 | 
| 5741 | 5741 | 
| 5742 #undef __ | 5742 #undef __ | 
| 5743 | 5743 | 
| 5744 } }  // namespace v8::internal | 5744 } }  // namespace v8::internal | 