| OLD | NEW | 
|---|---|
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. | 
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without | 
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are | 
| 4 // met: | 4 // met: | 
| 5 // | 5 // | 
| 6 //     * Redistributions of source code must retain the above copyright | 6 //     * Redistributions of source code must retain the above copyright | 
| 7 //       notice, this list of conditions and the following disclaimer. | 7 //       notice, this list of conditions and the following disclaimer. | 
| 8 //     * Redistributions in binary form must reproduce the above | 8 //     * Redistributions in binary form must reproduce the above | 
| 9 //       copyright notice, this list of conditions and the following | 9 //       copyright notice, this list of conditions and the following | 
| 10 //       disclaimer in the documentation and/or other materials provided | 10 //       disclaimer in the documentation and/or other materials provided | 
| (...skipping 308 matching lines...) | |
| 319 | 319 | 
| 320   Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_); | 320   Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_); | 
| 321   __ RecordComment("[ Deoptimization jump table"); | 321   __ RecordComment("[ Deoptimization jump table"); | 
| 322   Label table_start; | 322   Label table_start; | 
| 323   __ bind(&table_start); | 323   __ bind(&table_start); | 
| 324   Label needs_frame_not_call; | 324   Label needs_frame_not_call; | 
| 325   Label needs_frame_is_call; | 325   Label needs_frame_is_call; | 
| 326   for (int i = 0; i < deopt_jump_table_.length(); i++) { | 326   for (int i = 0; i < deopt_jump_table_.length(); i++) { | 
| 327     __ bind(&deopt_jump_table_[i].label); | 327     __ bind(&deopt_jump_table_[i].label); | 
| 328     Address entry = deopt_jump_table_[i].address; | 328     Address entry = deopt_jump_table_[i].address; | 
|  | 329     bool is_lazy_deopt = deopt_jump_table_[i].is_lazy_deopt; | 
|  | 330     Deoptimizer::BailoutType type = | 
|  | 331         is_lazy_deopt ? Deoptimizer::LAZY : Deoptimizer::EAGER; | 
|  | 332     int id = Deoptimizer::GetDeoptimizationId(entry, type); | 
|  | 333     if (id == Deoptimizer::kNotDeoptimizationEntry) { | 
|  | 334       Comment(";;; jump table entry %d.", i); | 
|  | 335     } else { | 
|  | 336       Comment(";;; jump table entry %d: deoptimization bailout %d.", i, id); | 
|  | 337     } | 
| 329     __ li(t9, Operand(ExternalReference::ForDeoptEntry(entry))); | 338     __ li(t9, Operand(ExternalReference::ForDeoptEntry(entry))); | 
| 330     if (deopt_jump_table_[i].needs_frame) { | 339     if (deopt_jump_table_[i].needs_frame) { | 
| 331       if (deopt_jump_table_[i].is_lazy_deopt) { | 340       if (is_lazy_deopt) { | 
| 332         if (needs_frame_is_call.is_bound()) { | 341         if (needs_frame_is_call.is_bound()) { | 
| 333           __ Branch(&needs_frame_is_call); | 342           __ Branch(&needs_frame_is_call); | 
| 334         } else { | 343         } else { | 
| 335           __ bind(&needs_frame_is_call); | 344           __ bind(&needs_frame_is_call); | 
| 336           __ MultiPush(cp.bit() | fp.bit() | ra.bit()); | 345           __ MultiPush(cp.bit() | fp.bit() | ra.bit()); | 
| 337           // This variant of deopt can only be used with stubs. Since we don't | 346           // This variant of deopt can only be used with stubs. Since we don't | 
| 338           // have a function pointer to install in the stack frame that we're | 347           // have a function pointer to install in the stack frame that we're | 
| 339           // building, install a special marker there instead. | 348           // building, install a special marker there instead. | 
| 340           ASSERT(info()->IsStub()); | 349           ASSERT(info()->IsStub()); | 
| 341           __ li(scratch0(), Operand(Smi::FromInt(StackFrame::STUB))); | 350           __ li(scratch0(), Operand(Smi::FromInt(StackFrame::STUB))); | 
| (...skipping 11 matching lines...) | |
| 353           // have a function pointer to install in the stack frame that we're | 362           // have a function pointer to install in the stack frame that we're | 
| 354           // building, install a special marker there instead. | 363           // building, install a special marker there instead. | 
| 355           ASSERT(info()->IsStub()); | 364           ASSERT(info()->IsStub()); | 
| 356           __ li(scratch0(), Operand(Smi::FromInt(StackFrame::STUB))); | 365           __ li(scratch0(), Operand(Smi::FromInt(StackFrame::STUB))); | 
| 357           __ push(scratch0()); | 366           __ push(scratch0()); | 
| 358           __ Addu(fp, sp, Operand(2 * kPointerSize)); | 367           __ Addu(fp, sp, Operand(2 * kPointerSize)); | 
| 359           __ Jump(t9); | 368           __ Jump(t9); | 
| 360         } | 369         } | 
| 361       } | 370       } | 
| 362     } else { | 371     } else { | 
| 363       if (deopt_jump_table_[i].is_lazy_deopt) { | 372       if (is_lazy_deopt) { | 
| 364         __ Call(t9); | 373         __ Call(t9); | 
| 365       } else { | 374       } else { | 
| 366         __ Jump(t9); | 375         __ Jump(t9); | 
| 367       } | 376       } | 
| 368     } | 377     } | 
| 369   } | 378   } | 
| 370   __ RecordComment("]"); | 379   __ RecordComment("]"); | 
| 371 | 380 | 
| 372   // The deoptimization jump table is the last part of the instruction | 381   // The deoptimization jump table is the last part of the instruction | 
| 373   // sequence. Mark the generated code as done unless we bailed out. | 382   // sequence. Mark the generated code as done unless we bailed out. | 
| (...skipping 5448 matching lines...) | |
| 5822   __ Subu(scratch, result, scratch); | 5831   __ Subu(scratch, result, scratch); | 
| 5823   __ lw(result, FieldMemOperand(scratch, | 5832   __ lw(result, FieldMemOperand(scratch, | 
| 5824                                 FixedArray::kHeaderSize - kPointerSize)); | 5833                                 FixedArray::kHeaderSize - kPointerSize)); | 
| 5825   __ bind(&done); | 5834   __ bind(&done); | 
| 5826 } | 5835 } | 
| 5827 | 5836 | 
| 5828 | 5837 | 
| 5829 #undef __ | 5838 #undef __ | 
| 5830 | 5839 | 
| 5831 } }  // namespace v8::internal | 5840 } }  // namespace v8::internal | 
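
For review context, the added hunk (new lines 329-337) boils down to: for each deoptimization jump table entry, pick the bailout type from the entry's lazy flag, look up the deoptimization id for the entry's address, and emit a descriptive assembler comment before branching to it. Below is a minimal standalone sketch of that comment-selection logic, not the real V8 API: `JumpTableEntry`, `GetDeoptimizationId`, and `kNotDeoptimizationEntry` here are simplified stand-ins for the V8 internals.

```cpp
// Minimal sketch of the comment-selection logic added in this hunk.
// The types and the lookup function are hypothetical stand-ins, not V8's.
#include <cstdio>
#include <vector>

enum BailoutType { EAGER, LAZY };
constexpr int kNotDeoptimizationEntry = -1;

struct JumpTableEntry {
  int address;         // stand-in for the deopt entry address
  bool is_lazy_deopt;  // chooses LAZY vs. EAGER lookup
};

// Hypothetical lookup: returns the bailout id for a known entry address,
// or kNotDeoptimizationEntry when the address is not a deopt entry.
int GetDeoptimizationId(int address, BailoutType /*type*/) {
  return address >= 0 ? address : kNotDeoptimizationEntry;
}

int main() {
  std::vector<JumpTableEntry> table = {{0, false}, {7, true}, {-1, false}};
  for (size_t i = 0; i < table.size(); ++i) {
    // Same decision the patch makes: lazy entries use the LAZY table.
    BailoutType type = table[i].is_lazy_deopt ? LAZY : EAGER;
    int id = GetDeoptimizationId(table[i].address, type);
    if (id == kNotDeoptimizationEntry) {
      std::printf(";;; jump table entry %zu.\n", i);
    } else {
      std::printf(";;; jump table entry %zu: deoptimization bailout %d.\n",
                  i, id);
    }
  }
  return 0;
}
```

The point of the change is purely diagnostic: the generated comments annotate each jump table slot with its bailout id (or mark it as a non-deopt entry), while the hoisted `is_lazy_deopt` local replaces the repeated `deopt_jump_table_[i].is_lazy_deopt` lookups without changing behavior.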