| OLD | NEW | 
|---|---|
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. | 
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without | 
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are | 
| 4 // met: | 4 // met: | 
| 5 // | 5 // | 
| 6 //     * Redistributions of source code must retain the above copyright | 6 //     * Redistributions of source code must retain the above copyright | 
| 7 //       notice, this list of conditions and the following disclaimer. | 7 //       notice, this list of conditions and the following disclaimer. | 
| 8 //     * Redistributions in binary form must reproduce the above | 8 //     * Redistributions in binary form must reproduce the above | 
| 9 //       copyright notice, this list of conditions and the following | 9 //       copyright notice, this list of conditions and the following | 
| 10 //       disclaimer in the documentation and/or other materials provided | 10 //       disclaimer in the documentation and/or other materials provided | 
| (...skipping 25 matching lines...) | |
| 36 namespace v8 { | 36 namespace v8 { | 
| 37 namespace internal { | 37 namespace internal { | 
| 38 | 38 | 
| 39 | 39 | 
| 40 // When invoking builtins, we need to record the safepoint in the middle of | 40 // When invoking builtins, we need to record the safepoint in the middle of | 
| 41 // the invoke instruction sequence generated by the macro assembler. | 41 // the invoke instruction sequence generated by the macro assembler. | 
| 42 class SafepointGenerator : public PostCallGenerator { | 42 class SafepointGenerator : public PostCallGenerator { | 
| 43  public: | 43  public: | 
| 44   SafepointGenerator(LCodeGen* codegen, | 44   SafepointGenerator(LCodeGen* codegen, | 
| 45                      LPointerMap* pointers, | 45                      LPointerMap* pointers, | 
| 46                      int deoptimization_index, | 46                      int deoptimization_index) | 
| 47                      bool ensure_reloc_space = false) |  | 
| 48       : codegen_(codegen), | 47       : codegen_(codegen), | 
| 49         pointers_(pointers), | 48         pointers_(pointers), | 
| 50         deoptimization_index_(deoptimization_index), | 49         deoptimization_index_(deoptimization_index) {} | 
| 51         ensure_reloc_space_(ensure_reloc_space) { } |  | 
| 52   virtual ~SafepointGenerator() { } | 50   virtual ~SafepointGenerator() { } | 
| 53 | 51 | 
| 54   virtual void Generate() { | 52   virtual void Generate() { | 
| 55     // Ensure that we have enough space in the reloc info to patch | 53     // Ensure that we have enough space in the reloc info to patch | 
| 56     // this with calls when doing deoptimization. | 54     // this with calls when doing deoptimization. | 
| 57     if (ensure_reloc_space_) { | 55     codegen_->EnsureRelocSpaceForDeoptimization(); | 
| 58       codegen_->EnsureRelocSpaceForDeoptimization(); |  | 
| 59     } |  | 
| 60     codegen_->RecordSafepoint(pointers_, deoptimization_index_); | 56     codegen_->RecordSafepoint(pointers_, deoptimization_index_); | 
| 61   } | 57   } | 
| 62 | 58 | 
| 63  private: | 59  private: | 
| 64   LCodeGen* codegen_; | 60   LCodeGen* codegen_; | 
| 65   LPointerMap* pointers_; | 61   LPointerMap* pointers_; | 
| 66   int deoptimization_index_; | 62   int deoptimization_index_; | 
| 67   bool ensure_reloc_space_; |  | 
| 68 }; | 63 }; | 
| 69 | 64 | 
| 70 | 65 | 
| 71 #define __ masm()-> | 66 #define __ masm()-> | 
| 72 | 67 | 
| 73 bool LCodeGen::GenerateCode() { | 68 bool LCodeGen::GenerateCode() { | 
| 74   HPhase phase("Code generation", chunk()); | 69   HPhase phase("Code generation", chunk()); | 
| 75   ASSERT(is_unused()); | 70   ASSERT(is_unused()); | 
| 76   status_ = GENERATING; | 71   status_ = GENERATING; | 
| 77   CpuFeatures::Scope scope(SSE2); | 72   CpuFeatures::Scope scope(SSE2); | 
| (...skipping 301 matching lines...) | |
| 379             false); | 374             false); | 
| 380       } | 375       } | 
| 381     } | 376     } | 
| 382 | 377 | 
| 383     AddToTranslation(translation, value, environment->HasTaggedValueAt(i)); | 378     AddToTranslation(translation, value, environment->HasTaggedValueAt(i)); | 
| 384   } | 379   } | 
| 385 } | 380 } | 
| 386 | 381 | 
| 387 | 382 | 
| 388 void LCodeGen::EnsureRelocSpaceForDeoptimization() { | 383 void LCodeGen::EnsureRelocSpaceForDeoptimization() { | 
| 389   // Since we patch the reloc info with RUNTIME_ENTRY calls every patch | 384   // Since we patch the reloc info with RUNTIME_ENTRY calls every | 
| 390   // site will take up 2 bytes + any pc-jumps. | 385   // patch site will take up 2 bytes + any pc-jumps.  We are | 
| 391   // We are conservative and always reserver 6 bytes in case where a | 386   // conservative and always reserve 6 bytes in case a simple pc-jump | 
| 392   // simple pc-jump is not enough. | 387   // is not enough. | 
| 393   uint32_t pc_delta = | 388   uint32_t pc_delta = | 
| 394       masm()->pc_offset() - deoptimization_reloc_size.last_pc_offset; | 389       masm()->pc_offset() - deoptimization_reloc_size.last_pc_offset; | 
| 395   if (is_uintn(pc_delta, 6)) { | 390   if (is_uintn(pc_delta, 6)) { | 
| 396     deoptimization_reloc_size.min_size += 2; | 391     deoptimization_reloc_size.min_size += 2; | 
| 397   } else { | 392   } else { | 
| 398     deoptimization_reloc_size.min_size += 6; | 393     deoptimization_reloc_size.min_size += 6; | 
| 399   } | 394   } | 
| 400   deoptimization_reloc_size.last_pc_offset = masm()->pc_offset(); | 395   deoptimization_reloc_size.last_pc_offset = masm()->pc_offset(); | 
| 401 } | 396 } | 
| 402 | 397 | 
| (...skipping 69 matching lines...) | |
| 472                            bool adjusted) { | 467                            bool adjusted) { | 
| 473   ASSERT(instr != NULL); | 468   ASSERT(instr != NULL); | 
| 474   ASSERT(instr->HasPointerMap()); | 469   ASSERT(instr->HasPointerMap()); | 
| 475   LPointerMap* pointers = instr->pointer_map(); | 470   LPointerMap* pointers = instr->pointer_map(); | 
| 476   RecordPosition(pointers->position()); | 471   RecordPosition(pointers->position()); | 
| 477 | 472 | 
| 478   if (!adjusted) { | 473   if (!adjusted) { | 
| 479     __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); | 474     __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); | 
| 480   } | 475   } | 
| 481   __ CallRuntime(fun, argc); | 476   __ CallRuntime(fun, argc); | 
|  | 477   EnsureRelocSpaceForDeoptimization(); | 
| 482   RegisterLazyDeoptimization(instr); | 478   RegisterLazyDeoptimization(instr); | 
| 483 } | 479 } | 
| 484 | 480 | 
| 485 | 481 | 
| 486 void LCodeGen::RegisterLazyDeoptimization(LInstruction* instr) { | 482 void LCodeGen::RegisterLazyDeoptimization(LInstruction* instr) { | 
| 487   // Create the environment to bailout to. If the call has side effects | 483   // Create the environment to bailout to. If the call has side effects | 
| 488   // execution has to continue after the call otherwise execution can continue | 484   // execution has to continue after the call otherwise execution can continue | 
| 489   // from a previous bailout point repeating the call. | 485   // from a previous bailout point repeating the call. | 
| 490   LEnvironment* deoptimization_environment; | 486   LEnvironment* deoptimization_environment; | 
| 491   if (instr->HasDeoptimizationEnvironment()) { | 487   if (instr->HasDeoptimizationEnvironment()) { | 
| (...skipping 1871 matching lines...) | |
| 2363 | 2359 | 
| 2364   // Invoke the function. | 2360   // Invoke the function. | 
| 2365   __ bind(&invoke); | 2361   __ bind(&invoke); | 
| 2366   ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment()); | 2362   ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment()); | 
| 2367   LPointerMap* pointers = instr->pointer_map(); | 2363   LPointerMap* pointers = instr->pointer_map(); | 
| 2368   LEnvironment* env = instr->deoptimization_environment(); | 2364   LEnvironment* env = instr->deoptimization_environment(); | 
| 2369   RecordPosition(pointers->position()); | 2365   RecordPosition(pointers->position()); | 
| 2370   RegisterEnvironmentForDeoptimization(env); | 2366   RegisterEnvironmentForDeoptimization(env); | 
| 2371   SafepointGenerator safepoint_generator(this, | 2367   SafepointGenerator safepoint_generator(this, | 
| 2372                                          pointers, | 2368                                          pointers, | 
| 2373                                          env->deoptimization_index(), | 2369                                          env->deoptimization_index()); | 
| 2374                                          true); |  | 
| 2375   v8::internal::ParameterCount actual(eax); | 2370   v8::internal::ParameterCount actual(eax); | 
| 2376   __ InvokeFunction(function, actual, CALL_FUNCTION, &safepoint_generator); | 2371   __ InvokeFunction(function, actual, CALL_FUNCTION, &safepoint_generator); | 
| 2377 } | 2372 } | 
| 2378 | 2373 | 
| 2379 | 2374 | 
| 2380 void LCodeGen::DoPushArgument(LPushArgument* instr) { | 2375 void LCodeGen::DoPushArgument(LPushArgument* instr) { | 
| 2381   LOperand* argument = instr->InputAt(0); | 2376   LOperand* argument = instr->InputAt(0); | 
| 2382   if (argument->IsConstantOperand()) { | 2377   if (argument->IsConstantOperand()) { | 
| 2383     __ push(ToImmediate(argument)); | 2378     __ push(ToImmediate(argument)); | 
| 2384   } else { | 2379   } else { | 
| (...skipping 51 matching lines...) | |
| 2436   } | 2431   } | 
| 2437 | 2432 | 
| 2438   LPointerMap* pointers = instr->pointer_map(); | 2433   LPointerMap* pointers = instr->pointer_map(); | 
| 2439   RecordPosition(pointers->position()); | 2434   RecordPosition(pointers->position()); | 
| 2440 | 2435 | 
| 2441   // Invoke function. | 2436   // Invoke function. | 
| 2442   if (*function == *info()->closure()) { | 2437   if (*function == *info()->closure()) { | 
| 2443     __ CallSelf(); | 2438     __ CallSelf(); | 
| 2444   } else { | 2439   } else { | 
| 2445     __ call(FieldOperand(edi, JSFunction::kCodeEntryOffset)); | 2440     __ call(FieldOperand(edi, JSFunction::kCodeEntryOffset)); | 
| 2446     EnsureRelocSpaceForDeoptimization(); |  | 
| 2447   } | 2441   } | 
|  | 2442   EnsureRelocSpaceForDeoptimization(); | 
| 2448 | 2443 | 
| 2449   // Setup deoptimization. | 2444   // Setup deoptimization. | 
| 2450   RegisterLazyDeoptimization(instr); | 2445   RegisterLazyDeoptimization(instr); | 
| 2451 } | 2446 } | 
| 2452 | 2447 | 
| 2453 | 2448 | 
| 2454 void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) { | 2449 void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) { | 
| 2455   ASSERT(ToRegister(instr->result()).is(eax)); | 2450   ASSERT(ToRegister(instr->result()).is(eax)); | 
| 2456   __ mov(edi, instr->function()); | 2451   __ mov(edi, instr->function()); | 
| 2457   CallKnownFunction(instr->function(), instr->arity(), instr); | 2452   CallKnownFunction(instr->function(), instr->arity(), instr); | 
| (...skipping 1508 matching lines...) | |
| 3966   ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment()); | 3961   ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment()); | 
| 3967   LPointerMap* pointers = instr->pointer_map(); | 3962   LPointerMap* pointers = instr->pointer_map(); | 
| 3968   LEnvironment* env = instr->deoptimization_environment(); | 3963   LEnvironment* env = instr->deoptimization_environment(); | 
| 3969   RecordPosition(pointers->position()); | 3964   RecordPosition(pointers->position()); | 
| 3970   RegisterEnvironmentForDeoptimization(env); | 3965   RegisterEnvironmentForDeoptimization(env); | 
| 3971   // Create safepoint generator that will also ensure enough space in the | 3966   // Create safepoint generator that will also ensure enough space in the | 
| 3972   // reloc info for patching in deoptimization (since this is invoking a | 3967   // reloc info for patching in deoptimization (since this is invoking a | 
| 3973   // builtin) | 3968   // builtin) | 
| 3974   SafepointGenerator safepoint_generator(this, | 3969   SafepointGenerator safepoint_generator(this, | 
| 3975                                          pointers, | 3970                                          pointers, | 
| 3976                                          env->deoptimization_index(), | 3971                                          env->deoptimization_index()); | 
| 3977                                          true); |  | 
| 3978   __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); | 3972   __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); | 
| 3979   __ push(Immediate(Smi::FromInt(strict_mode_flag()))); | 3973   __ push(Immediate(Smi::FromInt(strict_mode_flag()))); | 
| 3980   __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION, &safepoint_generator); | 3974   __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION, &safepoint_generator); | 
| 3981 } | 3975 } | 
| 3982 | 3976 | 
| 3983 | 3977 | 
| 3984 void LCodeGen::DoStackCheck(LStackCheck* instr) { | 3978 void LCodeGen::DoStackCheck(LStackCheck* instr) { | 
| 3985   // Perform stack overflow check. | 3979   // Perform stack overflow check. | 
| 3986   NearLabel done; | 3980   NearLabel done; | 
| 3987   ExternalReference stack_limit = | 3981   ExternalReference stack_limit = | 
| (...skipping 22 matching lines...) | |
| 4010   ASSERT(osr_pc_offset_ == -1); | 4004   ASSERT(osr_pc_offset_ == -1); | 
| 4011   osr_pc_offset_ = masm()->pc_offset(); | 4005   osr_pc_offset_ = masm()->pc_offset(); | 
| 4012 } | 4006 } | 
| 4013 | 4007 | 
| 4014 | 4008 | 
| 4015 #undef __ | 4009 #undef __ | 
| 4016 | 4010 | 
| 4017 } }  // namespace v8::internal | 4011 } }  // namespace v8::internal | 
| 4018 | 4012 | 
| 4019 #endif  // V8_TARGET_ARCH_IA32 | 4013 #endif  // V8_TARGET_ARCH_IA32 | 
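
As context for the `EnsureRelocSpaceForDeoptimization` change above, the sketch below is a standalone illustration of the reservation arithmetic, not V8 code: a patch site costs 2 bytes of reloc info when the pc delta from the previous site fits the short encoding checked by `is_uintn(pc_delta, 6)`, and a conservative 6 bytes otherwise to leave room for a pc-jump record. Helper names and the example offsets are illustrative assumptions.

```cpp
#include <cstdint>
#include <cstdio>

// Standalone sketch of the reservation policy in
// LCodeGen::EnsureRelocSpaceForDeoptimization. The field names, the 6-bit
// threshold and the 2/6-byte costs follow the patch; everything else
// (helper names, the example offsets) is illustrative only.
struct DeoptRelocSize {
  uint32_t min_size = 0;        // bytes of reloc info reserved so far
  uint32_t last_pc_offset = 0;  // pc offset of the previous patch site
};

// Mirrors v8's is_uintn(): true if |value| fits in |n| bits.
static bool IsUintN(uint32_t value, int n) {
  return value < (1u << n);
}

// Called once per call site that may later be patched for lazy deopt.
static void EnsureRelocSpace(DeoptRelocSize* sizes, uint32_t pc_offset) {
  uint32_t pc_delta = pc_offset - sizes->last_pc_offset;
  // Short reloc entry (2 bytes) when the delta is small; otherwise be
  // conservative and reserve 6 bytes so a pc-jump record fits as well.
  sizes->min_size += IsUintN(pc_delta, 6) ? 2 : 6;
  sizes->last_pc_offset = pc_offset;
}

int main() {
  DeoptRelocSize sizes;
  const uint32_t sites[] = {10, 50, 500};  // hypothetical pc offsets
  for (uint32_t pc : sites) EnsureRelocSpace(&sizes, pc);
  // 10 -> 2 bytes, 50 (delta 40) -> 2 bytes, 500 (delta 450) -> 6 bytes.
  std::printf("reserved %u bytes of reloc info\n",
              static_cast<unsigned>(sizes.min_size));
  return 0;
}
```

With the hypothetical offsets 10, 50 and 500 this reserves 2 + 2 + 6 = 10 bytes.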
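
The `SafepointGenerator` simplification can be read the same way: with the `ensure_reloc_space` flag gone, the post-call callback unconditionally reserves reloc space before recording the safepoint, so every call site constructs the generator with just three arguments. A minimal compilable sketch of that callback shape, with `LCodeGen` and `LPointerMap` stubbed out as assumptions, might look like this:

```cpp
// Sketch of the post-call callback shape after this patch. LCodeGen and
// LPointerMap are stand-in stubs; only the control flow of
// SafepointGenerator::Generate() is illustrated.
struct LPointerMap {};

struct LCodeGen {
  void EnsureRelocSpaceForDeoptimization() { /* bump the reservation */ }
  void RecordSafepoint(LPointerMap*, int /* deopt_index */) {}
};

// Invoked by the macro assembler right after it emits a call.
class PostCallGenerator {
 public:
  virtual ~PostCallGenerator() {}
  virtual void Generate() = 0;
};

class SafepointGenerator : public PostCallGenerator {
 public:
  SafepointGenerator(LCodeGen* codegen, LPointerMap* pointers,
                     int deoptimization_index)
      : codegen_(codegen),
        pointers_(pointers),
        deoptimization_index_(deoptimization_index) {}

  virtual void Generate() {
    // No flag to consult any more: every safepoint site reserves reloc
    // space for a possible lazy-deopt patch, then records the safepoint.
    codegen_->EnsureRelocSpaceForDeoptimization();
    codegen_->RecordSafepoint(pointers_, deoptimization_index_);
  }

 private:
  LCodeGen* codegen_;
  LPointerMap* pointers_;
  int deoptimization_index_;
};

int main() {
  LCodeGen codegen;
  LPointerMap pointers;
  SafepointGenerator generator(&codegen, &pointers, /*deopt index*/ 0);
  generator.Generate();  // what the assembler would do after the call
  return 0;
}
```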