OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 301 matching lines...) |
312 | 312 |
313 void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) { | 313 void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) { |
314 __ mov(ebx, Immediate(profiling_counter_)); | 314 __ mov(ebx, Immediate(profiling_counter_)); |
315 __ sub(FieldOperand(ebx, Cell::kValueOffset), | 315 __ sub(FieldOperand(ebx, Cell::kValueOffset), |
316 Immediate(Smi::FromInt(delta))); | 316 Immediate(Smi::FromInt(delta))); |
317 } | 317 } |
318 | 318 |
319 | 319 |
320 void FullCodeGenerator::EmitProfilingCounterReset() { | 320 void FullCodeGenerator::EmitProfilingCounterReset() { |
321 int reset_value = FLAG_interrupt_budget; | 321 int reset_value = FLAG_interrupt_budget; |
322 if (info_->ShouldSelfOptimize() && !FLAG_retry_self_opt) { | |
323 // Self-optimization is a one-off thing: if it fails, don't try again. | |
324 reset_value = Smi::kMaxValue; | |
325 } | |
326 __ mov(ebx, Immediate(profiling_counter_)); | 322 __ mov(ebx, Immediate(profiling_counter_)); |
327 __ mov(FieldOperand(ebx, Cell::kValueOffset), | 323 __ mov(FieldOperand(ebx, Cell::kValueOffset), |
328 Immediate(Smi::FromInt(reset_value))); | 324 Immediate(Smi::FromInt(reset_value))); |
329 } | 325 } |
330 | 326 |
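For reference, the two helpers above implement the profiling-counter bookkeeping: the counter lives in a Cell as a Smi, is decremented by a weight at each back edge or return, and is reset to FLAG_interrupt_budget once an interrupt check has run. Below is a minimal standalone C++ sketch of that cycle; the constant value and the scaffolding names are illustrative assumptions, not taken from this patch.

// Standalone illustration (not part of this patch): the profiling counter
// starts at the interrupt budget, loses the edge weight on every back edge
// or return, and is reset after the interrupt check fires.
#include <cstdio>

namespace {

const int kInterruptBudget = 0x1800;  // assumed stand-in for FLAG_interrupt_budget

int profiling_counter = kInterruptBudget;

// Mirrors EmitProfilingCounterDecrement: subtract the weight of the edge.
void ProfilingCounterDecrement(int delta) { profiling_counter -= delta; }

// Mirrors EmitProfilingCounterReset: restore the full budget.
void ProfilingCounterReset() { profiling_counter = kInterruptBudget; }

// Mirrors the code emitted at a back edge: decrement, and if the budget is
// exhausted (counter no longer positive), run the interrupt check and reset.
void BackEdge(int weight) {
  ProfilingCounterDecrement(weight);
  if (profiling_counter <= 0) {
    std::printf("interrupt check\n");
    ProfilingCounterReset();
  }
}

}  // namespace

int main() {
  for (int i = 0; i < 20000; ++i) BackEdge(1);
  return 0;
}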
331 | 327 |
332 void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt, | 328 void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt, |
333 Label* back_edge_target) { | 329 Label* back_edge_target) { |
334 Comment cmnt(masm_, "[ Back edge bookkeeping"); | 330 Comment cmnt(masm_, "[ Back edge bookkeeping"); |
335 Label ok; | 331 Label ok; |
336 | 332 |
337 int weight = 1; | 333 ASSERT(back_edge_target->is_bound()); |
338 if (FLAG_weighted_back_edges) { | 334 int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target); |
339 ASSERT(back_edge_target->is_bound()); | 335 int weight = Min(kMaxBackEdgeWeight, |
340 int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target); | 336 Max(1, distance / kCodeSizeMultiplier)); |
341 weight = Min(kMaxBackEdgeWeight, | |
342 Max(1, distance / kCodeSizeMultiplier)); | |
343 } | |
344 EmitProfilingCounterDecrement(weight); | 337 EmitProfilingCounterDecrement(weight); |
345 __ j(positive, &ok, Label::kNear); | 338 __ j(positive, &ok, Label::kNear); |
346 __ call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET); | 339 __ call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET); |
347 | 340 |
348 // Record a mapping of this PC offset to the OSR id. This is used to find | 341 // Record a mapping of this PC offset to the OSR id. This is used to find |
349 // the AST id from the unoptimized code in order to use it as a key into | 342 // the AST id from the unoptimized code in order to use it as a key into |
350 // the deoptimization input data found in the optimized code. | 343 // the deoptimization input data found in the optimized code. |
351 RecordBackEdge(stmt->OsrEntryId()); | 344 RecordBackEdge(stmt->OsrEntryId()); |
352 | 345 |
353 EmitProfilingCounterReset(); | 346 EmitProfilingCounterReset(); |
(...skipping 11 matching lines...) |
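For reference, the back-edge weight computed above is the code-size distance from the back-edge target, scaled by kCodeSizeMultiplier and clamped to [1, kMaxBackEdgeWeight], so bigger loop bodies drain the interrupt budget faster. A standalone sketch with assumed constant values (not taken from this patch):

// Standalone illustration (not part of this patch) of the weight clamp.
#include <algorithm>
#include <cstdio>

int main() {
  const int kMaxBackEdgeWeight = 127;   // assumed value, for illustration only
  const int kCodeSizeMultiplier = 100;  // assumed ia32 value, for illustration only

  const int distances[] = {16, 250, 5000, 100000};  // bytes since back_edge_target
  for (int distance : distances) {
    int weight = std::min(kMaxBackEdgeWeight,
                          std::max(1, distance / kCodeSizeMultiplier));
    std::printf("distance %6d bytes -> weight %3d\n", distance, weight);
  }
  return 0;
}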
365 Comment cmnt(masm_, "[ Return sequence"); | 358 Comment cmnt(masm_, "[ Return sequence"); |
366 if (return_label_.is_bound()) { | 359 if (return_label_.is_bound()) { |
367 __ jmp(&return_label_); | 360 __ jmp(&return_label_); |
368 } else { | 361 } else { |
369 // Common return label | 362 // Common return label |
370 __ bind(&return_label_); | 363 __ bind(&return_label_); |
371 if (FLAG_trace) { | 364 if (FLAG_trace) { |
372 __ push(eax); | 365 __ push(eax); |
373 __ CallRuntime(Runtime::kTraceExit, 1); | 366 __ CallRuntime(Runtime::kTraceExit, 1); |
374 } | 367 } |
375 if (FLAG_interrupt_at_exit || FLAG_self_optimization) { | 368 // Pretend that the exit is a backwards jump to the entry. |
376 // Pretend that the exit is a backwards jump to the entry. | 369 int weight = 1; |
377 int weight = 1; | 370 if (info_->ShouldSelfOptimize()) { |
378 if (info_->ShouldSelfOptimize()) { | 371 weight = FLAG_interrupt_budget / FLAG_self_opt_count; |
379 weight = FLAG_interrupt_budget / FLAG_self_opt_count; | 372 } else { |
380 } else if (FLAG_weighted_back_edges) { | 373 int distance = masm_->pc_offset(); |
381 int distance = masm_->pc_offset(); | 374 weight = Min(kMaxBackEdgeWeight, |
382 weight = Min(kMaxBackEdgeWeight, | 375 Max(1, distance / kCodeSizeMultiplier)); |
383 Max(1, distance / kCodeSizeMultiplier)); | |
384 } | |
385 EmitProfilingCounterDecrement(weight); | |
386 Label ok; | |
387 __ j(positive, &ok, Label::kNear); | |
388 __ push(eax); | |
389 if (info_->ShouldSelfOptimize() && FLAG_direct_self_opt) { | |
390 __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset)); | |
391 __ CallRuntime(Runtime::kOptimizeFunctionOnNextCall, 1); | |
392 } else { | |
393 __ call(isolate()->builtins()->InterruptCheck(), | |
394 RelocInfo::CODE_TARGET); | |
395 } | |
396 __ pop(eax); | |
397 EmitProfilingCounterReset(); | |
398 __ bind(&ok); | |
399 } | 376 } |
| 377 EmitProfilingCounterDecrement(weight); |
| 378 Label ok; |
| 379 __ j(positive, &ok, Label::kNear); |
| 380 __ push(eax); |
| 381 __ call(isolate()->builtins()->InterruptCheck(), |
| 382 RelocInfo::CODE_TARGET); |
| 383 __ pop(eax); |
| 384 EmitProfilingCounterReset(); |
| 385 __ bind(&ok); |
400 #ifdef DEBUG | 386 #ifdef DEBUG |
401 // Add a label for checking the size of the code used for returning. | 387 // Add a label for checking the size of the code used for returning. |
402 Label check_exit_codesize; | 388 Label check_exit_codesize; |
403 masm_->bind(&check_exit_codesize); | 389 masm_->bind(&check_exit_codesize); |
404 #endif | 390 #endif |
405 SetSourcePosition(function()->end_position() - 1); | 391 SetSourcePosition(function()->end_position() - 1); |
406 __ RecordJSReturn(); | 392 __ RecordJSReturn(); |
407 // Do not use the leave instruction here because it is too short to | 393 // Do not use the leave instruction here because it is too short to |
408 // patch with the code required by the debugger. | 394 // patch with the code required by the debugger. |
409 __ mov(esp, ebp); | 395 __ mov(esp, ebp); |
(...skipping 4503 matching lines...) |
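For reference, the exit weight in the return sequence above is either a fixed slice of the interrupt budget (when the function should self-optimize) or the same distance-based clamp as a back edge, with the whole function body (pc_offset) as the distance. A standalone sketch with assumed flag and constant values (not taken from this patch):

// Standalone illustration (not part of this patch) of the exit-weight choice:
// self-optimizing functions get a fixed slice of the budget, other functions
// treat the return as a back edge spanning the whole body.
#include <algorithm>
#include <cstdio>

int ExitWeight(bool should_self_optimize, int pc_offset) {
  const int kInterruptBudget = 0x1800;  // assumed stand-in for FLAG_interrupt_budget
  const int kSelfOptCount = 130;        // assumed stand-in for FLAG_self_opt_count
  const int kMaxBackEdgeWeight = 127;   // assumed value, for illustration only
  const int kCodeSizeMultiplier = 100;  // assumed ia32 value, for illustration only

  if (should_self_optimize) return kInterruptBudget / kSelfOptCount;
  return std::min(kMaxBackEdgeWeight,
                  std::max(1, pc_offset / kCodeSizeMultiplier));
}

int main() {
  std::printf("self-opt weight: %d\n", ExitWeight(true, 0));
  std::printf("plain exit weight (4000-byte body): %d\n", ExitWeight(false, 4000));
  return 0;
}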
4913 | 4899 |
4914 ASSERT_EQ(isolate->builtins()->OsrAfterStackCheck()->entry(), | 4900 ASSERT_EQ(isolate->builtins()->OsrAfterStackCheck()->entry(), |
4915 Assembler::target_address_at(call_target_address)); | 4901 Assembler::target_address_at(call_target_address)); |
4916 return OSR_AFTER_STACK_CHECK; | 4902 return OSR_AFTER_STACK_CHECK; |
4917 } | 4903 } |
4918 | 4904 |
4919 | 4905 |
4920 } } // namespace v8::internal | 4906 } } // namespace v8::internal |
4921 | 4907 |
4922 #endif // V8_TARGET_ARCH_IA32 | 4908 #endif // V8_TARGET_ARCH_IA32 |