Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(1)

Side by Side Diff: src/x64/lithium-codegen-x64.cc

Issue 6639023: Merge revisions 7089, 7095, 7096 to trunk.... (Closed) Base URL: http://v8.googlecode.com/svn/trunk/
Patch Set: Created 9 years, 9 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« no previous file with comments | « src/x64/lithium-codegen-x64.h ('k') | src/x64/macro-assembler-x64.h » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show/Hide Comments ('s')
OLDNEW
1 // Copyright 2011 the V8 project authors. All rights reserved. 1 // Copyright 2011 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 21 matching lines...) Expand all
32 #include "x64/lithium-codegen-x64.h" 32 #include "x64/lithium-codegen-x64.h"
33 #include "code-stubs.h" 33 #include "code-stubs.h"
34 #include "stub-cache.h" 34 #include "stub-cache.h"
35 35
36 namespace v8 { 36 namespace v8 {
37 namespace internal { 37 namespace internal {
38 38
39 39
40 // When invoking builtins, we need to record the safepoint in the middle of 40 // When invoking builtins, we need to record the safepoint in the middle of
41 // the invoke instruction sequence generated by the macro assembler. 41 // the invoke instruction sequence generated by the macro assembler.
42 class SafepointGenerator : public PostCallGenerator { 42 class SafepointGenerator : public CallWrapper {
43 public: 43 public:
44 SafepointGenerator(LCodeGen* codegen, 44 SafepointGenerator(LCodeGen* codegen,
45 LPointerMap* pointers, 45 LPointerMap* pointers,
46 int deoptimization_index, 46 int deoptimization_index,
47 bool ensure_reloc_space = false) 47 bool ensure_reloc_space = false)
48 : codegen_(codegen), 48 : codegen_(codegen),
49 pointers_(pointers), 49 pointers_(pointers),
50 deoptimization_index_(deoptimization_index), 50 deoptimization_index_(deoptimization_index),
51 ensure_reloc_space_(ensure_reloc_space), 51 ensure_reloc_space_(ensure_reloc_space) { }
52 previous_safepoint_position_(-kMinSafepointSize) { }
53 virtual ~SafepointGenerator() { } 52 virtual ~SafepointGenerator() { }
54 53
55 virtual void Generate() { 54 virtual void BeforeCall(int call_size) {
55 ASSERT(call_size >= 0);
56 // Ensure that we have enough space after the previous safepoint position 56 // Ensure that we have enough space after the previous safepoint position
57 // for the generated code there. 57 // for the jump generated there.
58 int position = codegen_->masm()->pc_offset(); 58 int call_end = codegen_->masm()->pc_offset() + call_size;
59 ASSERT(position > previous_safepoint_position_); 59 int prev_jump_end = codegen_->LastSafepointEnd() + kMinSafepointSize;
60 if (position < previous_safepoint_position_ + kMinSafepointSize) { 60 if (call_end < prev_jump_end) {
61 int padding_size = 61 int padding_size = prev_jump_end - call_end;
62 previous_safepoint_position_ + kMinSafepointSize - position;
63 STATIC_ASSERT(kMinSafepointSize <= 9); // One multibyte nop is enough. 62 STATIC_ASSERT(kMinSafepointSize <= 9); // One multibyte nop is enough.
64 codegen_->masm()->nop(padding_size); 63 codegen_->masm()->nop(padding_size);
65 position += padding_size;
66 } 64 }
65 }
66
67 virtual void AfterCall() {
67 // Ensure that we have enough space in the reloc info to patch 68 // Ensure that we have enough space in the reloc info to patch
68 // this with calls when doing deoptimization. 69 // this with calls when doing deoptimization.
69 if (ensure_reloc_space_) { 70 if (ensure_reloc_space_) {
70 codegen_->masm()->RecordComment(RelocInfo::kFillerCommentString, true); 71 codegen_->masm()->RecordComment(RelocInfo::kFillerCommentString, true);
71 } 72 }
72 codegen_->RecordSafepoint(pointers_, deoptimization_index_); 73 codegen_->RecordSafepoint(pointers_, deoptimization_index_);
73 previous_safepoint_position_ = position;
74 } 74 }
75 75
76 private: 76 private:
77 static const int kMinSafepointSize = 77 static const int kMinSafepointSize =
78 MacroAssembler::kShortCallInstructionLength; 78 MacroAssembler::kShortCallInstructionLength;
79 LCodeGen* codegen_; 79 LCodeGen* codegen_;
80 LPointerMap* pointers_; 80 LPointerMap* pointers_;
81 int deoptimization_index_; 81 int deoptimization_index_;
82 bool ensure_reloc_space_; 82 bool ensure_reloc_space_;
83 int previous_safepoint_position_;
84 }; 83 };
85 84
86 85
87 #define __ masm()-> 86 #define __ masm()->
88 87
89 bool LCodeGen::GenerateCode() { 88 bool LCodeGen::GenerateCode() {
90 HPhase phase("Code generation", chunk()); 89 HPhase phase("Code generation", chunk());
91 ASSERT(is_unused()); 90 ASSERT(is_unused());
92 status_ = GENERATING; 91 status_ = GENERATING;
93 return GeneratePrologue() && 92 return GeneratePrologue() &&
(...skipping 158 matching lines...) Expand 10 before | Expand all | Expand 10 after
252 if (current_instruction_ < instructions_->length() - 1) { 251 if (current_instruction_ < instructions_->length() - 1) {
253 return instructions_->at(current_instruction_ + 1); 252 return instructions_->at(current_instruction_ + 1);
254 } else { 253 } else {
255 return NULL; 254 return NULL;
256 } 255 }
257 } 256 }
258 257
259 258
260 bool LCodeGen::GenerateJumpTable() { 259 bool LCodeGen::GenerateJumpTable() {
261 for (int i = 0; i < jump_table_.length(); i++) { 260 for (int i = 0; i < jump_table_.length(); i++) {
262 JumpTableEntry* info = jump_table_[i]; 261 __ bind(&jump_table_[i].label);
263 __ bind(&(info->label_)); 262 __ Jump(jump_table_[i].address, RelocInfo::RUNTIME_ENTRY);
264 __ Jump(info->address_, RelocInfo::RUNTIME_ENTRY);
265 } 263 }
266 return !is_aborted(); 264 return !is_aborted();
267 } 265 }
268 266
269 267
270 bool LCodeGen::GenerateDeferredCode() { 268 bool LCodeGen::GenerateDeferredCode() {
271 ASSERT(is_generating()); 269 ASSERT(is_generating());
272 for (int i = 0; !is_aborted() && i < deferred_.length(); i++) { 270 for (int i = 0; !is_aborted() && i < deferred_.length(); i++) {
273 LDeferredCode* code = deferred_[i]; 271 LDeferredCode* code = deferred_[i];
274 __ bind(code->entry()); 272 __ bind(code->entry());
(...skipping 257 matching lines...) Expand 10 before | Expand all | Expand 10 after
532 Address entry = Deoptimizer::GetDeoptimizationEntry(id, Deoptimizer::EAGER); 530 Address entry = Deoptimizer::GetDeoptimizationEntry(id, Deoptimizer::EAGER);
533 ASSERT(entry != NULL); 531 ASSERT(entry != NULL);
534 if (entry == NULL) { 532 if (entry == NULL) {
535 Abort("bailout was not prepared"); 533 Abort("bailout was not prepared");
536 return; 534 return;
537 } 535 }
538 536
539 if (cc == no_condition) { 537 if (cc == no_condition) {
540 __ Jump(entry, RelocInfo::RUNTIME_ENTRY); 538 __ Jump(entry, RelocInfo::RUNTIME_ENTRY);
541 } else { 539 } else {
542 JumpTableEntry* jump_info = NULL;
543 // We often have several deopts to the same entry, reuse the last 540 // We often have several deopts to the same entry, reuse the last
544 // jump entry if this is the case. 541 // jump entry if this is the case.
545 if (jump_table_.length() > 0 && 542 if (jump_table_.is_empty() ||
546 jump_table_[jump_table_.length() - 1]->address_ == entry) { 543 jump_table_.last().address != entry) {
547 jump_info = jump_table_[jump_table_.length() - 1]; 544 jump_table_.Add(entry);
548 } else {
549 jump_info = new JumpTableEntry(entry);
550 jump_table_.Add(jump_info);
551 } 545 }
552 __ j(cc, &jump_info->label_); 546 __ j(cc, &jump_table_.last().label);
553 } 547 }
554 } 548 }
555 549
556 550
557 void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) { 551 void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) {
558 int length = deoptimizations_.length(); 552 int length = deoptimizations_.length();
559 if (length == 0) return; 553 if (length == 0) return;
560 ASSERT(FLAG_deopt); 554 ASSERT(FLAG_deopt);
561 Handle<DeoptimizationInputData> data = 555 Handle<DeoptimizationInputData> data =
562 Factory::NewDeoptimizationInputData(length, TENURED); 556 Factory::NewDeoptimizationInputData(length, TENURED);
(...skipping 3101 matching lines...) Expand 10 before | Expand all | Expand 10 after
3664 RegisterEnvironmentForDeoptimization(environment); 3658 RegisterEnvironmentForDeoptimization(environment);
3665 ASSERT(osr_pc_offset_ == -1); 3659 ASSERT(osr_pc_offset_ == -1);
3666 osr_pc_offset_ = masm()->pc_offset(); 3660 osr_pc_offset_ = masm()->pc_offset();
3667 } 3661 }
3668 3662
3669 #undef __ 3663 #undef __
3670 3664
3671 } } // namespace v8::internal 3665 } } // namespace v8::internal
3672 3666
3673 #endif // V8_TARGET_ARCH_X64 3667 #endif // V8_TARGET_ARCH_X64
OLDNEW
« no previous file with comments | « src/x64/lithium-codegen-x64.h ('k') | src/x64/macro-assembler-x64.h » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698