Chromium Code Reviews

Unified Diff: src/x64/assembler-x64-inl.h

Issue 11574027: Use direct jump and call instructions for X64 when the deoptimization entries are in the code range (Closed) Base URL: http://v8.googlecode.com/svn/branches/bleeding_edge/
Patch Set: Created 7 years, 9 months ago
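The patch stores a deoptimization (runtime) entry as a 32-bit offset from the start of the isolate's code range instead of a 64-bit absolute address, which is what allows a direct rel32 jmp/call to reach it. Below is a minimal standalone sketch of that encode/decode round trip; the helper names and the toy buffer are illustrative only and not part of the V8 API, but the arithmetic mirrors emit_runtime_entry() and runtime_entry_at() in the diff.

// Standalone sketch (not V8 code): a runtime entry that lives inside the
// isolate's code range can be stored as a 32-bit offset from the start of
// that range, instead of a full 64-bit absolute address.
#include <cassert>
#include <cstddef>
#include <cstdint>

typedef uint8_t* Address;  // byte-addressable pointer, as in V8

// Encode: the deoptimization entry lies inside the (sub-2GB) code range,
// so its offset from the range start fits in 32 bits. This is the value
// that emitl() would write into the instruction stream.
static uint32_t EncodeRuntimeEntry(Address entry, Address code_range_start) {
  ptrdiff_t offset = entry - code_range_start;
  assert(offset >= 0 && offset <= INT32_MAX);
  return static_cast<uint32_t>(offset);
}

// Decode: recover the absolute entry address from the stored 32-bit value,
// mirroring runtime_entry_at().
static Address DecodeRuntimeEntry(uint32_t stored, Address code_range_start) {
  return code_range_start + stored;
}

int main() {
  uint8_t code_range[64];           // stand-in for the isolate's code range
  Address start = code_range;
  Address entry = code_range + 40;  // pretend deoptimization entry
  uint32_t stored = EncodeRuntimeEntry(entry, start);
  assert(DecodeRuntimeEntry(stored, start) == entry);
  return 0;
}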
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 68 matching lines...)
   if (current > 0 && code_targets_.last().is_identical_to(target)) {
     // Optimization if we keep jumping to the same code target.
     emitl(current - 1);
   } else {
     code_targets_.Add(target);
     emitl(current);
   }
 }


+void Assembler::emit_runtime_entry(Address entry, RelocInfo::Mode rmode) {
+  ASSERT(RelocInfo::IsRuntimeEntry(rmode));
+  ASSERT(isolate()->code_range()->exists());
+  RecordRelocInfo(rmode);
+  emitl(static_cast<uint32_t>(entry - isolate()->code_range()->start()));
+}
+
+
 void Assembler::emit_rex_64(Register reg, Register rm_reg) {
   emit(0x48 | reg.high_bit() << 2 | rm_reg.high_bit());
 }


 void Assembler::emit_rex_64(XMMRegister reg, Register rm_reg) {
   emit(0x48 | (reg.code() & 0x8) >> 1 | rm_reg.code() >> 3);
 }


(...skipping 102 matching lines...)

 Address Assembler::target_address_from_return_address(Address pc) {
   return pc - kCallTargetAddressOffset;
 }


 Handle<Object> Assembler::code_target_object_handle_at(Address pc) {
   return code_targets_[Memory::int32_at(pc)];
 }

+
+Address Assembler::runtime_entry_at(Address pc) {
+  ASSERT(isolate()->code_range()->exists());
+  return Memory::int32_at(pc) + isolate()->code_range()->start();
+}
+
 // -----------------------------------------------------------------------------
 // Implementation of RelocInfo

 // The modes possibly affected by apply must be in kApplyMask.
 void RelocInfo::apply(intptr_t delta) {
   if (IsInternalReference(rmode_)) {
     // absolute code pointer inside code object moves with the code object.
     Memory::Address_at(pc_) += static_cast<int32_t>(delta);
     CPU::FlushICache(pc_, sizeof(Address));
-  } else if (IsCodeTarget(rmode_)) {
+  } else if (IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_)) {
     Memory::int32_at(pc_) -= static_cast<int32_t>(delta);
     CPU::FlushICache(pc_, sizeof(int32_t));
   } else if (rmode_ == CODE_AGE_SEQUENCE) {
     if (*pc_ == kCallOpcode) {
       int32_t* p = reinterpret_cast<int32_t*>(pc_ + 1);
       *p -= static_cast<int32_t>(delta);  // Relocate entry.
       CPU::FlushICache(p, sizeof(uint32_t));
     }
   }
 }


 Address RelocInfo::target_address() {
-  ASSERT(IsCodeTarget(rmode_) || rmode_ == RUNTIME_ENTRY);
-  if (IsCodeTarget(rmode_)) {
-    return Assembler::target_address_at(pc_);
-  } else {
-    return Memory::Address_at(pc_);
-  }
+  ASSERT(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_));
+  return Assembler::target_address_at(pc_);
 }


 Address RelocInfo::target_address_address() {
-  ASSERT(IsCodeTarget(rmode_) || rmode_ == RUNTIME_ENTRY
+  ASSERT(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_)
                               || rmode_ == EMBEDDED_OBJECT
                               || rmode_ == EXTERNAL_REFERENCE);
   return reinterpret_cast<Address>(pc_);
 }


 int RelocInfo::target_address_size() {
   if (IsCodedSpecially()) {
     return Assembler::kSpecialTargetSize;
   } else {
     return kPointerSize;
   }
 }


 void RelocInfo::set_target_address(Address target, WriteBarrierMode mode) {
-  ASSERT(IsCodeTarget(rmode_) || rmode_ == RUNTIME_ENTRY);
-  if (IsCodeTarget(rmode_)) {
-    Assembler::set_target_address_at(pc_, target);
-    Object* target_code = Code::GetCodeFromTargetAddress(target);
-    if (mode == UPDATE_WRITE_BARRIER && host() != NULL) {
-      host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
-          host(), this, HeapObject::cast(target_code));
-    }
-  } else {
-    Memory::Address_at(pc_) = target;
-    CPU::FlushICache(pc_, sizeof(Address));
-  }
+  ASSERT(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_));
+  Assembler::set_target_address_at(pc_, target);
+  if (mode == UPDATE_WRITE_BARRIER && host() != NULL && IsCodeTarget(rmode_)) {
+    Object* target_code = Code::GetCodeFromTargetAddress(target);
+    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
+        host(), this, HeapObject::cast(target_code));
+  }
 }


 Object* RelocInfo::target_object() {
   ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
   return Memory::Object_at(pc_);
 }


(...skipping 25 matching lines...)
   CPU::FlushICache(pc_, sizeof(Address));
   if (mode == UPDATE_WRITE_BARRIER &&
       host() != NULL &&
       target->IsHeapObject()) {
     host()->GetHeap()->incremental_marking()->RecordWrite(
         host(), &Memory::Object_at(pc_), HeapObject::cast(target));
   }
 }


+Address RelocInfo::target_runtime_entry(Assembler* origin) {
+  ASSERT(IsRuntimeEntry(rmode_));
+  return origin->runtime_entry_at(pc_);
+}
+
+
+void RelocInfo::set_target_runtime_entry(Address target,
+                                         WriteBarrierMode mode) {
+  ASSERT(IsRuntimeEntry(rmode_));
+  if (target_address() != target) set_target_address(target, mode);
+}
+
+
 Handle<JSGlobalPropertyCell> RelocInfo::target_cell_handle() {
   ASSERT(rmode_ == RelocInfo::GLOBAL_PROPERTY_CELL);
   Address address = Memory::Address_at(pc_);
   return Handle<JSGlobalPropertyCell>(
       reinterpret_cast<JSGlobalPropertyCell**>(address));
 }


 JSGlobalPropertyCell* RelocInfo::target_cell() {
   ASSERT(rmode_ == RelocInfo::GLOBAL_PROPERTY_CELL);
(...skipping 109 matching lines...)
     visitor->VisitCodeAgeSequence(this);
 #ifdef ENABLE_DEBUGGER_SUPPORT
   // TODO(isolates): Get a cached isolate below.
   } else if (((RelocInfo::IsJSReturn(mode) &&
               IsPatchedReturnSequence()) ||
              (RelocInfo::IsDebugBreakSlot(mode) &&
               IsPatchedDebugBreakSlotSequence())) &&
              Isolate::Current()->debug()->has_break_points()) {
     visitor->VisitDebugTarget(this);
 #endif
-  } else if (mode == RelocInfo::RUNTIME_ENTRY) {
+  } else if (RelocInfo::IsRuntimeEntry(mode)) {
     visitor->VisitRuntimeEntry(this);
   }
 }


 template<typename StaticVisitor>
 void RelocInfo::Visit(Heap* heap) {
   RelocInfo::Mode mode = rmode();
   if (mode == RelocInfo::EMBEDDED_OBJECT) {
     StaticVisitor::VisitEmbeddedPointer(heap, this);
     CPU::FlushICache(pc_, sizeof(Address));
   } else if (RelocInfo::IsCodeTarget(mode)) {
     StaticVisitor::VisitCodeTarget(heap, this);
   } else if (mode == RelocInfo::GLOBAL_PROPERTY_CELL) {
     StaticVisitor::VisitGlobalPropertyCell(heap, this);
   } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
     StaticVisitor::VisitExternalReference(this);
     CPU::FlushICache(pc_, sizeof(Address));
   } else if (RelocInfo::IsCodeAgeSequence(mode)) {
     StaticVisitor::VisitCodeAgeSequence(heap, this);
 #ifdef ENABLE_DEBUGGER_SUPPORT
   } else if (heap->isolate()->debug()->has_break_points() &&
              ((RelocInfo::IsJSReturn(mode) &&
               IsPatchedReturnSequence()) ||
              (RelocInfo::IsDebugBreakSlot(mode) &&
               IsPatchedDebugBreakSlotSequence()))) {
     StaticVisitor::VisitDebugTarget(heap, this);
 #endif
-  } else if (mode == RelocInfo::RUNTIME_ENTRY) {
+  } else if (RelocInfo::IsRuntimeEntry(mode)) {
     StaticVisitor::VisitRuntimeEntry(this);
   }
 }


 // -----------------------------------------------------------------------------
 // Implementation of Operand

 void Operand::set_modrm(int mod, Register rm_reg) {
   ASSERT(is_uint2(mod));
(...skipping 26 matching lines...)
   ASSERT(len_ == 1 || len_ == 2);
   int32_t* p = reinterpret_cast<int32_t*>(&buf_[len_]);
   *p = disp;
   len_ += sizeof(int32_t);
 }


 } }  // namespace v8::internal

 #endif  // V8_X64_ASSEMBLER_X64_INL_H_