Chromium Code Reviews

Unified Diff: src/x64/assembler-x64-inl.h

Issue 11574027: Use direct jump and call instruction for X64 when the deoptimization entries are in the code range (Closed)
Base URL: http://v8.googlecode.com/svn/branches/bleeding_edge/
Patch Set: Created 7 years, 9 months ago
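The change rests on a simple reachability property: a deoptimization entry that lives inside V8's code range can be reached from generated x64 code with a signed 32-bit pc-relative displacement, so a plain direct call/jmp (E8/E9 with a rel32 operand) suffices, whereas an entry outside that range has to be materialized as a 64-bit immediate and called indirectly. The standalone sketch below only illustrates that reachability check; it is not V8 code, and the FitsInRel32 helper and the sample addresses are made up for the example.

// Hypothetical sketch (not V8 code): deciding whether a target can be
// reached by a direct E8 rel32 call from a given call site.
#include <cstdint>
#include <cstdio>
#include <limits>

// Returns true if |target| is reachable from a 5-byte E8 rel32 call placed
// at |pc|; the displacement is relative to the end of the call instruction.
bool FitsInRel32(uint64_t pc, uint64_t target) {
  int64_t delta = static_cast<int64_t>(target) -
                  static_cast<int64_t>(pc + 5);  // 5 = size of E8 rel32.
  return delta >= std::numeric_limits<int32_t>::min() &&
         delta <= std::numeric_limits<int32_t>::max();
}

int main() {
  uint64_t code_range_start = 0x100000000ULL;        // assumed code range base
  uint64_t call_site = code_range_start + 0x1000;
  uint64_t entry_inside = code_range_start + 0x2000;  // within the code range
  uint64_t entry_outside = 0x7f0000000000ULL;         // far away, needs indirect call

  std::printf("entry inside code range  -> direct call possible: %d\n",
              FitsInRel32(call_site, entry_inside));
  std::printf("entry outside code range -> direct call possible: %d\n",
              FitsInRel32(call_site, entry_outside));
  return 0;
}

Whether the direct form can actually be used therefore depends on the deoptimization entries being allocated inside the code range, which is what the issue title refers to.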
// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
(...skipping 68 matching lines...)
  if (current > 0 && code_targets_.last().is_identical_to(target)) {
    // Optimization if we keep jumping to the same code target.
    emitl(current - 1);
  } else {
    code_targets_.Add(target);
    emitl(current);
  }
}


+void Assembler::emit_runtime_entry(Address entry, RelocInfo::Mode rmode) {
danno 2013/03/07 14:57:23 I don't think you need to keep the runtime_entries
haitao.feng 2013/03/08 05:30:05 Done.
+  ASSERT(RelocInfo::IsRuntimeEntry(rmode));
+  RecordRelocInfo(rmode);
+  int current = runtime_entries_.length();
+  if (current > 0 && runtime_entries_.last() == entry) {
+    // Optimization if we keep jumping to the same entry.
+    emitl(current - 1);
+  } else {
+    runtime_entries_.Add(entry);
+    emitl(current);
+  }
+}
+
+
void Assembler::emit_rex_64(Register reg, Register rm_reg) {
  emit(0x48 | reg.high_bit() << 2 | rm_reg.high_bit());
}


void Assembler::emit_rex_64(XMMRegister reg, Register rm_reg) {
  emit(0x48 | (reg.code() & 0x8) >> 1 | rm_reg.code() >> 3);
}


(...skipping 102 matching lines...)

Address Assembler::target_address_from_return_address(Address pc) {
  return pc - kCallTargetAddressOffset;
}


Handle<Object> Assembler::code_target_object_handle_at(Address pc) {
  return code_targets_[Memory::int32_at(pc)];
}

+
+Address Assembler::runtime_entry_at(Address pc) {
+  return runtime_entries_[Memory::int32_at(pc)];
+}
+
// -----------------------------------------------------------------------------
// Implementation of RelocInfo

// The modes possibly affected by apply must be in kApplyMask.
void RelocInfo::apply(intptr_t delta) {
  if (IsInternalReference(rmode_)) {
    // absolute code pointer inside code object moves with the code object.
    Memory::Address_at(pc_) += static_cast<int32_t>(delta);
    CPU::FlushICache(pc_, sizeof(Address));
-  } else if (IsCodeTarget(rmode_)) {
+  } else if (IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_)) {
    Memory::int32_at(pc_) -= static_cast<int32_t>(delta);
    CPU::FlushICache(pc_, sizeof(int32_t));
  } else if (rmode_ == CODE_AGE_SEQUENCE) {
    if (*pc_ == kCallOpcode) {
      int32_t* p = reinterpret_cast<int32_t*>(pc_ + 1);
      *p -= static_cast<int32_t>(delta);  // Relocate entry.
      CPU::FlushICache(p, sizeof(uint32_t));
    }
  }
}


Address RelocInfo::target_address() {
-  ASSERT(IsCodeTarget(rmode_) || rmode_ == RUNTIME_ENTRY);
-  if (IsCodeTarget(rmode_)) {
-    return Assembler::target_address_at(pc_);
-  } else {
-    return Memory::Address_at(pc_);
-  }
+  ASSERT(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_));
+  return Assembler::target_address_at(pc_);
}


Address RelocInfo::target_address_address() {
-  ASSERT(IsCodeTarget(rmode_) || rmode_ == RUNTIME_ENTRY
+  ASSERT(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_)
         || rmode_ == EMBEDDED_OBJECT
         || rmode_ == EXTERNAL_REFERENCE);
  return reinterpret_cast<Address>(pc_);
}


int RelocInfo::target_address_size() {
  if (IsCodedSpecially()) {
    return Assembler::kSpecialTargetSize;
  } else {
    return kPointerSize;
  }
}


void RelocInfo::set_target_address(Address target, WriteBarrierMode mode) {
-  ASSERT(IsCodeTarget(rmode_) || rmode_ == RUNTIME_ENTRY);
-  if (IsCodeTarget(rmode_)) {
-    Assembler::set_target_address_at(pc_, target);
-    Object* target_code = Code::GetCodeFromTargetAddress(target);
-    if (mode == UPDATE_WRITE_BARRIER && host() != NULL) {
-      host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
-          host(), this, HeapObject::cast(target_code));
-    }
-  } else {
-    Memory::Address_at(pc_) = target;
-    CPU::FlushICache(pc_, sizeof(Address));
-  }
+  ASSERT(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_));
+  Assembler::set_target_address_at(pc_, target);
+  if (mode == UPDATE_WRITE_BARRIER && host() != NULL && IsCodeTarget(rmode_)) {
+    Object* target_code = Code::GetCodeFromTargetAddress(target);
+    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
+        host(), this, HeapObject::cast(target_code));
+  }
}


Object* RelocInfo::target_object() {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return Memory::Object_at(pc_);
}


Handle<Object> RelocInfo::target_object_handle(Assembler* origin) {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  if (rmode_ == EMBEDDED_OBJECT) {
    return Memory::Object_Handle_at(pc_);
  } else {
    return origin->code_target_object_handle_at(pc_);
  }
}


+bool RelocInfo::NeedsInitializeRuntimeEntry() {
+  return true;
+}
+
+
+Address RelocInfo::target_runtime_entry(Assembler* origin) {
+  ASSERT(IsRuntimeEntry(rmode_));
+  return origin->runtime_entry_at(pc_);
+}
+
+
Object** RelocInfo::target_object_address() {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return reinterpret_cast<Object**>(pc_);
}


Address* RelocInfo::target_reference_address() {
  ASSERT(rmode_ == RelocInfo::EXTERNAL_REFERENCE);
  return reinterpret_cast<Address*>(pc_);
}
(...skipping 134 matching lines...)
    visitor->VisitCodeAgeSequence(this);
#ifdef ENABLE_DEBUGGER_SUPPORT
  // TODO(isolates): Get a cached isolate below.
  } else if (((RelocInfo::IsJSReturn(mode) &&
              IsPatchedReturnSequence()) ||
             (RelocInfo::IsDebugBreakSlot(mode) &&
              IsPatchedDebugBreakSlotSequence())) &&
             Isolate::Current()->debug()->has_break_points()) {
    visitor->VisitDebugTarget(this);
#endif
-  } else if (mode == RelocInfo::RUNTIME_ENTRY) {
+  } else if (RelocInfo::IsRuntimeEntry(mode)) {
    visitor->VisitRuntimeEntry(this);
  }
}


template<typename StaticVisitor>
void RelocInfo::Visit(Heap* heap) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    StaticVisitor::VisitEmbeddedPointer(heap, this);
    CPU::FlushICache(pc_, sizeof(Address));
  } else if (RelocInfo::IsCodeTarget(mode)) {
    StaticVisitor::VisitCodeTarget(heap, this);
  } else if (mode == RelocInfo::GLOBAL_PROPERTY_CELL) {
    StaticVisitor::VisitGlobalPropertyCell(heap, this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    StaticVisitor::VisitExternalReference(this);
    CPU::FlushICache(pc_, sizeof(Address));
  } else if (RelocInfo::IsCodeAgeSequence(mode)) {
    StaticVisitor::VisitCodeAgeSequence(heap, this);
#ifdef ENABLE_DEBUGGER_SUPPORT
  } else if (heap->isolate()->debug()->has_break_points() &&
             ((RelocInfo::IsJSReturn(mode) &&
              IsPatchedReturnSequence()) ||
             (RelocInfo::IsDebugBreakSlot(mode) &&
              IsPatchedDebugBreakSlotSequence()))) {
    StaticVisitor::VisitDebugTarget(heap, this);
#endif
-  } else if (mode == RelocInfo::RUNTIME_ENTRY) {
+  } else if (RelocInfo::IsRuntimeEntry(mode)) {
    StaticVisitor::VisitRuntimeEntry(this);
  }
}


// -----------------------------------------------------------------------------
// Implementation of Operand

void Operand::set_modrm(int mod, Register rm_reg) {
  ASSERT(is_uint2(mod));
(...skipping 26 matching lines...)
  ASSERT(len_ == 1 || len_ == 2);
  int32_t* p = reinterpret_cast<int32_t*>(&buf_[len_]);
  *p = disp;
  len_ += sizeof(int32_t);
}


} }  // namespace v8::internal

#endif  // V8_X64_ASSEMBLER_X64_INL_H_
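For reference, the emit_runtime_entry / runtime_entry_at pair added in this patch set implements a side-table scheme: the instruction stream carries only a 32-bit index, and the assembler keeps the actual 64-bit runtime-entry addresses in a growable list, reusing the previous index when the same entry is emitted back to back. A minimal standalone sketch of that scheme, with simplified types (std::vector instead of the V8 list and buffer classes) and a hypothetical MiniAssembler wrapper, could look like this; note the review comment above suggesting that the runtime_entries bookkeeping may not be needed at all.

// Standalone sketch (not V8 code) of the side-table scheme used by
// emit_runtime_entry / runtime_entry_at in this patch set.
#include <cassert>
#include <cstdint>
#include <cstdio>
#include <cstring>
#include <vector>

class MiniAssembler {
 public:
  // Emits a 32-bit index referring to |entry|, reusing the previous slot when
  // the same entry is emitted repeatedly (mirrors the "Optimization if we
  // keep jumping to the same entry" branch in the patch).
  void EmitRuntimeEntry(uint64_t entry) {
    int current = static_cast<int>(runtime_entries_.size());
    if (current > 0 && runtime_entries_.back() == entry) {
      EmitInt32(current - 1);
    } else {
      runtime_entries_.push_back(entry);
      EmitInt32(current);
    }
  }

  // Reads the index back from the buffer and resolves it through the table,
  // as runtime_entry_at() does with Memory::int32_at(pc).
  uint64_t RuntimeEntryAt(size_t pc_offset) const {
    int32_t index;
    std::memcpy(&index, &buffer_[pc_offset], sizeof(index));
    assert(index >= 0 &&
           static_cast<size_t>(index) < runtime_entries_.size());
    return runtime_entries_[index];
  }

 private:
  // Appends a little-endian 32-bit value to the instruction buffer.
  void EmitInt32(int32_t value) {
    const uint8_t* p = reinterpret_cast<const uint8_t*>(&value);
    buffer_.insert(buffer_.end(), p, p + sizeof(value));
  }

  std::vector<uint8_t> buffer_;
  std::vector<uint64_t> runtime_entries_;
};

int main() {
  MiniAssembler masm;
  uint64_t entry = 0x123400005678ULL;  // made-up runtime entry address
  masm.EmitRuntimeEntry(entry);        // written at offset 0, index 0
  masm.EmitRuntimeEntry(entry);        // same entry, reuses index 0
  std::printf("entry at offset 0: %llx\n",
              static_cast<unsigned long long>(masm.RuntimeEntryAt(0)));
  std::printf("entry at offset 4: %llx\n",
              static_cast<unsigned long long>(masm.RuntimeEntryAt(4)));
  return 0;
}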