Chromium Code Reviews

Side by Side Diff: runtime/vm/assembler_x64.cc

Issue 22825023: Uses an object pool on x64 (Closed) Base URL: http://dart.googlecode.com/svn/branches/bleeding_edge/dart/
Patch Set: Created 7 years, 4 months ago
1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
2 // for details. All rights reserved. Use of this source code is governed by a 2 // for details. All rights reserved. Use of this source code is governed by a
3 // BSD-style license that can be found in the LICENSE file. 3 // BSD-style license that can be found in the LICENSE file.
4 4
5 #include "vm/globals.h" 5 #include "vm/globals.h"
6 #if defined(TARGET_ARCH_X64) 6 #if defined(TARGET_ARCH_X64)
7 7
8 #include "vm/assembler.h" 8 #include "vm/assembler.h"
9 #include "vm/heap.h" 9 #include "vm/heap.h"
10 #include "vm/memory_region.h" 10 #include "vm/memory_region.h"
(...skipping 77 matching lines...)
88 88
89 89
90 void Assembler::call(Label* label) { 90 void Assembler::call(Label* label) {
91 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 91 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
92 static const int kSize = 5; 92 static const int kSize = 5;
93 EmitUint8(0xE8); 93 EmitUint8(0xE8);
94 EmitLabel(label, kSize); 94 EmitLabel(label, kSize);
95 } 95 }
96 96
97 97
98 void Assembler::LoadExternalLabel(const ExternalLabel* label) {
99 const int32_t offset = Array::element_offset(AddExternalLabel(label));
100 LoadWordFromPoolOffset(TMP, offset - kHeapObjectTag);
101 }
102
103
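A note on the offset arithmetic in LoadExternalLabel above: AddExternalLabel (added further down in this file) returns a pool index, Array::element_offset() converts it to a byte offset inside the pool array, and subtracting kHeapObjectTag compensates for PP holding a tagged heap pointer. A minimal standalone sketch of that arithmetic, assuming an element-0 offset of 16 bytes, kWordSize == 8 and kHeapObjectTag == 1 (illustrative values, not the VM's constants):

    #include <cstdint>

    constexpr int64_t kWordSizeX64 = 8;       // assumed x64 word size
    constexpr int64_t kHeapTag = 1;           // assumed tag bit on heap pointers
    constexpr int64_t kArrayDataOffset = 16;  // assumed offset of element 0

    // Displacement used with Address(PP, ...) for pool entry |index|.
    // PP is tagged, so the tag is subtracted once from the displacement.
    constexpr int64_t PoolEntryDisplacement(int64_t index) {
      return kArrayDataOffset + index * kWordSizeX64 - kHeapTag;
    }

    static_assert(PoolEntryDisplacement(0) == 15, "element 0, minus the tag");
    static_assert(PoolEntryDisplacement(3) == 39, "entries are one word apart");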
98 void Assembler::call(const ExternalLabel* label) { 104 void Assembler::call(const ExternalLabel* label) {
99 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 105 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
100 intptr_t call_start = buffer_.GetPosition();
101 106
102 // Encode movq(TMP, Immediate(label->address())), but always as imm64. 107 // Encode movq(TMP, Immediate(label->address())), but always as imm64.
103 EmitRegisterREX(TMP, REX_W); 108 EmitRegisterREX(TMP, REX_W);
104 EmitUint8(0xB8 | (TMP & 7)); 109 EmitUint8(0xB8 | (TMP & 7));
105 EmitInt64(label->address()); 110 EmitInt64(label->address());
106 111
107 // Encode call(TMP). 112 // Encode call(TMP).
108 Operand operand(TMP); 113 Operand operand(TMP);
109 EmitOperandREX(2, operand, REX_NONE); 114 EmitOperandREX(2, operand, REX_NONE);
110 EmitUint8(0xFF); 115 EmitUint8(0xFF);
111 EmitOperand(2, operand); 116 EmitOperand(2, operand);
112
113 ASSERT((buffer_.GetPosition() - call_start) == kCallExternalLabelSize);
114 } 117 }
115 118
116 119
120 void Assembler::CallPatchable(const ExternalLabel* label) {
121 LoadExternalLabel(label);
122 {
123 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
124 // Encode call(TMP).
125 Operand operand(TMP);
126 EmitOperandREX(2, operand, REX_NONE);
127 EmitUint8(0xFF);
128 EmitOperand(2, operand);
129 }
130 }
131
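Why this sequence is patchable: the call target lives in an object-pool slot instead of the instruction stream, so redirecting the call only requires storing a new value into that slot (AddExternalLabel below deliberately never reuses entries for this reason). A hypothetical sketch of such a slot rewrite, assuming the usual Smi encoding (zero tag bit, kSmiTagShift == 1) so that the raw Smi bits of a 4-byte-aligned address are the address itself; the helper name is illustrative, not the code_patcher_x64.cc API:

    #include <cassert>
    #include <cstdint>

    // Redirect pool entry |index| to |new_target|.  With a zero Smi tag and a
    // 4-byte-aligned target, the raw slot contents equal the machine address,
    // which is what the pool load plus call(TMP) above jumps through.
    void RewritePoolTarget(int64_t* pool_data, int index, int64_t new_target) {
      assert((new_target & 3) == 0);  // keeps the slot a Smi, not a heap pointer
      pool_data[index] = new_target;
    }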
117 void Assembler::pushq(Register reg) { 132 void Assembler::pushq(Register reg) {
118 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 133 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
119 EmitRegisterREX(reg, REX_NONE); 134 EmitRegisterREX(reg, REX_NONE);
120 EmitUint8(0x50 | (reg & 7)); 135 EmitUint8(0x50 | (reg & 7));
121 } 136 }
122 137
123 138
124 void Assembler::pushq(const Address& address) { 139 void Assembler::pushq(const Address& address) {
125 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 140 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
126 EmitOperandREX(6, address, REX_NONE); 141 EmitOperandREX(6, address, REX_NONE);
(...skipping 1861 matching lines...)
1988 EmitNearLabelLink(label); 2003 EmitNearLabelLink(label);
1989 } else { 2004 } else {
1990 EmitUint8(0xE9); 2005 EmitUint8(0xE9);
1991 EmitLabelLink(label); 2006 EmitLabelLink(label);
1992 } 2007 }
1993 } 2008 }
1994 2009
1995 2010
1996 void Assembler::jmp(const ExternalLabel* label) { 2011 void Assembler::jmp(const ExternalLabel* label) {
1997 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 2012 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
1998 intptr_t call_start = buffer_.GetPosition();
1999 2013
2000 // Encode movq(TMP, Immediate(label->address())), but always as imm64. 2014 // Encode movq(TMP, Immediate(label->address())), but always as imm64.
2001 EmitRegisterREX(TMP, REX_W); 2015 EmitRegisterREX(TMP, REX_W);
2002 EmitUint8(0xB8 | (TMP & 7)); 2016 EmitUint8(0xB8 | (TMP & 7));
2003 EmitInt64(label->address()); 2017 EmitInt64(label->address());
2004 2018
2005 // Encode jmp(TMP). 2019 // Encode jmp(TMP).
2006 Operand operand(TMP); 2020 Operand operand(TMP);
2007 EmitOperandREX(4, operand, REX_NONE); 2021 EmitOperandREX(4, operand, REX_NONE);
2008 EmitUint8(0xFF); 2022 EmitUint8(0xFF);
2009 EmitOperand(4, operand); 2023 EmitOperand(4, operand);
2010
2011 ASSERT((buffer_.GetPosition() - call_start) == kCallExternalLabelSize);
2012 } 2024 }
2013 2025
2014 2026
2015 void Assembler::lock() { 2027 void Assembler::lock() {
2016 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 2028 AssemblerBuffer::EnsureCapacity ensured(&buffer_);
2017 EmitUint8(0xF0); 2029 EmitUint8(0xF0);
2018 } 2030 }
2019 2031
2020 2032
2021 void Assembler::cmpxchgl(const Address& address, Register reg) { 2033 void Assembler::cmpxchgl(const Address& address, Register reg) {
(...skipping 62 matching lines...)
2084 if (stack_elements <= 4) { 2096 if (stack_elements <= 4) {
2085 for (intptr_t i = 0; i < stack_elements; i++) { 2097 for (intptr_t i = 0; i < stack_elements; i++) {
2086 popq(TMP); 2098 popq(TMP);
2087 } 2099 }
2088 return; 2100 return;
2089 } 2101 }
2090 addq(RSP, Immediate(stack_elements * kWordSize)); 2102 addq(RSP, Immediate(stack_elements * kWordSize));
2091 } 2103 }
2092 2104
2093 2105
2106 int32_t Assembler::AddObject(const Object& obj) {
2107 ASSERT(obj.IsNotTemporaryScopedHandle());
2108 ASSERT(obj.IsOld());
2109 if (object_pool_.IsNull()) {
2110 // The object pool cannot be used in the vm isolate.
2111 ASSERT(Isolate::Current() != Dart::vm_isolate());
2112 object_pool_ = GrowableObjectArray::New(Heap::kOld);
2113 }
2114 for (int i = 0; i < object_pool_.Length(); i++) {
2115 if (object_pool_.At(i) == obj.raw()) {
2116 return i;
2117 }
2118 }
2119 object_pool_.Add(obj, Heap::kOld);
2120 return object_pool_.Length() - 1;
2121 }
2122
2123
2124 int32_t Assembler::AddExternalLabel(const ExternalLabel* label) {
2125 if (object_pool_.IsNull()) {
2126 // The object pool cannot be used in the vm isolate.
2127 ASSERT(Isolate::Current() != Dart::vm_isolate());
2128 object_pool_ = GrowableObjectArray::New(Heap::kOld);
2129 }
2130 const word address = label->address();
2131 ASSERT(Utils::IsAligned(address, 4));
2132 // The address is stored in the object array as a RawSmi.
2133 const Smi& smi = Smi::Handle(Smi::New(address >> kSmiTagShift));
2134 // Do not reuse an existing entry, since each reference may be patched
2135 // independently.
2136 object_pool_.Add(smi, Heap::kOld);
2137 return object_pool_.Length() - 1;
2138 }
2139
2140
2141 void Assembler::LoadWordFromPoolOffset(Register dst, int32_t offset) {
2142 movq(dst, Address(PP, offset));
2143 // This sequence must be of fixed size. If offset fits in a signed byte we
2144 // have to pad with nops.
2145 if (Utils::IsInt(8, offset)) {
2146 nop(3);
2147 }
2148 }
2149
2150
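The nop(3) padding above is what keeps every pool load the same length. Rough encoding arithmetic, assuming PP and the destination are REX-extended registers and that no SIB byte is needed for this base register:

    // movq(dst, Address(PP, disp)) byte counts (a sketch, not emitted code):
    constexpr int kRex = 1, kOpcode = 1, kModRM = 1, kDisp8 = 1, kDisp32 = 4;
    constexpr int kShortForm = kRex + kOpcode + kModRM + kDisp8;   // 4 bytes
    constexpr int kLongForm  = kRex + kOpcode + kModRM + kDisp32;  // 7 bytes
    static_assert(kLongForm - kShortForm == 3,
                  "nop(3) pads the disp8 form so both forms are 7 bytes");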
2094 void Assembler::LoadObject(Register dst, const Object& object) { 2151 void Assembler::LoadObject(Register dst, const Object& object) {
2095 if (object.IsSmi() || object.InVMHeap()) { 2152 if (object.IsSmi() || object.InVMHeap()) {
2096 movq(dst, Immediate(reinterpret_cast<int64_t>(object.raw()))); 2153 movq(dst, Immediate(reinterpret_cast<int64_t>(object.raw())));
2097 } else { 2154 } else {
2098 ASSERT(object.IsNotTemporaryScopedHandle()); 2155 ASSERT(object.IsNotTemporaryScopedHandle());
2099 ASSERT(object.IsOld()); 2156 ASSERT(object.IsOld());
2100 AssemblerBuffer::EnsureCapacity ensured(&buffer_); 2157 const int32_t offset = Array::element_offset(AddObject(object));
2101 EmitRegisterREX(dst, REX_W); 2158 LoadWordFromPoolOffset(dst, offset - kHeapObjectTag);
2102 EmitUint8(0xB8 | (dst & 7));
2103 buffer_.EmitObject(object);
2104 } 2159 }
2105 } 2160 }
2106 2161
2107 2162
2108 void Assembler::StoreObject(const Address& dst, const Object& object) { 2163 void Assembler::StoreObject(const Address& dst, const Object& object) {
2109 if (object.IsSmi() || object.InVMHeap()) { 2164 if (object.IsSmi() || object.InVMHeap()) {
2110 movq(dst, Immediate(reinterpret_cast<int64_t>(object.raw()))); 2165 movq(dst, Immediate(reinterpret_cast<int64_t>(object.raw())));
2111 } else { 2166 } else {
2112 ASSERT(object.IsNotTemporaryScopedHandle()); 2167 ASSERT(object.IsNotTemporaryScopedHandle());
2113 ASSERT(object.IsOld()); 2168 ASSERT(object.IsOld());
(...skipping 171 matching lines...)
2285 } 2340 }
2286 pushq(RBP); 2341 pushq(RBP);
2287 movq(RBP, RSP); 2342 movq(RBP, RSP);
2288 if (frame_size != 0) { 2343 if (frame_size != 0) {
2289 Immediate frame_space(frame_size); 2344 Immediate frame_space(frame_size);
2290 subq(RSP, frame_space); 2345 subq(RSP, frame_space);
2291 } 2346 }
2292 } 2347 }
2293 2348
2294 2349
2295 void Assembler::LeaveFrame() { 2350 void Assembler::LeaveFrame(bool restore_pp) {
2296 movq(RSP, RBP); 2351 if (restore_pp) {
2297 popq(RBP); 2352 leaq(RSP, Address(RBP, -2 * kWordSize)); // Restore stack pointer.
Ivan Posva 2013/08/22 05:26:25 Wouldn't this be shorter? if (restore_pp) { mov
2353 popq(PP); // Restore PP.
2354 popq(RBP); // Ignore PC marker.
2355 } else {
2356 movq(RSP, RBP); // Restore stack pointer.
2357 }
2358 popq(RBP); // Restore frame pointer.
2298 } 2359 }
2299 2360
2300 2361
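The offsets in LeaveFrame(restore_pp == true) mirror the Dart frame layout built by EnterDartFrame further down: the caller's RBP at [RBP], the PC marker one word below it, and the caller's PP one word below that. A small sketch of the slot map, assuming kWordSize == 8 (slot names are illustrative, not VM identifiers):

    constexpr int kWord = 8;                        // assumed x64 word size
    constexpr int kSavedCallerFpSlot = 0;           // [RBP + 0]  caller's RBP
    constexpr int kPcMarkerSlot = -1 * kWord;       // [RBP - 8]  PC marker
    constexpr int kSavedCallerPpSlot = -2 * kWord;  // [RBP - 16] caller's PP

    // LeaveFrame(true) walks this back: point RSP at [RBP - 16], pop PP,
    // discard the PC marker, then pop the caller's RBP.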
2301 void Assembler::ReserveAlignedFrameSpace(intptr_t frame_space) { 2362 void Assembler::ReserveAlignedFrameSpace(intptr_t frame_space) {
2302 // Reserve space for arguments and align frame before entering 2363 // Reserve space for arguments and align frame before entering
2303 // the C++ world. 2364 // the C++ world.
2304 AddImmediate(RSP, Immediate(-frame_space)); 2365 AddImmediate(RSP, Immediate(-frame_space));
2305 if (OS::ActivationFrameAlignment() > 0) { 2366 if (OS::ActivationFrameAlignment() > 0) {
2306 andq(RSP, Immediate(~(OS::ActivationFrameAlignment() - 1))); 2367 andq(RSP, Immediate(~(OS::ActivationFrameAlignment() - 1)));
2307 } 2368 }
(...skipping 62 matching lines...)
2370 2431
2371 leave(); 2432 leave();
2372 } 2433 }
2373 2434
2374 2435
2375 void Assembler::CallRuntime(const RuntimeEntry& entry) { 2436 void Assembler::CallRuntime(const RuntimeEntry& entry) {
2376 entry.Call(this); 2437 entry.Call(this);
2377 } 2438 }
2378 2439
2379 2440
2441 void Assembler::LoadPoolPointer() {
2442 Label next;
2443 call(&next);
2444 Bind(&next);
2445
2446 // Load new pool pointer.
2447 const intptr_t object_pool_pc_dist =
2448 Instructions::HeaderSize() - Instructions::object_pool_offset() +
2449 CodeSize();
2450 addq(Address(RSP, 0), Immediate(-object_pool_pc_dist));
2451 popq(PP);
2452 movq(PP, Address(PP, 0));
Ivan Posva 2013/08/22 05:26:25 popq(PP); movq(PP, Address(PP, -object_pool_pc_dis
2453 }
2454
2455
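The call(&next)/popq(PP) pair materializes the current PC, and the addq folds in the distance back to the object_pool field of the enclosing Instructions object; EnterDartFrame and EnterOsrFrame below reuse the same distance via the PC marker slot. A worked check of the arithmetic with illustrative numbers (the real values come from Instructions::HeaderSize(), Instructions::object_pool_offset() and CodeSize()):

    #include <cstdint>

    // Illustrative layout: raw Instructions object at 0x1000, code payload
    // starting HeaderSize() bytes in, pool field at object_pool_offset().
    constexpr int64_t kInstructions = 0x1000;
    constexpr int64_t kHeaderSize = 32;  // stand-in for Instructions::HeaderSize()
    constexpr int64_t kPoolOffset = 8;   // stand-in for object_pool_offset()
    constexpr int64_t kCodeSize = 5;     // stand-in for CodeSize() after the Bind

    constexpr int64_t kPushedPc = kInstructions + kHeaderSize + kCodeSize;
    constexpr int64_t kObjectPoolPcDist = kHeaderSize - kPoolOffset + kCodeSize;

    static_assert(kPushedPc - kObjectPoolPcDist == kInstructions + kPoolOffset,
                  "the adjusted value points at the object_pool field, which"
                  " the final movq dereferences into PP");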
2380 void Assembler::EnterDartFrame(intptr_t frame_size) { 2456 void Assembler::EnterDartFrame(intptr_t frame_size) {
2381 EnterFrame(0); 2457 EnterFrame(0);
2382 Label dart_entry; 2458 Label dart_entry;
2383 call(&dart_entry); 2459 call(&dart_entry);
2384 Bind(&dart_entry); 2460 Bind(&dart_entry);
2385 // The runtime system assumes that the code marker address is 2461 // The runtime system assumes that the code marker address is
2386 // kEntryPointToPcMarkerOffset bytes from the entry. If there is any code 2462 // kEntryPointToPcMarkerOffset bytes from the entry. If there is any code
2387 // generated before entering the frame, the address needs to be adjusted. 2463 // generated before entering the frame, the address needs to be adjusted.
2464 const intptr_t object_pool_pc_dist =
2465 Instructions::HeaderSize() - Instructions::object_pool_offset() +
2466 CodeSize();
2388 const intptr_t offset = kEntryPointToPcMarkerOffset - CodeSize(); 2467 const intptr_t offset = kEntryPointToPcMarkerOffset - CodeSize();
2389 if (offset != 0) { 2468 if (offset != 0) {
2390 addq(Address(RSP, 0), Immediate(offset)); 2469 addq(Address(RSP, 0), Immediate(offset));
2391 } 2470 }
2471 // Save caller's pool pointer.
2472 pushq(PP);
2473
2474 // Load callee's pool pointer.
2475 movq(PP, Address(RSP, 1 * kWordSize));
2476 addq(PP, Immediate(-object_pool_pc_dist - offset));
2477 movq(PP, Address(PP, 0));
Ivan Posva 2013/08/22 05:26:25 Wouldn't this be shorter? movq(PP, Address(RSP, 1*
2478
2392 if (frame_size != 0) { 2479 if (frame_size != 0) {
2393 subq(RSP, Immediate(frame_size)); 2480 subq(RSP, Immediate(frame_size));
2394 } 2481 }
2395 } 2482 }
2396 2483
2397 2484
2398 // On entry to a function compiled for OSR, the caller's frame pointer, the 2485 // On entry to a function compiled for OSR, the caller's frame pointer, the
2399 // stack locals, and any copied parameters are already in place. The frame 2486 // stack locals, and any copied parameters are already in place. The frame
2400 // pointer is already set up. The PC marker is not correct for the 2487 // pointer is already set up. The PC marker is not correct for the
2401 // optimized function and there may be extra space for spill slots to 2488 // optimized function and there may be extra space for spill slots to
2402 // allocate. 2489 // allocate.
2403 void Assembler::EnterOsrFrame(intptr_t extra_size) { 2490 void Assembler::EnterOsrFrame(intptr_t extra_size) {
2404 Label dart_entry; 2491 Label dart_entry;
2405 call(&dart_entry); 2492 call(&dart_entry);
2406 Bind(&dart_entry); 2493 Bind(&dart_entry);
2407 // The runtime system assumes that the code marker address is 2494 // The runtime system assumes that the code marker address is
2408 // kEntryPointToPcMarkerOffset bytes from the entry. Since there is no 2495 // kEntryPointToPcMarkerOffset bytes from the entry. Since there is no
2409 // code to set up the frame pointer, the address needs to be adjusted. 2496 // code to set up the frame pointer, the address needs to be adjusted.
2497 const intptr_t object_pool_pc_dist =
2498 Instructions::HeaderSize() - Instructions::object_pool_offset() +
2499 CodeSize();
2410 const intptr_t offset = kEntryPointToPcMarkerOffset - CodeSize(); 2500 const intptr_t offset = kEntryPointToPcMarkerOffset - CodeSize();
2411 if (offset != 0) { 2501 if (offset != 0) {
2412 addq(Address(RSP, 0), Immediate(offset)); 2502 addq(Address(RSP, 0), Immediate(offset));
2413 } 2503 }
2504
2505 // Load callee's pool pointer.
2506 movq(PP, Address(RSP, 0));
2507 addq(PP, Immediate(-object_pool_pc_dist - offset));
2508 movq(PP, Address(PP, 0));
2509
2414 popq(Address(RBP, kPcMarkerSlotFromFp * kWordSize)); 2510 popq(Address(RBP, kPcMarkerSlotFromFp * kWordSize));
2511
2415 if (extra_size != 0) { 2512 if (extra_size != 0) {
2416 subq(RSP, Immediate(extra_size)); 2513 subq(RSP, Immediate(extra_size));
2417 } 2514 }
2418 } 2515 }
2419 2516
2420 2517
2421 void Assembler::EnterStubFrame() { 2518 void Assembler::EnterStubFrame(bool save_pp) {
2422 EnterFrame(0); 2519 if (save_pp) {
2423 pushq(Immediate(0)); // Push 0 in the saved PC area for stub frames. 2520 EnterFrame(0);
2521 pushq(Immediate(0)); // Push 0 in the saved PC area for stub frames.
2522 pushq(PP); // Save caller's pool pointer.
2523 LoadPoolPointer();
2524 } else {
2525 EnterFrame(0);
2526 pushq(Immediate(0)); // Push 0 in the saved PC area for stub frames.
2527 }
2424 } 2528 }
2425 2529
2426 2530
2427 void Assembler::TryAllocate(const Class& cls, 2531 void Assembler::TryAllocate(const Class& cls,
2428 Label* failure, 2532 Label* failure,
2429 bool near_jump, 2533 bool near_jump,
2430 Register instance_reg) { 2534 Register instance_reg) {
2431 ASSERT(failure != NULL); 2535 ASSERT(failure != NULL);
2432 if (FLAG_inline_alloc) { 2536 if (FLAG_inline_alloc) {
2433 Heap* heap = Isolate::Current()->heap(); 2537 Heap* heap = Isolate::Current()->heap();
(...skipping 209 matching lines...)
2643 2747
2644 const char* Assembler::FpuRegisterName(FpuRegister reg) { 2748 const char* Assembler::FpuRegisterName(FpuRegister reg) {
2645 ASSERT((0 <= reg) && (reg < kNumberOfXmmRegisters)); 2749 ASSERT((0 <= reg) && (reg < kNumberOfXmmRegisters));
2646 return xmm_reg_names[reg]; 2750 return xmm_reg_names[reg];
2647 } 2751 }
2648 2752
2649 2753
2650 } // namespace dart 2754 } // namespace dart
2651 2755
2652 #endif // defined TARGET_ARCH_X64 2756 #endif // defined TARGET_ARCH_X64