Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(990)

Side by Side Diff: runtime/vm/intermediate_language_x64.cc

Issue 1263513002: VM: Load allocation-top and -end via Thread. (Closed) Base URL: git@github.com:dart-lang/sdk.git@master
Patch Set: arm, arm64 and mips Created 5 years, 4 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
OLDNEW
1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
2 // for details. All rights reserved. Use of this source code is governed by a 2 // for details. All rights reserved. Use of this source code is governed by a
3 // BSD-style license that can be found in the LICENSE file. 3 // BSD-style license that can be found in the LICENSE file.
4 4
5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_X64. 5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_X64.
6 #if defined(TARGET_ARCH_X64) 6 #if defined(TARGET_ARCH_X64)
7 7
8 #include "vm/intermediate_language.h" 8 #include "vm/intermediate_language.h"
9 9
10 #include "vm/dart_entry.h" 10 #include "vm/dart_entry.h"
(...skipping 1667 matching lines...) Expand 10 before | Expand all | Expand 10 after
1678 RawPcDescriptors::kOther, 1678 RawPcDescriptors::kOther,
1679 locs); 1679 locs);
1680 __ MoveRegister(result_, RAX); 1680 __ MoveRegister(result_, RAX);
1681 compiler->RestoreLiveRegisters(locs); 1681 compiler->RestoreLiveRegisters(locs);
1682 __ jmp(exit_label()); 1682 __ jmp(exit_label());
1683 } 1683 }
1684 1684
1685 static void Allocate(FlowGraphCompiler* compiler, 1685 static void Allocate(FlowGraphCompiler* compiler,
1686 Instruction* instruction, 1686 Instruction* instruction,
1687 const Class& cls, 1687 const Class& cls,
1688 Register result) { 1688 Register result,
1689 Register temp) {
1689 if (compiler->intrinsic_mode()) { 1690 if (compiler->intrinsic_mode()) {
1690 __ TryAllocate(cls, 1691 __ TryAllocate(cls,
1691 compiler->intrinsic_slow_path_label(), 1692 compiler->intrinsic_slow_path_label(),
1692 Assembler::kFarJump, 1693 Assembler::kFarJump,
1693 result, 1694 result,
1694 PP); 1695 PP,
1696 temp);
1695 } else { 1697 } else {
1696 BoxAllocationSlowPath* slow_path = 1698 BoxAllocationSlowPath* slow_path =
1697 new BoxAllocationSlowPath(instruction, cls, result); 1699 new BoxAllocationSlowPath(instruction, cls, result);
1698 compiler->AddSlowPathCode(slow_path); 1700 compiler->AddSlowPathCode(slow_path);
1699 1701
1700 __ TryAllocate(cls, 1702 __ TryAllocate(cls,
1701 slow_path->entry_label(), 1703 slow_path->entry_label(),
1702 Assembler::kFarJump, 1704 Assembler::kFarJump,
1703 result, 1705 result,
1704 PP); 1706 PP,
1707 temp);
1705 __ Bind(slow_path->exit_label()); 1708 __ Bind(slow_path->exit_label());
1706 } 1709 }
1707 } 1710 }
1708 1711
1709 private: 1712 private:
1710 Instruction* instruction_; 1713 Instruction* instruction_;
1711 const Class& cls_; 1714 const Class& cls_;
1712 const Register result_; 1715 const Register result_;
1713 }; 1716 };
1714 1717
(...skipping 37 matching lines...) Expand 10 before | Expand all | Expand 10 after
1752 StoreInstanceFieldInstr* instruction, 1755 StoreInstanceFieldInstr* instruction,
1753 Register box_reg, 1756 Register box_reg,
1754 const Class& cls, 1757 const Class& cls,
1755 Register instance_reg, 1758 Register instance_reg,
1756 intptr_t offset, 1759 intptr_t offset,
1757 Register temp) { 1760 Register temp) {
1758 Label done; 1761 Label done;
1759 __ movq(box_reg, FieldAddress(instance_reg, offset)); 1762 __ movq(box_reg, FieldAddress(instance_reg, offset));
1760 __ CompareObject(box_reg, Object::null_object(), PP); 1763 __ CompareObject(box_reg, Object::null_object(), PP);
1761 __ j(NOT_EQUAL, &done); 1764 __ j(NOT_EQUAL, &done);
1762 BoxAllocationSlowPath::Allocate(compiler, instruction, cls, box_reg); 1765 BoxAllocationSlowPath::Allocate(compiler, instruction, cls, box_reg, temp);
1763 __ movq(temp, box_reg); 1766 __ movq(temp, box_reg);
1764 __ StoreIntoObject(instance_reg, 1767 __ StoreIntoObject(instance_reg,
1765 FieldAddress(instance_reg, offset), 1768 FieldAddress(instance_reg, offset),
1766 temp); 1769 temp);
1767 1770
1768 __ Bind(&done); 1771 __ Bind(&done);
1769 } 1772 }
1770 1773
1771 1774
1772 void StoreInstanceFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) { 1775 void StoreInstanceFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
(...skipping 17 matching lines...) Expand all
1790 case kFloat32x4Cid: 1793 case kFloat32x4Cid:
1791 cls = &compiler->float32x4_class(); 1794 cls = &compiler->float32x4_class();
1792 break; 1795 break;
1793 case kFloat64x2Cid: 1796 case kFloat64x2Cid:
1794 cls = &compiler->float64x2_class(); 1797 cls = &compiler->float64x2_class();
1795 break; 1798 break;
1796 default: 1799 default:
1797 UNREACHABLE(); 1800 UNREACHABLE();
1798 } 1801 }
1799 1802
1800 BoxAllocationSlowPath::Allocate(compiler, this, *cls, temp); 1803 BoxAllocationSlowPath::Allocate(compiler, this, *cls, temp, temp2);
1801 __ movq(temp2, temp); 1804 __ movq(temp2, temp);
1802 __ StoreIntoObject(instance_reg, 1805 __ StoreIntoObject(instance_reg,
1803 FieldAddress(instance_reg, offset_in_bytes_), 1806 FieldAddress(instance_reg, offset_in_bytes_),
1804 temp2); 1807 temp2);
1805 } else { 1808 } else {
1806 __ movq(temp, FieldAddress(instance_reg, offset_in_bytes_)); 1809 __ movq(temp, FieldAddress(instance_reg, offset_in_bytes_));
1807 } 1810 }
1808 switch (cid) { 1811 switch (cid) {
1809 case kDoubleCid: 1812 case kDoubleCid:
1810 __ Comment("UnboxedDoubleStoreInstanceFieldInstr"); 1813 __ Comment("UnboxedDoubleStoreInstanceFieldInstr");
(...skipping 232 matching lines...) Expand 10 before | Expand all | Expand 10 after
2043 intptr_t num_elements, 2046 intptr_t num_elements,
2044 Label* slow_path, 2047 Label* slow_path,
2045 Label* done) { 2048 Label* done) {
2046 const int kInlineArraySize = 12; // Same as kInlineInstanceSize. 2049 const int kInlineArraySize = 12; // Same as kInlineInstanceSize.
2047 const Register kLengthReg = R10; 2050 const Register kLengthReg = R10;
2048 const Register kElemTypeReg = RBX; 2051 const Register kElemTypeReg = RBX;
2049 const intptr_t instance_size = Array::InstanceSize(num_elements); 2052 const intptr_t instance_size = Array::InstanceSize(num_elements);
2050 2053
2051 __ TryAllocateArray(kArrayCid, instance_size, slow_path, Assembler::kFarJump, 2054 __ TryAllocateArray(kArrayCid, instance_size, slow_path, Assembler::kFarJump,
2052 RAX, // instance 2055 RAX, // instance
2053 RCX); // end address 2056 RCX, // end address
2057 R13); // temp
2054 2058
2055 // RAX: new object start as a tagged pointer. 2059 // RAX: new object start as a tagged pointer.
2056 // Store the type argument field. 2060 // Store the type argument field.
2057 __ InitializeFieldNoBarrier(RAX, 2061 __ InitializeFieldNoBarrier(RAX,
2058 FieldAddress(RAX, Array::type_arguments_offset()), 2062 FieldAddress(RAX, Array::type_arguments_offset()),
2059 kElemTypeReg); 2063 kElemTypeReg);
2060 2064
2061 // Set the length field. 2065 // Set the length field.
2062 __ InitializeFieldNoBarrier(RAX, 2066 __ InitializeFieldNoBarrier(RAX,
2063 FieldAddress(RAX, Array::length_offset()), 2067 FieldAddress(RAX, Array::length_offset()),
(...skipping 155 matching lines...) Expand 10 before | Expand all | Expand 10 after
2219 // Fall through. 2223 // Fall through.
2220 __ jmp(&load_pointer); 2224 __ jmp(&load_pointer);
2221 2225
2222 if (!compiler->is_optimizing()) { 2226 if (!compiler->is_optimizing()) {
2223 locs()->live_registers()->Add(locs()->in(0)); 2227 locs()->live_registers()->Add(locs()->in(0));
2224 } 2228 }
2225 2229
2226 { 2230 {
2227 __ Bind(&load_double); 2231 __ Bind(&load_double);
2228 BoxAllocationSlowPath::Allocate( 2232 BoxAllocationSlowPath::Allocate(
2229 compiler, this, compiler->double_class(), result); 2233 compiler, this, compiler->double_class(), result, temp);
2230 __ movq(temp, FieldAddress(instance_reg, offset_in_bytes())); 2234 __ movq(temp, FieldAddress(instance_reg, offset_in_bytes()));
2231 __ movsd(value, FieldAddress(temp, Double::value_offset())); 2235 __ movsd(value, FieldAddress(temp, Double::value_offset()));
2232 __ movsd(FieldAddress(result, Double::value_offset()), value); 2236 __ movsd(FieldAddress(result, Double::value_offset()), value);
2233 __ jmp(&done); 2237 __ jmp(&done);
2234 } 2238 }
2235 2239
2236 { 2240 {
2237 __ Bind(&load_float32x4); 2241 __ Bind(&load_float32x4);
2238 BoxAllocationSlowPath::Allocate( 2242 BoxAllocationSlowPath::Allocate(
2239 compiler, this, compiler->float32x4_class(), result); 2243 compiler, this, compiler->float32x4_class(), result, temp);
2240 __ movq(temp, FieldAddress(instance_reg, offset_in_bytes())); 2244 __ movq(temp, FieldAddress(instance_reg, offset_in_bytes()));
2241 __ movups(value, FieldAddress(temp, Float32x4::value_offset())); 2245 __ movups(value, FieldAddress(temp, Float32x4::value_offset()));
2242 __ movups(FieldAddress(result, Float32x4::value_offset()), value); 2246 __ movups(FieldAddress(result, Float32x4::value_offset()), value);
2243 __ jmp(&done); 2247 __ jmp(&done);
2244 } 2248 }
2245 2249
2246 { 2250 {
2247 __ Bind(&load_float64x2); 2251 __ Bind(&load_float64x2);
2248 BoxAllocationSlowPath::Allocate( 2252 BoxAllocationSlowPath::Allocate(
2249 compiler, this, compiler->float64x2_class(), result); 2253 compiler, this, compiler->float64x2_class(), result, temp);
2250 __ movq(temp, FieldAddress(instance_reg, offset_in_bytes())); 2254 __ movq(temp, FieldAddress(instance_reg, offset_in_bytes()));
2251 __ movups(value, FieldAddress(temp, Float64x2::value_offset())); 2255 __ movups(value, FieldAddress(temp, Float64x2::value_offset()));
2252 __ movups(FieldAddress(result, Float64x2::value_offset()), value); 2256 __ movups(FieldAddress(result, Float64x2::value_offset()), value);
2253 __ jmp(&done); 2257 __ jmp(&done);
2254 } 2258 }
2255 2259
2256 __ Bind(&load_pointer); 2260 __ Bind(&load_pointer);
2257 } 2261 }
2258 __ movq(result, FieldAddress(instance_reg, offset_in_bytes())); 2262 __ movq(result, FieldAddress(instance_reg, offset_in_bytes()));
2259 __ Bind(&done); 2263 __ Bind(&done);
(...skipping 102 matching lines...) Expand 10 before | Expand all | Expand 10 after
2362 __ Bind(&type_arguments_instantiated); 2366 __ Bind(&type_arguments_instantiated);
2363 ASSERT(instantiator_reg == result_reg); 2367 ASSERT(instantiator_reg == result_reg);
2364 } 2368 }
2365 2369
2366 2370
2367 LocationSummary* AllocateUninitializedContextInstr::MakeLocationSummary( 2371 LocationSummary* AllocateUninitializedContextInstr::MakeLocationSummary(
2368 Zone* zone, 2372 Zone* zone,
2369 bool opt) const { 2373 bool opt) const {
2370 ASSERT(opt); 2374 ASSERT(opt);
2371 const intptr_t kNumInputs = 0; 2375 const intptr_t kNumInputs = 0;
2372 const intptr_t kNumTemps = 1; 2376 const intptr_t kNumTemps = 2;
2373 LocationSummary* locs = new(zone) LocationSummary( 2377 LocationSummary* locs = new(zone) LocationSummary(
2374 zone, kNumInputs, kNumTemps, LocationSummary::kCallOnSlowPath); 2378 zone, kNumInputs, kNumTemps, LocationSummary::kCallOnSlowPath);
2375 locs->set_temp(0, Location::RegisterLocation(R10)); 2379 locs->set_temp(0, Location::RegisterLocation(R10));
2380 locs->set_temp(1, Location::RegisterLocation(R13));
2376 locs->set_out(0, Location::RegisterLocation(RAX)); 2381 locs->set_out(0, Location::RegisterLocation(RAX));
2377 return locs; 2382 return locs;
2378 } 2383 }
2379 2384
2380 2385
2381 class AllocateContextSlowPath : public SlowPathCode { 2386 class AllocateContextSlowPath : public SlowPathCode {
2382 public: 2387 public:
2383 explicit AllocateContextSlowPath( 2388 explicit AllocateContextSlowPath(
2384 AllocateUninitializedContextInstr* instruction) 2389 AllocateUninitializedContextInstr* instruction)
2385 : instruction_(instruction) { } 2390 : instruction_(instruction) { }
(...skipping 29 matching lines...) Expand all
2415 Register temp = locs()->temp(0).reg(); 2420 Register temp = locs()->temp(0).reg();
2416 Register result = locs()->out(0).reg(); 2421 Register result = locs()->out(0).reg();
 2417 // Try allocate the object. 2422 // Try to allocate the object.
2418 AllocateContextSlowPath* slow_path = new AllocateContextSlowPath(this); 2423 AllocateContextSlowPath* slow_path = new AllocateContextSlowPath(this);
2419 compiler->AddSlowPathCode(slow_path); 2424 compiler->AddSlowPathCode(slow_path);
2420 intptr_t instance_size = Context::InstanceSize(num_context_variables()); 2425 intptr_t instance_size = Context::InstanceSize(num_context_variables());
2421 2426
2422 __ TryAllocateArray(kContextCid, instance_size, slow_path->entry_label(), 2427 __ TryAllocateArray(kContextCid, instance_size, slow_path->entry_label(),
2423 Assembler::kFarJump, 2428 Assembler::kFarJump,
2424 result, // instance 2429 result, // instance
2425 temp); // end address 2430 temp, // end address
2431 locs()->temp(1).reg());
2426 2432
 2427 // Setup up number of context variables field. 2433 // Set up the number-of-context-variables field.
2428 __ movq(FieldAddress(result, Context::num_variables_offset()), 2434 __ movq(FieldAddress(result, Context::num_variables_offset()),
2429 Immediate(num_context_variables())); 2435 Immediate(num_context_variables()));
2430 2436
2431 __ Bind(slow_path->exit_label()); 2437 __ Bind(slow_path->exit_label());
2432 } 2438 }
2433 2439
2434 2440
2435 LocationSummary* AllocateContextInstr::MakeLocationSummary(Zone* zone, 2441 LocationSummary* AllocateContextInstr::MakeLocationSummary(Zone* zone,
(...skipping 820 matching lines...) Expand 10 before | Expand all | Expand 10 after
3256 __ orq(temp, right); 3262 __ orq(temp, right);
3257 __ testq(temp, Immediate(kSmiTagMask)); 3263 __ testq(temp, Immediate(kSmiTagMask));
3258 } 3264 }
3259 __ j(ZERO, deopt); 3265 __ j(ZERO, deopt);
3260 } 3266 }
3261 3267
3262 3268
3263 LocationSummary* BoxInstr::MakeLocationSummary(Zone* zone, 3269 LocationSummary* BoxInstr::MakeLocationSummary(Zone* zone,
3264 bool opt) const { 3270 bool opt) const {
3265 const intptr_t kNumInputs = 1; 3271 const intptr_t kNumInputs = 1;
3266 const intptr_t kNumTemps = 0; 3272 const intptr_t kNumTemps = 1;
3267 LocationSummary* summary = new(zone) LocationSummary( 3273 LocationSummary* summary = new(zone) LocationSummary(
3268 zone, kNumInputs, kNumTemps, LocationSummary::kCallOnSlowPath); 3274 zone, kNumInputs, kNumTemps, LocationSummary::kCallOnSlowPath);
3269 summary->set_in(0, Location::RequiresFpuRegister()); 3275 summary->set_in(0, Location::RequiresFpuRegister());
3276 summary->set_temp(0, Location::RequiresRegister());
3270 summary->set_out(0, Location::RequiresRegister()); 3277 summary->set_out(0, Location::RequiresRegister());
3271 return summary; 3278 return summary;
3272 } 3279 }
3273 3280
3274 3281
3275 void BoxInstr::EmitNativeCode(FlowGraphCompiler* compiler) { 3282 void BoxInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
3276 Register out_reg = locs()->out(0).reg(); 3283 Register out_reg = locs()->out(0).reg();
3284 Register temp = locs()->temp(0).reg();
3277 XmmRegister value = locs()->in(0).fpu_reg(); 3285 XmmRegister value = locs()->in(0).fpu_reg();
3278 3286
3279 BoxAllocationSlowPath::Allocate( 3287 BoxAllocationSlowPath::Allocate(
3280 compiler, this, compiler->BoxClassFor(from_representation()), out_reg); 3288 compiler, this, compiler->BoxClassFor(from_representation()), out_reg,
3289 temp);
3281 __ movsd(FieldAddress(out_reg, Double::value_offset()), value); 3290 __ movsd(FieldAddress(out_reg, Double::value_offset()), value);
3282 switch (from_representation()) { 3291 switch (from_representation()) {
3283 case kUnboxedDouble: 3292 case kUnboxedDouble:
3284 __ movsd(FieldAddress(out_reg, ValueOffset()), value); 3293 __ movsd(FieldAddress(out_reg, ValueOffset()), value);
3285 break; 3294 break;
3286 case kUnboxedFloat32x4: 3295 case kUnboxedFloat32x4:
3287 case kUnboxedFloat64x2: 3296 case kUnboxedFloat64x2:
3288 case kUnboxedInt32x4: 3297 case kUnboxedInt32x4:
3289 __ movups(FieldAddress(out_reg, ValueOffset()), value); 3298 __ movups(FieldAddress(out_reg, ValueOffset()), value);
3290 break; 3299 break;
(...skipping 208 matching lines...) Expand 10 before | Expand all | Expand 10 after
3499 ASSERT(from_representation() == kUnboxedUint32); 3508 ASSERT(from_representation() == kUnboxedUint32);
3500 __ movl(out, value); 3509 __ movl(out, value);
3501 } 3510 }
3502 __ SmiTag(out); 3511 __ SmiTag(out);
3503 } 3512 }
3504 3513
3505 3514
3506 LocationSummary* BoxInt64Instr::MakeLocationSummary(Zone* zone, 3515 LocationSummary* BoxInt64Instr::MakeLocationSummary(Zone* zone,
3507 bool opt) const { 3516 bool opt) const {
3508 const intptr_t kNumInputs = 1; 3517 const intptr_t kNumInputs = 1;
3509 const intptr_t kNumTemps = 0; 3518 const intptr_t kNumTemps = ValueFitsSmi() ? 0 : 1;
3510 LocationSummary* summary = new(zone) LocationSummary( 3519 LocationSummary* summary = new(zone) LocationSummary(
3511 zone, 3520 zone,
3512 kNumInputs, 3521 kNumInputs,
3513 kNumTemps, 3522 kNumTemps,
3514 ValueFitsSmi() ? LocationSummary::kNoCall 3523 ValueFitsSmi() ? LocationSummary::kNoCall
3515 : LocationSummary::kCallOnSlowPath); 3524 : LocationSummary::kCallOnSlowPath);
3516 summary->set_in(0, Location::RequiresRegister()); 3525 summary->set_in(0, Location::RequiresRegister());
3526 if (!ValueFitsSmi()) {
3527 summary->set_temp(0, Location::RequiresRegister());
3528 }
3517 summary->set_out(0, Location::RequiresRegister()); 3529 summary->set_out(0, Location::RequiresRegister());
3518 return summary; 3530 return summary;
3519 } 3531 }
3520 3532
3521 3533
3522 void BoxInt64Instr::EmitNativeCode(FlowGraphCompiler* compiler) { 3534 void BoxInt64Instr::EmitNativeCode(FlowGraphCompiler* compiler) {
3523 const Register out = locs()->out(0).reg(); 3535 const Register out = locs()->out(0).reg();
3524 const Register value = locs()->in(0).reg(); 3536 const Register value = locs()->in(0).reg();
3537 const Register temp = locs()->temp(0).reg();
3525 __ MoveRegister(out, value); 3538 __ MoveRegister(out, value);
3526 __ SmiTag(out); 3539 __ SmiTag(out);
3527 if (!ValueFitsSmi()) { 3540 if (!ValueFitsSmi()) {
3528 Label done; 3541 Label done;
3529 __ j(NO_OVERFLOW, &done); 3542 __ j(NO_OVERFLOW, &done);
3530 BoxAllocationSlowPath::Allocate( 3543 BoxAllocationSlowPath::Allocate(
3531 compiler, this, compiler->mint_class(), out); 3544 compiler, this, compiler->mint_class(), out, temp);
3532 __ movq(FieldAddress(out, Mint::value_offset()), value); 3545 __ movq(FieldAddress(out, Mint::value_offset()), value);
3533 __ Bind(&done); 3546 __ Bind(&done);
3534 } 3547 }
3535 } 3548 }
3536 3549
3537 3550
3538 LocationSummary* BinaryDoubleOpInstr::MakeLocationSummary(Zone* zone, 3551 LocationSummary* BinaryDoubleOpInstr::MakeLocationSummary(Zone* zone,
3539 bool opt) const { 3552 bool opt) const {
3540 const intptr_t kNumInputs = 2; 3553 const intptr_t kNumInputs = 2;
3541 const intptr_t kNumTemps = 0; 3554 const intptr_t kNumTemps = 0;
(...skipping 2880 matching lines...) Expand 10 before | Expand all | Expand 10 after
6422 __ Drop(1); 6435 __ Drop(1);
6423 __ popq(result); 6436 __ popq(result);
6424 } 6437 }
6425 6438
6426 6439
6427 } // namespace dart 6440 } // namespace dart
6428 6441
6429 #undef __ 6442 #undef __
6430 6443
6431 #endif // defined TARGET_ARCH_X64 6444 #endif // defined TARGET_ARCH_X64
OLDNEW

Powered by Google App Engine
This is Rietveld 408576698