Chromium Code Reviews

Unified Diff: src/a64/code-stubs-a64.cc

Issue 136643008: A64: Synchronize with r18256. (Closed) Base URL: https://v8.googlecode.com/svn/branches/experimental/a64
Patch Set: Created 6 years, 10 months ago
 // Copyright 2013 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 199 matching lines...)
   static Register registers[] = { x0 };
   descriptor->register_param_count_ = sizeof(registers) / sizeof(registers[0]);
   descriptor->register_params_ = registers;
   descriptor->deoptimization_handler_ =
       FUNCTION_ADDR(CompareNilIC_Miss);
   descriptor->SetMissHandler(
       ExternalReference(IC_Utility(IC::kCompareNilIC_Miss), isolate));
 }


-void BinaryOpStub::InitializeInterfaceDescriptor(
+void BinaryOpICStub::InitializeInterfaceDescriptor(
     Isolate* isolate,
     CodeStubInterfaceDescriptor* descriptor) {
   // x1: left operand
   // x0: right operand
   static Register registers[] = { x1, x0 };
   descriptor->register_param_count_ = sizeof(registers) / sizeof(registers[0]);
   descriptor->register_params_ = registers;
   descriptor->deoptimization_handler_ = FUNCTION_ADDR(BinaryOpIC_Miss);
   descriptor->SetMissHandler(
       ExternalReference(IC_Utility(IC::kBinaryOpIC_Miss), isolate));
(...skipping 788 matching lines...)
     __ PopCPURegList(kCallerSavedFP);
   }
   __ PopCPURegList(saved_regs);
   __ Ret();
 }


 void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(
     Isolate* isolate) {
   StoreBufferOverflowStub stub1(kDontSaveFPRegs);
-  stub1.GetCode(isolate)->set_is_pregenerated(true);
+  stub1.GetCode(isolate);
   StoreBufferOverflowStub stub2(kSaveFPRegs);
-  stub2.GetCode(isolate)->set_is_pregenerated(true);
+  stub2.GetCode(isolate);
 }


 void TranscendentalCacheStub::Generate(MacroAssembler* masm) {
   // Untagged case:
   //   Input: double in d0
   //   Result: double in d0
   //
   // Tagged case:
   //   Input: tagged value in jssp[0]
(...skipping 181 matching lines...)
 }


 ExternalReference TranscendentalCacheStub::CFunction(Isolate* isolate) {
   switch (type_) {
     // Add more cases when necessary.
     default:
       // There's no NULL ExternalReference, so fall into an existing case to
       // avoid compiler warnings about not having a return value.
       UNIMPLEMENTED();
-    case TranscendentalCache::SIN:
-      return ExternalReference::math_sin_double_function(isolate);
-    case TranscendentalCache::COS:
-      return ExternalReference::math_cos_double_function(isolate);
-    case TranscendentalCache::TAN:
-      return ExternalReference::math_tan_double_function(isolate);
     case TranscendentalCache::LOG:
       return ExternalReference::math_log_double_function(isolate);
   }
 }


 Runtime::FunctionId TranscendentalCacheStub::RuntimeFunction() {
   switch (type_) {
     // Add more cases when necessary.
-    case TranscendentalCache::SIN: return Runtime::kMath_sin;
-    case TranscendentalCache::COS: return Runtime::kMath_cos;
-    case TranscendentalCache::TAN: return Runtime::kMath_tan;
     case TranscendentalCache::LOG: return Runtime::kMath_log;
     default:
       UNIMPLEMENTED();
       return Runtime::kAbort;
   }
 }


 void MathPowStub::Generate(MacroAssembler* masm) {
   // Stack on entry:
(...skipping 248 matching lines...)


 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) {
   // It is important that the following stubs are generated in this order
   // because pregenerated stubs can only call other pregenerated stubs.
   // RecordWriteStub uses StoreBufferOverflowStub, which in turn uses
   // CEntryStub.
   CEntryStub::GenerateAheadOfTime(isolate);
   StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate);
   StubFailureTrampolineStub::GenerateAheadOfTime(isolate);
-  RecordWriteStub::GenerateFixedRegStubsAheadOfTime(isolate);
   ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate);
   CreateAllocationSiteStub::GenerateAheadOfTime(isolate);
-  BinaryOpStub::GenerateAheadOfTime(isolate);
+  BinaryOpICStub::GenerateAheadOfTime(isolate);
 }


 void CodeStub::GenerateFPStubs(Isolate* isolate) {
   // Floating-point code doesn't get special handling in A64, so there's
   // nothing to do here.
   USE(isolate);
 }


(...skipping 15 matching lines...)
   // when the C++ code returns to the stub because LR holds the return address
   // in AAPCS64. If the stub is moved (perhaps during a GC), we could end up
   // returning to dead code.
   // TODO(jbramley): Whilst this is the only analysis that makes sense, I can't
   // find any comment to confirm this, and I don't hit any crashes whatever
   // this function returns. The anaylsis should be properly confirmed.
   return true;
 }


-bool CEntryStub::IsPregenerated(Isolate* isolate) {
-  USE(isolate);
-  return result_size_ == 1;
-}
-
-
 void CEntryStub::GenerateAheadOfTime(Isolate* isolate) {
   CEntryStub stub(1, kDontSaveFPRegs);
-  stub.GetCode(isolate)->set_is_pregenerated(true);
+  stub.GetCode(isolate);
   CEntryStub stub_fp(1, kSaveFPRegs);
-  stub_fp.GetCode(isolate)->set_is_pregenerated(true);
+  stub_fp.GetCode(isolate);
 }


 void CEntryStub::GenerateCore(MacroAssembler* masm,
                               Label* throw_normal,
                               Label* throw_termination,
                               Label* throw_out_of_memory,
                               bool do_gc,
                               bool always_allocate) {
   // x0 : Result parameter for PerformGC, if do_gc is true.
(...skipping 3409 matching lines...)
   __ Cbz(x3, &skip_write_barrier);

   __ Str(left, FieldMemOperand(result, ConsString::kFirstOffset));
   __ RecordWriteField(result,
                       ConsString::kFirstOffset,
                       left,
                       x3,
                       kLRHasNotBeenSaved,
                       kDontSaveFPRegs,
                       EMIT_REMEMBERED_SET,
-                      INLINE_SMI_CHECK,
-                      EXPECT_PREGENERATED);
+                      INLINE_SMI_CHECK);
   __ Str(right, FieldMemOperand(result, ConsString::kSecondOffset));
   __ RecordWriteField(result,
                       ConsString::kSecondOffset,
                       right,
                       x3,
                       kLRHasNotBeenSaved,
                       kDontSaveFPRegs,
                       EMIT_REMEMBERED_SET,
-                      INLINE_SMI_CHECK,
-                      EXPECT_PREGENERATED);
+                      INLINE_SMI_CHECK);
   __ B(&after_writing);
   __ Bind(&skip_write_barrier);

   __ Str(left, FieldMemOperand(result, ConsString::kFirstOffset));
   __ Str(right, FieldMemOperand(result, ConsString::kSecondOffset));
   __ Bind(&after_writing);

   __ IncrementCounter(counters->string_add_native(), 1, x3, x4);
   __ Ret();

(...skipping 153 matching lines...)

 void StringAddStub::GenerateRegisterArgsPush(MacroAssembler* masm) {
   __ Push(x0, x1);
 }


 void StringAddStub::GenerateRegisterArgsPop(MacroAssembler* masm) {
   __ Pop(x1, x0);
 }

-#define MINOR_KEY_FOR(obj, value, addr, action, fp_mode) \
-    ((obj) | ((value) << 5) | ((addr) << 10) | ((action) << 15) | \
-     ((fp_mode) << 16))
-
-const int RecordWriteStub::kAheadOfTime[] = {
-  // Arguments to MinorKeyFor() are object, value and address registers.
-
-  // Used in StoreArrayLiteralElementStub::Generate.
-  MINOR_KEY_FOR(10, 0, 11, EMIT_REMEMBERED_SET, kDontSaveFPRegs),
-
-  // Used in FastNewClosure::Generate.
-  MINOR_KEY_FOR(5, 4, 1, EMIT_REMEMBERED_SET, kDontSaveFPRegs),
-
-  // Used in KeyedStoreStubCompiler::GenerateStoreFastElement.
-  MINOR_KEY_FOR(3, 2, 10, EMIT_REMEMBERED_SET, kDontSaveFPRegs),
-
-  // Used in KeyedStoreStubCompiler::GenerateStoreFastDoubleElement.
-  MINOR_KEY_FOR(2, 3, 10, EMIT_REMEMBERED_SET, kDontSaveFPRegs),
-
-  // Used in ElementsTransitionGenerator::GenerateSmiToDouble.
-  MINOR_KEY_FOR(2, 3, 6, OMIT_REMEMBERED_SET, kDontSaveFPRegs),
-  MINOR_KEY_FOR(2, 10, 6, EMIT_REMEMBERED_SET, kDontSaveFPRegs),
-
-  // Used in ElementsTransitionGenerator::GenerateDoubleToObject.
-  MINOR_KEY_FOR(7, 5, 13, EMIT_REMEMBERED_SET, kDontSaveFPRegs),
-  MINOR_KEY_FOR(2, 7, 13, EMIT_REMEMBERED_SET, kDontSaveFPRegs),
-  MINOR_KEY_FOR(2, 3, 13, OMIT_REMEMBERED_SET, kDontSaveFPRegs),
-
-  // Used in KeyedStoreIC::GenerateGeneric helper function.
-  MINOR_KEY_FOR(4, 10, 11, EMIT_REMEMBERED_SET, kDontSaveFPRegs),
-
-  // Used in RegExpExecStub::Generate.
-  MINOR_KEY_FOR(21, 10, 11, EMIT_REMEMBERED_SET, kDontSaveFPRegs),
-
-  // Used in StringAddStub::Generate.
-  MINOR_KEY_FOR(0, 10, 3, EMIT_REMEMBERED_SET, kDontSaveFPRegs),
-  MINOR_KEY_FOR(0, 11, 3, EMIT_REMEMBERED_SET, kDontSaveFPRegs),
-
-  // TODO(jbramley): There are many more sites that want a pregenerated
-  // instance of this stub, but they are currently unimplemented. Once they are
-  // implemented, they should be added to this list.
-
-  // Null termination.
-  // It is safe to encode this as 0 because the three registers used for
-  // RecordWriteStub must not be aliased, and 0 represents (x0, x0, x0).
-  0
-};
-
-
-#undef MINOR_KEY_FOR
-
-
-void RecordWriteStub::GenerateFixedRegStubsAheadOfTime(Isolate* isolate) {
-  // Pregenerate all of the stub variants in the kAheadOfTime list.
-  for (const int* entry = kAheadOfTime; *entry != 0; entry++) {
-    // kAheadOfTime is a list of minor keys, so extract the relevant fields
-    // from the minor key.
-    Register object = Register::XRegFromCode(ObjectBits::decode(*entry));
-    Register value = Register::XRegFromCode(ValueBits::decode(*entry));
-    Register address = Register::XRegFromCode(AddressBits::decode(*entry));
-    RememberedSetAction action = RememberedSetActionBits::decode(*entry);
-    SaveFPRegsMode fp_mode = SaveFPRegsModeBits::decode(*entry);
-
-    RecordWriteStub stub(object, value, address, action, fp_mode);
-    stub.GetCode(isolate)->set_is_pregenerated(true);
-  }
-}
-

 bool CodeStub::CanUseFPRegisters() {
   // FP registers always available on A64.
   return true;
 }


-bool RecordWriteStub::IsPregenerated(Isolate* isolate) {
-  USE(isolate);
-  // If the stub exists in the kAheadOfTime list, it is pregenerated.
-  for (const int* entry = kAheadOfTime; *entry != 0; entry++) {
-    if (*entry == MinorKeyFor(object_, value_, address_,
-                              remembered_set_action_, save_fp_regs_mode_)) {
-      return true;
-    }
-  }
-  return false;
-}
-
-
 void RecordWriteStub::GenerateIncremental(MacroAssembler* masm, Mode mode) {
   // We need some extra registers for this stub, they have been allocated
   // but we need to save them before using them.
   regs_.Save(masm);

   if (remembered_set_action_ == EMIT_REMEMBERED_SET) {
     Label dont_need_remembered_set;

     Register value = regs_.scratch0();
     __ Ldr(value, MemOperand(regs_.address()));
(...skipping 201 matching lines...)
   __ TailCallRuntime(Runtime::kStoreArrayLiteralElement, 5, 1);

   // Array literal has ElementsKind of FAST_*_ELEMENTS and value is an object.
   __ Bind(&fast_elements);
   __ Ldr(x10, FieldMemOperand(array, JSObject::kElementsOffset));
   __ Add(x11, x10, Operand::UntagSmiAndScale(index_smi, kPointerSizeLog2));
   __ Add(x11, x11, FixedArray::kHeaderSize - kHeapObjectTag);
   __ Str(value, MemOperand(x11));
   // Update the write barrier for the array store.
   __ RecordWrite(x10, x11, value, kLRHasNotBeenSaved, kDontSaveFPRegs,
-                 EMIT_REMEMBERED_SET, OMIT_SMI_CHECK, EXPECT_PREGENERATED);
+                 EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
   __ Ret();

   // Array literal has ElementsKind of FAST_*_SMI_ELEMENTS or FAST_*_ELEMENTS,
   // and value is Smi.
   __ Bind(&smi_element);
   __ Ldr(x10, FieldMemOperand(array, JSObject::kElementsOffset));
   __ Add(x11, x10, Operand::UntagSmiAndScale(index_smi, kPointerSizeLog2));
   __ Str(value, FieldMemOperand(x11, FixedArray::kHeaderSize));
   __ Ret();

(...skipping 38 matching lines...)
   __ InvokeFunction(
       x1, argument_count, JUMP_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);
 }


 void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) {
   if (masm->isolate()->function_entry_hook() != NULL) {
     // TODO(all): This needs to be reliably consistent with
     // kReturnAddressDistanceFromFunctionStart in ::Generate.
     Assembler::BlockConstPoolScope no_const_pools(masm);
-    AllowStubCallsScope allow_stub_calls(masm, true);
     ProfileEntryHookStub stub;
     __ Push(lr);
     __ CallStub(&stub);
     __ Pop(lr);
   }
 }


 void ProfileEntryHookStub::Generate(MacroAssembler* masm) {
   MacroAssembler::NoUseRealAbortsScope no_use_real_aborts(masm);
(...skipping 445 matching lines...)
 template<class T>
 static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) {
   ElementsKind initial_kind = GetInitialFastElementsKind();
   ElementsKind initial_holey_kind = GetHoleyElementsKind(initial_kind);

   int to_index = GetSequenceIndexFromFastElementsKind(
       TERMINAL_FAST_ELEMENTS_KIND);
   for (int i = 0; i <= to_index; ++i) {
     ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
     T stub(kind);
-    stub.GetCode(isolate)->set_is_pregenerated(true);
+    stub.GetCode(isolate);
     if ((AllocationSite::GetMode(kind) != DONT_TRACK_ALLOCATION_SITE) ||
         (!FLAG_track_allocation_sites &&
          ((kind == initial_kind) || (kind == initial_holey_kind)))) {
       T stub1(kind, CONTEXT_CHECK_REQUIRED, DISABLE_ALLOCATION_SITES);
-      stub1.GetCode(isolate)->set_is_pregenerated(true);
+      stub1.GetCode(isolate);
     }
   }
 }


 void ArrayConstructorStubBase::GenerateStubsAheadOfTime(Isolate* isolate) {
   ArrayConstructorStubAheadOfTimeHelper<ArrayNoArgumentConstructorStub>(
       isolate);
   ArrayConstructorStubAheadOfTimeHelper<ArraySingleArgumentConstructorStub>(
       isolate);
   ArrayConstructorStubAheadOfTimeHelper<ArrayNArgumentsConstructorStub>(
       isolate);
 }


 void InternalArrayConstructorStubBase::GenerateStubsAheadOfTime(
     Isolate* isolate) {
   ElementsKind kinds[2] = { FAST_ELEMENTS, FAST_HOLEY_ELEMENTS };
   for (int i = 0; i < 2; i++) {
     // For internal arrays we only need a few things
     InternalArrayNoArgumentConstructorStub stubh1(kinds[i]);
-    stubh1.GetCode(isolate)->set_is_pregenerated(true);
+    stubh1.GetCode(isolate);
     InternalArraySingleArgumentConstructorStub stubh2(kinds[i]);
-    stubh2.GetCode(isolate)->set_is_pregenerated(true);
+    stubh2.GetCode(isolate);
     InternalArrayNArgumentsConstructorStub stubh3(kinds[i]);
-    stubh3.GetCode(isolate)->set_is_pregenerated(true);
+    stubh3.GetCode(isolate);
   }
 }


 void ArrayConstructorStub::GenerateDispatchToArrayStub(
     MacroAssembler* masm,
     AllocationSiteOverrideMode mode) {
   Register argc = x0;
   if (argument_count_ == ANY) {
     Label zero_case, n_case;
(...skipping 173 matching lines...)
   __ Bind(&fast_elements_case);
   GenerateCase(masm, FAST_ELEMENTS);
 }


 #undef __

 } }  // namespace v8::internal

 #endif  // V8_TARGET_ARCH_A64