Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(1)

Side by Side Diff: src/arm/code-stubs-arm.cc

Issue 157503002: A64: Synchronize with r18444. (Closed) Base URL: https://v8.googlecode.com/svn/branches/experimental/a64
Patch Set: Created 6 years, 10 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« no previous file with comments | « src/arm/code-stubs-arm.h ('k') | src/arm/codegen-arm.cc » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show/Hide Comments ('s')
OLDNEW
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 175 matching lines...) Expand 10 before | Expand all | Expand 10 after
186 static Register registers[] = { r0 }; 186 static Register registers[] = { r0 };
187 descriptor->register_param_count_ = 1; 187 descriptor->register_param_count_ = 1;
188 descriptor->register_params_ = registers; 188 descriptor->register_params_ = registers;
189 descriptor->deoptimization_handler_ = 189 descriptor->deoptimization_handler_ =
190 FUNCTION_ADDR(CompareNilIC_Miss); 190 FUNCTION_ADDR(CompareNilIC_Miss);
191 descriptor->SetMissHandler( 191 descriptor->SetMissHandler(
192 ExternalReference(IC_Utility(IC::kCompareNilIC_Miss), isolate)); 192 ExternalReference(IC_Utility(IC::kCompareNilIC_Miss), isolate));
193 } 193 }
194 194
195 195
// Describes the calling convention of BinaryOpICStub: the two operands
// arrive in r1 (left) and r0 (right), and a cache miss falls back to the
// IC_Utility entry for IC::kBinaryOpIC_Miss on this isolate.
196 void BinaryOpICStub::InitializeInterfaceDescriptor(
197 Isolate* isolate,
198 CodeStubInterfaceDescriptor* descriptor) {
// r1 = left operand, r0 = right operand.
199 static Register registers[] = { r1, r0 };
200 descriptor->register_param_count_ = 2;
201 descriptor->register_params_ = registers;
202 descriptor->deoptimization_handler_ = FUNCTION_ADDR(BinaryOpIC_Miss);
203 descriptor->SetMissHandler(
204 ExternalReference(IC_Utility(IC::kBinaryOpIC_Miss), isolate));
205 }
206
207
208 static void InitializeArrayConstructorDescriptor( 196 static void InitializeArrayConstructorDescriptor(
209 Isolate* isolate, 197 Isolate* isolate,
210 CodeStubInterfaceDescriptor* descriptor, 198 CodeStubInterfaceDescriptor* descriptor,
211 int constant_stack_parameter_count) { 199 int constant_stack_parameter_count) {
212 // register state 200 // register state
213 // r0 -- number of arguments 201 // r0 -- number of arguments
214 // r1 -- function 202 // r1 -- function
215 // r2 -- type info cell with elements kind 203 // r2 -- type info cell with elements kind
216 static Register registers_variable_args[] = { r1, r2, r0 }; 204 static Register registers_variable_args[] = { r1, r2, r0 };
217 static Register registers_no_args[] = { r1, r2 }; 205 static Register registers_no_args[] = { r1, r2 };
(...skipping 114 matching lines...) Expand 10 before | Expand all | Expand 10 after
332 Isolate* isolate, 320 Isolate* isolate,
333 CodeStubInterfaceDescriptor* descriptor) { 321 CodeStubInterfaceDescriptor* descriptor) {
334 static Register registers[] = { r0, r3, r1, r2 }; 322 static Register registers[] = { r0, r3, r1, r2 };
335 descriptor->register_param_count_ = 4; 323 descriptor->register_param_count_ = 4;
336 descriptor->register_params_ = registers; 324 descriptor->register_params_ = registers;
337 descriptor->deoptimization_handler_ = 325 descriptor->deoptimization_handler_ =
338 FUNCTION_ADDR(ElementsTransitionAndStoreIC_Miss); 326 FUNCTION_ADDR(ElementsTransitionAndStoreIC_Miss);
339 } 327 }
340 328
341 329
// Describes the calling convention of BinaryOpICStub: the two operands
// arrive in r1 (left) and r0 (right), and a cache miss falls back to the
// IC_Utility entry for IC::kBinaryOpIC_Miss on this isolate.
330 void BinaryOpICStub::InitializeInterfaceDescriptor(
331 Isolate* isolate,
332 CodeStubInterfaceDescriptor* descriptor) {
// r1 = left operand, r0 = right operand.
333 static Register registers[] = { r1, r0 };
334 descriptor->register_param_count_ = 2;
335 descriptor->register_params_ = registers;
336 descriptor->deoptimization_handler_ = FUNCTION_ADDR(BinaryOpIC_Miss);
337 descriptor->SetMissHandler(
338 ExternalReference(IC_Utility(IC::kBinaryOpIC_Miss), isolate));
339 }
340
341
// Describes the calling convention of BinaryOpWithAllocationSiteStub:
// three register parameters (r2, r1, r0 — presumably allocation site,
// left, right, mirroring the r1/r0 operand order used by BinaryOpICStub;
// confirm against the stub's Generate()), deoptimizing into
// BinaryOpIC_MissWithAllocationSite. Note no SetMissHandler() is
// installed here, unlike BinaryOpICStub above.
342 void BinaryOpWithAllocationSiteStub::InitializeInterfaceDescriptor(
343 Isolate* isolate,
344 CodeStubInterfaceDescriptor* descriptor) {
345 static Register registers[] = { r2, r1, r0 };
346 descriptor->register_param_count_ = 3;
347 descriptor->register_params_ = registers;
348 descriptor->deoptimization_handler_ =
349 FUNCTION_ADDR(BinaryOpIC_MissWithAllocationSite);
350 }
351
352
342 void NewStringAddStub::InitializeInterfaceDescriptor( 353 void NewStringAddStub::InitializeInterfaceDescriptor(
343 Isolate* isolate, 354 Isolate* isolate,
344 CodeStubInterfaceDescriptor* descriptor) { 355 CodeStubInterfaceDescriptor* descriptor) {
345 static Register registers[] = { r1, r0 }; 356 static Register registers[] = { r1, r0 };
346 descriptor->register_param_count_ = 2; 357 descriptor->register_param_count_ = 2;
347 descriptor->register_params_ = registers; 358 descriptor->register_params_ = registers;
348 descriptor->deoptimization_handler_ = 359 descriptor->deoptimization_handler_ =
349 Runtime::FunctionForId(Runtime::kStringAdd)->entry; 360 Runtime::FunctionForId(Runtime::kStringAdd)->entry;
350 } 361 }
351 362
(...skipping 872 matching lines...) Expand 10 before | Expand all | Expand 10 after
1224 __ CallCFunction( 1235 __ CallCFunction(
1225 ExternalReference::store_buffer_overflow_function(masm->isolate()), 1236 ExternalReference::store_buffer_overflow_function(masm->isolate()),
1226 argument_count); 1237 argument_count);
1227 if (save_doubles_ == kSaveFPRegs) { 1238 if (save_doubles_ == kSaveFPRegs) {
1228 __ RestoreFPRegs(sp, scratch); 1239 __ RestoreFPRegs(sp, scratch);
1229 } 1240 }
1230 __ ldm(ia_w, sp, kCallerSaved | pc.bit()); // Also pop pc to get Ret(0). 1241 __ ldm(ia_w, sp, kCallerSaved | pc.bit()); // Also pop pc to get Ret(0).
1231 } 1242 }
1232 1243
1233 1244
// Emits code that evaluates a transcendental function (only LOG is wired
// up; see GenerateCallCFunction/RuntimeFunction below) with a lookup in
// the isolate's TranscendentalCache. On a cache hit the cached HeapNumber
// result is returned directly; on a miss the value is computed by a C
// call and, if a HeapNumber can be allocated, written back into the cache.
1234 void TranscendentalCacheStub::Generate(MacroAssembler* masm) {
1235 // Untagged case: double input in d2, double result goes
1236 // into d2.
1237 // Tagged case: tagged input on top of stack and in r0,
1238 // tagged result (heap number) goes into r0.
1239
1240 Label input_not_smi;
1241 Label loaded;
1242 Label calculate;
1243 Label invalid_cache;
1244 const Register scratch0 = r9;
1245 Register scratch1 = no_reg; // will be r4
// cache_entry aliases r0: the tagged input (tagged case) is consumed
// before r0 is reused as the cache-entry pointer.
1246 const Register cache_entry = r0;
1247 const bool tagged = (argument_type_ == TAGGED);
1248
1249 if (tagged) {
1250 // Argument is a number and is on stack and in r0.
1251 // Load argument and check if it is a smi.
1252 __ JumpIfNotSmi(r0, &input_not_smi);
1253
1254 // Input is a smi. Convert to double and load the low and high words
1255 // of the double into r2, r3.
1256 __ SmiToDouble(d7, r0);
1257 __ vmov(r2, r3, d7);
1258 __ b(&loaded);
1259
1260 __ bind(&input_not_smi);
1261 // Check if input is a HeapNumber.
1262 __ CheckMap(r0,
1263 r1,
1264 Heap::kHeapNumberMapRootIndex,
1265 &calculate,
1266 DONT_DO_SMI_CHECK);
1267 // Input is a HeapNumber. Load it to a double register and store the
1268 // low and high words into r2, r3.
1269 __ vldr(d0, FieldMemOperand(r0, HeapNumber::kValueOffset));
1270 __ vmov(r2, r3, d0);
1271 } else {
1272 // Input is untagged double in d2. Output goes to d2.
1273 __ vmov(r2, r3, d2);
1274 }
1275 __ bind(&loaded);
1276 // r2 = low 32 bits of double value
1277 // r3 = high 32 bits of double value
1278 // Compute hash (the shifts are arithmetic):
1279 // h = (low ^ high); h ^= h >> 16; h ^= h >> 8; h = h & (cacheSize - 1);
1280 __ eor(r1, r2, Operand(r3));
1281 __ eor(r1, r1, Operand(r1, ASR, 16));
1282 __ eor(r1, r1, Operand(r1, ASR, 8));
// The masking below only works if the cache size is a power of two.
1283 ASSERT(IsPowerOf2(TranscendentalCache::SubCache::kCacheSize));
1284 __ And(r1, r1, Operand(TranscendentalCache::SubCache::kCacheSize - 1));
1285
1286 // r2 = low 32 bits of double value.
1287 // r3 = high 32 bits of double value.
1288 // r1 = TranscendentalCache::hash(double value).
1289 Isolate* isolate = masm->isolate();
1290 ExternalReference cache_array =
1291 ExternalReference::transcendental_cache_array_address(isolate);
1292 __ mov(cache_entry, Operand(cache_array));
1293 // cache_entry points to cache array.
1294 int cache_array_index
1295 = type_ * sizeof(isolate->transcendental_cache()->caches_[0]);
1296 __ ldr(cache_entry, MemOperand(cache_entry, cache_array_index));
1297 // r0 points to the cache for the type type_.
1298 // If NULL, the cache hasn't been initialized yet, so go through runtime.
1299 __ cmp(cache_entry, Operand::Zero());
1300 __ b(eq, &invalid_cache);
1301
1302 #ifdef DEBUG
1303 // Check that the layout of cache elements match expectations.
1304 { TranscendentalCache::SubCache::Element test_elem[2];
1305 char* elem_start = reinterpret_cast<char*>(&test_elem[0]);
1306 char* elem2_start = reinterpret_cast<char*>(&test_elem[1]);
1307 char* elem_in0 = reinterpret_cast<char*>(&(test_elem[0].in[0]));
1308 char* elem_in1 = reinterpret_cast<char*>(&(test_elem[0].in[1]));
1309 char* elem_out = reinterpret_cast<char*>(&(test_elem[0].output));
1310 CHECK_EQ(12, elem2_start - elem_start); // Two uint_32's and a pointer.
1311 CHECK_EQ(0, elem_in0 - elem_start);
1312 CHECK_EQ(kIntSize, elem_in1 - elem_start);
1313 CHECK_EQ(2 * kIntSize, elem_out - elem_start);
1314 }
1315 #endif
1316
1317 // Find the address of the r1'st entry in the cache, i.e., &r0[r1*12].
1318 __ add(r1, r1, Operand(r1, LSL, 1));
1319 __ add(cache_entry, cache_entry, Operand(r1, LSL, 2));
1320 // Check if cache matches: Double value is stored in uint32_t[2] array.
// r4/r5 = cached input words, r6 = cached result pointer.
1321 __ ldm(ia, cache_entry, r4.bit() | r5.bit() | r6.bit());
1322 __ cmp(r2, r4);
// The second compare executes only if the first compared equal (eq),
// so a single branch on ne covers a mismatch in either word.
1323 __ cmp(r3, r5, eq);
1324 __ b(ne, &calculate);
1325
1326 scratch1 = r4; // Start of scratch1 range.
1327
1328 // Cache hit. Load result, cleanup and return.
1329 Counters* counters = masm->isolate()->counters();
1330 __ IncrementCounter(
1331 counters->transcendental_cache_hit(), 1, scratch0, scratch1);
1332 if (tagged) {
1333 // Pop input value from stack and load result into r0.
1334 __ pop();
1335 __ mov(r0, Operand(r6));
1336 } else {
1337 // Load result into d2.
1338 __ vldr(d2, FieldMemOperand(r6, HeapNumber::kValueOffset));
1339 }
1340 __ Ret();
1341
1342 __ bind(&calculate);
1343 __ IncrementCounter(
1344 counters->transcendental_cache_miss(), 1, scratch0, scratch1);
1345 if (tagged) {
// Tagged miss: tail-call the runtime; it recreates the cache entry.
1346 __ bind(&invalid_cache);
1347 ExternalReference runtime_function =
1348 ExternalReference(RuntimeFunction(), masm->isolate());
1349 __ TailCallExternalReference(runtime_function, 1, 1);
1350 } else {
1351 Label no_update;
1352 Label skip_cache;
1353
1354 // Call C function to calculate the result and update the cache.
1355 // r0: precalculated cache entry address.
1356 // r2 and r3: parts of the double value.
1357 // Store r0, r2 and r3 on stack for later before calling C function.
1358 __ Push(r3, r2, cache_entry);
1359 GenerateCallCFunction(masm, scratch0);
1360 __ GetCFunctionDoubleResult(d2);
1361
1362 // Try to update the cache. If we cannot allocate a
1363 // heap number, we return the result without updating.
1364 __ Pop(r3, r2, cache_entry);
1365 __ LoadRoot(r5, Heap::kHeapNumberMapRootIndex);
1366 __ AllocateHeapNumber(r6, scratch0, scratch1, r5, &no_update);
1367 __ vstr(d2, FieldMemOperand(r6, HeapNumber::kValueOffset));
// Write back input words (r2, r3) and result pointer (r6) into the entry.
1368 __ stm(ia, cache_entry, r2.bit() | r3.bit() | r6.bit());
1369 __ Ret();
1370
1371 __ bind(&invalid_cache);
1372 // The cache is invalid. Call runtime which will recreate the
1373 // cache.
1374 __ LoadRoot(r5, Heap::kHeapNumberMapRootIndex);
1375 __ AllocateHeapNumber(r0, scratch0, scratch1, r5, &skip_cache);
1376 __ vstr(d2, FieldMemOperand(r0, HeapNumber::kValueOffset));
1377 {
1378 FrameScope scope(masm, StackFrame::INTERNAL);
1379 __ push(r0);
1380 __ CallRuntime(RuntimeFunction(), 1);
1381 }
1382 __ vldr(d2, FieldMemOperand(r0, HeapNumber::kValueOffset));
1383 __ Ret();
1384
1385 __ bind(&skip_cache);
1386 // Call C function to calculate the result and answer directly
1387 // without updating the cache.
1388 GenerateCallCFunction(masm, scratch0);
1389 __ GetCFunctionDoubleResult(d2);
1390 __ bind(&no_update);
1391
1392 // We return the value in d2 without adding it to the cache, but
1393 // we cause a scavenging GC so that future allocations will succeed.
1394 {
1395 FrameScope scope(masm, StackFrame::INTERNAL);
1396
1397 // Allocate an aligned object larger than a HeapNumber.
1398 ASSERT(4 * kPointerSize >= HeapNumber::kSize);
1399 __ mov(scratch0, Operand(4 * kPointerSize));
1400 __ push(scratch0);
1401 __ CallRuntimeSaveDoubles(Runtime::kAllocateInNewSpace);
1402 }
1403 __ Ret();
1404 }
1405 }
1406
1407
// Emits the actual C call for the transcendental function. The input
// double is in d2 and is moved to the C calling convention's argument
// location (d0 for EABI hardfloat, r0/r1 otherwise) before the call.
// lr is preserved across the call. Only TranscendentalCache::LOG is
// implemented; any other type_ hits UNIMPLEMENTED().
1408 void TranscendentalCacheStub::GenerateCallCFunction(MacroAssembler* masm,
1409 Register scratch) {
1410 Isolate* isolate = masm->isolate();
1411
1412 __ push(lr);
// 0 integer arguments, 1 double argument.
1413 __ PrepareCallCFunction(0, 1, scratch);
1414 if (masm->use_eabi_hardfloat()) {
1415 __ vmov(d0, d2);
1416 } else {
1417 __ vmov(r0, r1, d2);
1418 }
// The math functions called here must not trigger a GC.
1419 AllowExternalCallThatCantCauseGC scope(masm);
1420 switch (type_) {
1421 case TranscendentalCache::LOG:
1422 __ CallCFunction(ExternalReference::math_log_double_function(isolate),
1423 0, 1);
1424 break;
1425 default:
1426 UNIMPLEMENTED();
1427 break;
1428 }
1429 __ pop(lr);
1430 }
1431
1432
// Maps the stub's cache type to the runtime function used as the slow
// path (cache miss / invalid cache). Only LOG is supported; other types
// are unimplemented and fall back to Runtime::kAbort after the
// UNIMPLEMENTED() check.
1433 Runtime::FunctionId TranscendentalCacheStub::RuntimeFunction() {
1434 switch (type_) {
1435 // Add more cases when necessary.
1436 case TranscendentalCache::LOG: return Runtime::kMath_log;
1437 default:
1438 UNIMPLEMENTED();
// Unreachable in practice; keeps the compiler happy about the return.
1439 return Runtime::kAbort;
1440 }
1441 }
1442
1443
1444 void MathPowStub::Generate(MacroAssembler* masm) { 1245 void MathPowStub::Generate(MacroAssembler* masm) {
1445 const Register base = r1; 1246 const Register base = r1;
1446 const Register exponent = r2; 1247 const Register exponent = r2;
1447 const Register heapnumbermap = r5; 1248 const Register heapnumbermap = r5;
1448 const Register heapnumber = r0; 1249 const Register heapnumber = r0;
1449 const DwVfpRegister double_base = d0; 1250 const DwVfpRegister double_base = d0;
1450 const DwVfpRegister double_exponent = d1; 1251 const DwVfpRegister double_exponent = d1;
1451 const DwVfpRegister double_result = d2; 1252 const DwVfpRegister double_result = d2;
1452 const DwVfpRegister double_scratch = d3; 1253 const DwVfpRegister double_scratch = d3;
1453 const SwVfpRegister single_scratch = s6; 1254 const SwVfpRegister single_scratch = s6;
(...skipping 191 matching lines...) Expand 10 before | Expand all | Expand 10 after
1645 1446
1646 1447
1647 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) { 1448 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) {
1648 CEntryStub::GenerateAheadOfTime(isolate); 1449 CEntryStub::GenerateAheadOfTime(isolate);
1649 WriteInt32ToHeapNumberStub::GenerateFixedRegStubsAheadOfTime(isolate); 1450 WriteInt32ToHeapNumberStub::GenerateFixedRegStubsAheadOfTime(isolate);
1650 StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate); 1451 StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate);
1651 StubFailureTrampolineStub::GenerateAheadOfTime(isolate); 1452 StubFailureTrampolineStub::GenerateAheadOfTime(isolate);
1652 ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate); 1453 ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate);
1653 CreateAllocationSiteStub::GenerateAheadOfTime(isolate); 1454 CreateAllocationSiteStub::GenerateAheadOfTime(isolate);
1654 BinaryOpICStub::GenerateAheadOfTime(isolate); 1455 BinaryOpICStub::GenerateAheadOfTime(isolate);
1456 BinaryOpICWithAllocationSiteStub::GenerateAheadOfTime(isolate);
1655 } 1457 }
1656 1458
1657 1459
1658 void CodeStub::GenerateFPStubs(Isolate* isolate) { 1460 void CodeStub::GenerateFPStubs(Isolate* isolate) {
1659 SaveFPRegsMode mode = kSaveFPRegs; 1461 SaveFPRegsMode mode = kSaveFPRegs;
1660 CEntryStub save_doubles(1, mode); 1462 CEntryStub save_doubles(1, mode);
1661 StoreBufferOverflowStub stub(mode); 1463 StoreBufferOverflowStub stub(mode);
1662 // These stubs might already be in the snapshot, detect that and don't 1464 // These stubs might already be in the snapshot, detect that and don't
1663 // regenerate, which would lead to code stub initialization state being messed 1465 // regenerate, which would lead to code stub initialization state being messed
1664 // up. 1466 // up.
(...skipping 282 matching lines...) Expand 10 before | Expand all | Expand 10 after
1947 __ ldr(r4, MemOperand(sp, offset_to_argv)); 1749 __ ldr(r4, MemOperand(sp, offset_to_argv));
1948 1750
1949 // Push a frame with special values setup to mark it as an entry frame. 1751 // Push a frame with special values setup to mark it as an entry frame.
1950 // r0: code entry 1752 // r0: code entry
1951 // r1: function 1753 // r1: function
1952 // r2: receiver 1754 // r2: receiver
1953 // r3: argc 1755 // r3: argc
1954 // r4: argv 1756 // r4: argv
1955 Isolate* isolate = masm->isolate(); 1757 Isolate* isolate = masm->isolate();
1956 int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY; 1758 int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY;
1957 __ mov(r8, Operand(Smi::FromInt(marker))); 1759 if (FLAG_enable_ool_constant_pool) {
1760 __ mov(r8, Operand(Smi::FromInt(marker)));
1761 }
1762 __ mov(r7, Operand(Smi::FromInt(marker)));
1958 __ mov(r6, Operand(Smi::FromInt(marker))); 1763 __ mov(r6, Operand(Smi::FromInt(marker)));
1959 __ mov(r5, 1764 __ mov(r5,
1960 Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate))); 1765 Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate)));
1961 __ ldr(r5, MemOperand(r5)); 1766 __ ldr(r5, MemOperand(r5));
1962 __ mov(ip, Operand(-1)); // Push a bad frame pointer to fail if it is used. 1767 __ mov(ip, Operand(-1)); // Push a bad frame pointer to fail if it is used.
1963 __ Push(ip, r8, r6, r5); 1768 __ stm(db_w, sp, r5.bit() | r6.bit() | r7.bit() |
1769 (FLAG_enable_ool_constant_pool ? r8.bit() : 0) |
1770 ip.bit());
1964 1771
1965 // Set up frame pointer for the frame to be pushed. 1772 // Set up frame pointer for the frame to be pushed.
1966 __ add(fp, sp, Operand(-EntryFrameConstants::kCallerFPOffset)); 1773 __ add(fp, sp, Operand(-EntryFrameConstants::kCallerFPOffset));
1967 1774
1968 // If this is the outermost JS call, set js_entry_sp value. 1775 // If this is the outermost JS call, set js_entry_sp value.
1969 Label non_outermost_js; 1776 Label non_outermost_js;
1970 ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress, isolate); 1777 ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress, isolate);
1971 __ mov(r5, Operand(ExternalReference(js_entry_sp))); 1778 __ mov(r5, Operand(ExternalReference(js_entry_sp)));
1972 __ ldr(r6, MemOperand(r5)); 1779 __ ldr(r6, MemOperand(r5));
1973 __ cmp(r6, Operand::Zero()); 1780 __ cmp(r6, Operand::Zero());
(...skipping 2386 matching lines...) Expand 10 before | Expand all | Expand 10 after
4360 __ add(sp, sp, Operand(2 * kPointerSize)); 4167 __ add(sp, sp, Operand(2 * kPointerSize));
4361 GenerateCompareFlatAsciiStrings(masm, r1, r0, r2, r3, r4, r5); 4168 GenerateCompareFlatAsciiStrings(masm, r1, r0, r2, r3, r4, r5);
4362 4169
4363 // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater) 4170 // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater)
4364 // tagged as a small integer. 4171 // tagged as a small integer.
4365 __ bind(&runtime); 4172 __ bind(&runtime);
4366 __ TailCallRuntime(Runtime::kStringCompare, 2, 1); 4173 __ TailCallRuntime(Runtime::kStringCompare, 2, 1);
4367 } 4174 }
4368 4175
4369 4176
// Stub template for binary ops that track an allocation site. r2 is
// seeded with undefined here and patched to the real allocation site
// when the stub is instantiated; the work is then delegated via tail
// call to BinaryOpWithAllocationSiteStub.
4177 void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) {
4178 // ----------- S t a t e -------------
4179 // -- r1 : left
4180 // -- r0 : right
4181 // -- lr : return address
4182 // -----------------------------------
4183 Isolate* isolate = masm->isolate();
4184
4185 // Load r2 with the allocation site. We stick an undefined dummy value here
4186 // and replace it with the real allocation site later when we instantiate this
4187 // stub in BinaryOpICWithAllocationSiteStub::GetCodeCopyFromTemplate().
4188 __ Move(r2, handle(isolate->heap()->undefined_value()));
4189
4190 // Make sure that we actually patched the allocation site.
4191 if (FLAG_debug_code) {
// First: the patched value must not be a smi...
4192 __ tst(r2, Operand(kSmiTagMask));
4193 __ Assert(ne, kExpectedAllocationSite);
// ...then: its map must be the AllocationSite map. r2 is saved and
// restored around the map load so the stub below still sees the site.
4194 __ push(r2);
4195 __ ldr(r2, FieldMemOperand(r2, HeapObject::kMapOffset));
4196 __ LoadRoot(ip, Heap::kAllocationSiteMapRootIndex);
4197 __ cmp(r2, ip);
4198 __ pop(r2);
4199 __ Assert(eq, kExpectedAllocationSite);
4200 }
4201
4202 // Tail call into the stub that handles binary operations with allocation
4203 // sites.
4204 BinaryOpWithAllocationSiteStub stub(state_);
4205 __ TailCallStub(&stub);
4206 }
4207
4208
4370 void StringAddStub::Generate(MacroAssembler* masm) { 4209 void StringAddStub::Generate(MacroAssembler* masm) {
4371 Label call_runtime, call_builtin; 4210 Label call_runtime, call_builtin;
4372 Builtins::JavaScript builtin_id = Builtins::ADD; 4211 Builtins::JavaScript builtin_id = Builtins::ADD;
4373 4212
4374 Counters* counters = masm->isolate()->counters(); 4213 Counters* counters = masm->isolate()->counters();
4375 4214
4376 // Stack on entry: 4215 // Stack on entry:
4377 // sp[0]: second argument (right). 4216 // sp[0]: second argument (right).
4378 // sp[4]: first argument (left). 4217 // sp[4]: first argument (left).
4379 4218
(...skipping 1318 matching lines...) Expand 10 before | Expand all | Expand 10 after
5698 AllocationSiteOverrideMode mode) { 5537 AllocationSiteOverrideMode mode) {
5699 if (mode == DISABLE_ALLOCATION_SITES) { 5538 if (mode == DISABLE_ALLOCATION_SITES) {
5700 T stub(GetInitialFastElementsKind(), 5539 T stub(GetInitialFastElementsKind(),
5701 CONTEXT_CHECK_REQUIRED, 5540 CONTEXT_CHECK_REQUIRED,
5702 mode); 5541 mode);
5703 __ TailCallStub(&stub); 5542 __ TailCallStub(&stub);
5704 } else if (mode == DONT_OVERRIDE) { 5543 } else if (mode == DONT_OVERRIDE) {
5705 int last_index = GetSequenceIndexFromFastElementsKind( 5544 int last_index = GetSequenceIndexFromFastElementsKind(
5706 TERMINAL_FAST_ELEMENTS_KIND); 5545 TERMINAL_FAST_ELEMENTS_KIND);
5707 for (int i = 0; i <= last_index; ++i) { 5546 for (int i = 0; i <= last_index; ++i) {
5708 Label next;
5709 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i); 5547 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
5710 __ cmp(r3, Operand(kind)); 5548 __ cmp(r3, Operand(kind));
5711 __ b(ne, &next);
5712 T stub(kind); 5549 T stub(kind);
5713 __ TailCallStub(&stub); 5550 __ TailCallStub(&stub, eq);
5714 __ bind(&next);
5715 } 5551 }
5716 5552
5717 // If we reached this point there is a problem. 5553 // If we reached this point there is a problem.
5718 __ Abort(kUnexpectedElementsKindInArrayConstructor); 5554 __ Abort(kUnexpectedElementsKindInArrayConstructor);
5719 } else { 5555 } else {
5720 UNREACHABLE(); 5556 UNREACHABLE();
5721 } 5557 }
5722 } 5558 }
5723 5559
5724 5560
(...skipping 55 matching lines...) Expand 10 before | Expand all | Expand 10 after
5780 // restricted to a portion of the field...upper bits need to be left alone. 5616 // restricted to a portion of the field...upper bits need to be left alone.
5781 STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0); 5617 STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
5782 __ ldr(r4, FieldMemOperand(r5, AllocationSite::kTransitionInfoOffset)); 5618 __ ldr(r4, FieldMemOperand(r5, AllocationSite::kTransitionInfoOffset));
5783 __ add(r4, r4, Operand(Smi::FromInt(kFastElementsKindPackedToHoley))); 5619 __ add(r4, r4, Operand(Smi::FromInt(kFastElementsKindPackedToHoley)));
5784 __ str(r4, FieldMemOperand(r5, AllocationSite::kTransitionInfoOffset)); 5620 __ str(r4, FieldMemOperand(r5, AllocationSite::kTransitionInfoOffset));
5785 5621
5786 __ bind(&normal_sequence); 5622 __ bind(&normal_sequence);
5787 int last_index = GetSequenceIndexFromFastElementsKind( 5623 int last_index = GetSequenceIndexFromFastElementsKind(
5788 TERMINAL_FAST_ELEMENTS_KIND); 5624 TERMINAL_FAST_ELEMENTS_KIND);
5789 for (int i = 0; i <= last_index; ++i) { 5625 for (int i = 0; i <= last_index; ++i) {
5790 Label next;
5791 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i); 5626 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
5792 __ cmp(r3, Operand(kind)); 5627 __ cmp(r3, Operand(kind));
5793 __ b(ne, &next);
5794 ArraySingleArgumentConstructorStub stub(kind); 5628 ArraySingleArgumentConstructorStub stub(kind);
5795 __ TailCallStub(&stub); 5629 __ TailCallStub(&stub, eq);
5796 __ bind(&next);
5797 } 5630 }
5798 5631
5799 // If we reached this point there is a problem. 5632 // If we reached this point there is a problem.
5800 __ Abort(kUnexpectedElementsKindInArrayConstructor); 5633 __ Abort(kUnexpectedElementsKindInArrayConstructor);
5801 } else { 5634 } else {
5802 UNREACHABLE(); 5635 UNREACHABLE();
5803 } 5636 }
5804 } 5637 }
5805 5638
5806 5639
(...skipping 120 matching lines...) Expand 10 before | Expand all | Expand 10 after
5927 __ and_(r3, r3, Operand(AllocationSite::ElementsKindBits::kMask)); 5760 __ and_(r3, r3, Operand(AllocationSite::ElementsKindBits::kMask));
5928 GenerateDispatchToArrayStub(masm, DONT_OVERRIDE); 5761 GenerateDispatchToArrayStub(masm, DONT_OVERRIDE);
5929 5762
5930 __ bind(&no_info); 5763 __ bind(&no_info);
5931 GenerateDispatchToArrayStub(masm, DISABLE_ALLOCATION_SITES); 5764 GenerateDispatchToArrayStub(masm, DISABLE_ALLOCATION_SITES);
5932 } 5765 }
5933 5766
5934 5767
5935 void InternalArrayConstructorStub::GenerateCase( 5768 void InternalArrayConstructorStub::GenerateCase(
5936 MacroAssembler* masm, ElementsKind kind) { 5769 MacroAssembler* masm, ElementsKind kind) {
5937 Label not_zero_case, not_one_case; 5770 __ cmp(r0, Operand(1));
5938 Label normal_sequence;
5939 5771
5940 __ tst(r0, r0);
5941 __ b(ne, &not_zero_case);
5942 InternalArrayNoArgumentConstructorStub stub0(kind); 5772 InternalArrayNoArgumentConstructorStub stub0(kind);
5943 __ TailCallStub(&stub0); 5773 __ TailCallStub(&stub0, lo);
5944 5774
5945 __ bind(&not_zero_case); 5775 InternalArrayNArgumentsConstructorStub stubN(kind);
5946 __ cmp(r0, Operand(1)); 5776 __ TailCallStub(&stubN, hi);
5947 __ b(gt, &not_one_case);
5948 5777
5949 if (IsFastPackedElementsKind(kind)) { 5778 if (IsFastPackedElementsKind(kind)) {
5950 // We might need to create a holey array 5779 // We might need to create a holey array
5951 // look at the first argument 5780 // look at the first argument
5952 __ ldr(r3, MemOperand(sp, 0)); 5781 __ ldr(r3, MemOperand(sp, 0));
5953 __ cmp(r3, Operand::Zero()); 5782 __ cmp(r3, Operand::Zero());
5954 __ b(eq, &normal_sequence);
5955 5783
5956 InternalArraySingleArgumentConstructorStub 5784 InternalArraySingleArgumentConstructorStub
5957 stub1_holey(GetHoleyElementsKind(kind)); 5785 stub1_holey(GetHoleyElementsKind(kind));
5958 __ TailCallStub(&stub1_holey); 5786 __ TailCallStub(&stub1_holey, ne);
5959 } 5787 }
5960 5788
5961 __ bind(&normal_sequence);
5962 InternalArraySingleArgumentConstructorStub stub1(kind); 5789 InternalArraySingleArgumentConstructorStub stub1(kind);
5963 __ TailCallStub(&stub1); 5790 __ TailCallStub(&stub1);
5964
5965 __ bind(&not_one_case);
5966 InternalArrayNArgumentsConstructorStub stubN(kind);
5967 __ TailCallStub(&stubN);
5968 } 5791 }
5969 5792
5970 5793
5971 void InternalArrayConstructorStub::Generate(MacroAssembler* masm) { 5794 void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
5972 // ----------- S t a t e ------------- 5795 // ----------- S t a t e -------------
5973 // -- r0 : argc 5796 // -- r0 : argc
5974 // -- r1 : constructor 5797 // -- r1 : constructor
5975 // -- sp[0] : return address 5798 // -- sp[0] : return address
5976 // -- sp[4] : last argument 5799 // -- sp[4] : last argument
5977 // ----------------------------------- 5800 // -----------------------------------
(...skipping 37 matching lines...) Expand 10 before | Expand all | Expand 10 after
6015 __ bind(&fast_elements_case); 5838 __ bind(&fast_elements_case);
6016 GenerateCase(masm, FAST_ELEMENTS); 5839 GenerateCase(masm, FAST_ELEMENTS);
6017 } 5840 }
6018 5841
6019 5842
6020 #undef __ 5843 #undef __
6021 5844
6022 } } // namespace v8::internal 5845 } } // namespace v8::internal
6023 5846
6024 #endif // V8_TARGET_ARCH_ARM 5847 #endif // V8_TARGET_ARCH_ARM
OLDNEW
« no previous file with comments | « src/arm/code-stubs-arm.h ('k') | src/arm/codegen-arm.cc » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698