Chromium Code Reviews

Unified Diff: src/arm/code-stubs-arm.cc

Issue 148503002: A64: Synchronize with r15545. (Closed) Base URL: https://v8.googlecode.com/svn/branches/experimental/a64
Patch Set: Created 6 years, 10 months ago
// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

-#if defined(V8_TARGET_ARCH_ARM)
+#if V8_TARGET_ARCH_ARM

#include "bootstrapper.h"
#include "code-stubs.h"
#include "regexp-macro-assembler.h"
#include "stub-cache.h"

namespace v8 {
namespace internal {


(...skipping 12 matching lines...)
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { r3, r2, r1, r0 };
  descriptor->register_param_count_ = 4;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      Runtime::FunctionForId(Runtime::kCreateObjectLiteralShallow)->entry;
}


+void CreateAllocationSiteStub::InitializeInterfaceDescriptor(
+    Isolate* isolate,
+    CodeStubInterfaceDescriptor* descriptor) {
+  static Register registers[] = { r2 };
+  descriptor->register_param_count_ = 1;
+  descriptor->register_params_ = registers;
+  descriptor->deoptimization_handler_ = NULL;
+}
+
+
void KeyedLoadFastElementStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { r1, r0 };
  descriptor->register_param_count_ = 2;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      FUNCTION_ADDR(KeyedLoadIC_MissFromStubFailure);
}

(...skipping 146 matching lines...)
}


void InternalArrayNArgumentsConstructorStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  InitializeInternalArrayConstructorDescriptor(isolate, descriptor, -1);
}


+void UnaryOpStub::InitializeInterfaceDescriptor(
+    Isolate* isolate,
+    CodeStubInterfaceDescriptor* descriptor) {
+  static Register registers[] = { r0 };
+  descriptor->register_param_count_ = 1;
+  descriptor->register_params_ = registers;
+  descriptor->deoptimization_handler_ =
+      FUNCTION_ADDR(UnaryOpIC_Miss);
+}
+
+
+void StoreGlobalStub::InitializeInterfaceDescriptor(
+    Isolate* isolate,
+    CodeStubInterfaceDescriptor* descriptor) {
+  static Register registers[] = { r1, r2, r0 };
+  descriptor->register_param_count_ = 3;
+  descriptor->register_params_ = registers;
+  descriptor->deoptimization_handler_ =
+      FUNCTION_ADDR(StoreIC_MissFromStubFailure);
+}
+
+
#define __ ACCESS_MASM(masm)

+
static void EmitIdenticalObjectComparison(MacroAssembler* masm,
                                          Label* slow,
                                          Condition cond);
static void EmitSmiNonsmiComparison(MacroAssembler* masm,
                                    Register lhs,
                                    Register rhs,
                                    Label* lhs_not_nan,
                                    Label* slow,
                                    bool strict);
static void EmitStrictTwoHeapObjectCompare(MacroAssembler* masm,
(...skipping 1041 matching lines...)
  __ CallCFunction(
      ExternalReference::store_buffer_overflow_function(masm->isolate()),
      argument_count);
  if (save_doubles_ == kSaveFPRegs) {
    __ RestoreFPRegs(sp, scratch);
  }
  __ ldm(ia_w, sp, kCallerSaved | pc.bit());  // Also pop pc to get Ret(0).
}


-void UnaryOpStub::PrintName(StringStream* stream) {
-  const char* op_name = Token::Name(op_);
-  const char* overwrite_name = NULL;  // Make g++ happy.
-  switch (mode_) {
-    case UNARY_NO_OVERWRITE: overwrite_name = "Alloc"; break;
-    case UNARY_OVERWRITE: overwrite_name = "Overwrite"; break;
-  }
-  stream->Add("UnaryOpStub_%s_%s_%s",
-              op_name,
-              overwrite_name,
-              UnaryOpIC::GetName(operand_type_));
-}
-
-
-// TODO(svenpanne): Use virtual functions instead of switch.
-void UnaryOpStub::Generate(MacroAssembler* masm) {
-  switch (operand_type_) {
-    case UnaryOpIC::UNINITIALIZED:
-      GenerateTypeTransition(masm);
-      break;
-    case UnaryOpIC::SMI:
-      GenerateSmiStub(masm);
-      break;
-    case UnaryOpIC::NUMBER:
-      GenerateNumberStub(masm);
-      break;
-    case UnaryOpIC::GENERIC:
-      GenerateGenericStub(masm);
-      break;
-  }
-}
-
-
-void UnaryOpStub::GenerateTypeTransition(MacroAssembler* masm) {
-  __ mov(r3, Operand(r0));  // the operand
-  __ mov(r2, Operand(Smi::FromInt(op_)));
-  __ mov(r1, Operand(Smi::FromInt(mode_)));
-  __ mov(r0, Operand(Smi::FromInt(operand_type_)));
-  __ Push(r3, r2, r1, r0);
-
-  __ TailCallExternalReference(
-      ExternalReference(IC_Utility(IC::kUnaryOp_Patch), masm->isolate()), 4, 1);
-}
-
-
-// TODO(svenpanne): Use virtual functions instead of switch.
-void UnaryOpStub::GenerateSmiStub(MacroAssembler* masm) {
-  switch (op_) {
-    case Token::SUB:
-      GenerateSmiStubSub(masm);
-      break;
-    case Token::BIT_NOT:
-      GenerateSmiStubBitNot(masm);
-      break;
-    default:
-      UNREACHABLE();
-  }
-}
-
-
-void UnaryOpStub::GenerateSmiStubSub(MacroAssembler* masm) {
-  Label non_smi, slow;
-  GenerateSmiCodeSub(masm, &non_smi, &slow);
-  __ bind(&non_smi);
-  __ bind(&slow);
-  GenerateTypeTransition(masm);
-}
-
-
-void UnaryOpStub::GenerateSmiStubBitNot(MacroAssembler* masm) {
-  Label non_smi;
-  GenerateSmiCodeBitNot(masm, &non_smi);
-  __ bind(&non_smi);
-  GenerateTypeTransition(masm);
-}
-
-
-void UnaryOpStub::GenerateSmiCodeSub(MacroAssembler* masm,
-                                     Label* non_smi,
-                                     Label* slow) {
-  __ JumpIfNotSmi(r0, non_smi);
-
-  // The result of negating zero or the smallest negative smi is not a smi.
-  __ bic(ip, r0, Operand(0x80000000), SetCC);
-  __ b(eq, slow);
-
-  // Return '0 - value'.
-  __ rsb(r0, r0, Operand::Zero());
-  __ Ret();
-}
-
-
-void UnaryOpStub::GenerateSmiCodeBitNot(MacroAssembler* masm,
-                                        Label* non_smi) {
-  __ JumpIfNotSmi(r0, non_smi);
-
-  // Flip bits and revert inverted smi-tag.
-  __ mvn(r0, Operand(r0));
-  __ bic(r0, r0, Operand(kSmiTagMask));
-  __ Ret();
-}
-
-
-// TODO(svenpanne): Use virtual functions instead of switch.
-void UnaryOpStub::GenerateNumberStub(MacroAssembler* masm) {
-  switch (op_) {
-    case Token::SUB:
-      GenerateNumberStubSub(masm);
-      break;
-    case Token::BIT_NOT:
-      GenerateNumberStubBitNot(masm);
-      break;
-    default:
-      UNREACHABLE();
-  }
-}
-
-
-void UnaryOpStub::GenerateNumberStubSub(MacroAssembler* masm) {
-  Label non_smi, slow, call_builtin;
-  GenerateSmiCodeSub(masm, &non_smi, &call_builtin);
-  __ bind(&non_smi);
-  GenerateHeapNumberCodeSub(masm, &slow);
-  __ bind(&slow);
-  GenerateTypeTransition(masm);
-  __ bind(&call_builtin);
-  GenerateGenericCodeFallback(masm);
-}
-
-
-void UnaryOpStub::GenerateNumberStubBitNot(MacroAssembler* masm) {
-  Label non_smi, slow;
-  GenerateSmiCodeBitNot(masm, &non_smi);
-  __ bind(&non_smi);
-  GenerateHeapNumberCodeBitNot(masm, &slow);
-  __ bind(&slow);
-  GenerateTypeTransition(masm);
-}
-
-void UnaryOpStub::GenerateHeapNumberCodeSub(MacroAssembler* masm,
-                                            Label* slow) {
-  EmitCheckForHeapNumber(masm, r0, r1, r6, slow);
-  // r0 is a heap number.  Get a new heap number in r1.
-  if (mode_ == UNARY_OVERWRITE) {
-    __ ldr(r2, FieldMemOperand(r0, HeapNumber::kExponentOffset));
-    __ eor(r2, r2, Operand(HeapNumber::kSignMask));  // Flip sign.
-    __ str(r2, FieldMemOperand(r0, HeapNumber::kExponentOffset));
-  } else {
-    Label slow_allocate_heapnumber, heapnumber_allocated;
-    __ AllocateHeapNumber(r1, r2, r3, r6, &slow_allocate_heapnumber);
-    __ jmp(&heapnumber_allocated);
-
-    __ bind(&slow_allocate_heapnumber);
-    {
-      FrameScope scope(masm, StackFrame::INTERNAL);
-      __ push(r0);
-      __ CallRuntime(Runtime::kNumberAlloc, 0);
-      __ mov(r1, Operand(r0));
-      __ pop(r0);
-    }
-
-    __ bind(&heapnumber_allocated);
-    __ ldr(r3, FieldMemOperand(r0, HeapNumber::kMantissaOffset));
-    __ ldr(r2, FieldMemOperand(r0, HeapNumber::kExponentOffset));
-    __ str(r3, FieldMemOperand(r1, HeapNumber::kMantissaOffset));
-    __ eor(r2, r2, Operand(HeapNumber::kSignMask));  // Flip sign.
-    __ str(r2, FieldMemOperand(r1, HeapNumber::kExponentOffset));
-    __ mov(r0, Operand(r1));
-  }
-  __ Ret();
-}
-
-
-void UnaryOpStub::GenerateHeapNumberCodeBitNot(MacroAssembler* masm,
-                                               Label* slow) {
-  EmitCheckForHeapNumber(masm, r0, r1, r6, slow);
-
-  // Convert the heap number in r0 to an untagged integer in r1.
-  __ vldr(d0, FieldMemOperand(r0, HeapNumber::kValueOffset));
-  __ ECMAToInt32(r1, d0, r2, r3, r4, d1);
-
-  // Do the bitwise operation and check if the result fits in a smi.
-  Label try_float;
-  __ mvn(r1, Operand(r1));
-  __ cmn(r1, Operand(0x40000000));
-  __ b(mi, &try_float);
-
-  // Tag the result as a smi and we're done.
-  __ SmiTag(r0, r1);
-  __ Ret();
-
-  // Try to store the result in a heap number.
-  __ bind(&try_float);
-  if (mode_ == UNARY_NO_OVERWRITE) {
-    Label slow_allocate_heapnumber, heapnumber_allocated;
-    __ AllocateHeapNumber(r0, r3, r4, r6, &slow_allocate_heapnumber);
-    __ jmp(&heapnumber_allocated);
-
-    __ bind(&slow_allocate_heapnumber);
-    {
-      FrameScope scope(masm, StackFrame::INTERNAL);
-      // Push the lower bit of the result (left shifted to look like a smi).
-      __ mov(r2, Operand(r1, LSL, 31));
-      // Push the 31 high bits (bit 0 cleared to look like a smi).
-      __ bic(r1, r1, Operand(1));
-      __ Push(r2, r1);
-      __ CallRuntime(Runtime::kNumberAlloc, 0);
-      __ Pop(r2, r1);  // Restore the result.
-      __ orr(r1, r1, Operand(r2, LSR, 31));
-    }
-    __ bind(&heapnumber_allocated);
-  }
-
-  __ vmov(s0, r1);
-  __ vcvt_f64_s32(d0, s0);
-  __ vstr(d0, FieldMemOperand(r0, HeapNumber::kValueOffset));
-  __ Ret();
-}
-
-
-// TODO(svenpanne): Use virtual functions instead of switch.
-void UnaryOpStub::GenerateGenericStub(MacroAssembler* masm) {
-  switch (op_) {
-    case Token::SUB:
-      GenerateGenericStubSub(masm);
-      break;
-    case Token::BIT_NOT:
-      GenerateGenericStubBitNot(masm);
-      break;
-    default:
-      UNREACHABLE();
-  }
-}
-
-
-void UnaryOpStub::GenerateGenericStubSub(MacroAssembler* masm) {
-  Label non_smi, slow;
-  GenerateSmiCodeSub(masm, &non_smi, &slow);
-  __ bind(&non_smi);
-  GenerateHeapNumberCodeSub(masm, &slow);
-  __ bind(&slow);
-  GenerateGenericCodeFallback(masm);
-}
-
-
-void UnaryOpStub::GenerateGenericStubBitNot(MacroAssembler* masm) {
-  Label non_smi, slow;
-  GenerateSmiCodeBitNot(masm, &non_smi);
-  __ bind(&non_smi);
-  GenerateHeapNumberCodeBitNot(masm, &slow);
-  __ bind(&slow);
-  GenerateGenericCodeFallback(masm);
-}
-
-
-void UnaryOpStub::GenerateGenericCodeFallback(MacroAssembler* masm) {
-  // Handle the slow case by jumping to the JavaScript builtin.
-  __ push(r0);
-  switch (op_) {
-    case Token::SUB:
-      __ InvokeBuiltin(Builtins::UNARY_MINUS, JUMP_FUNCTION);
-      break;
-    case Token::BIT_NOT:
-      __ InvokeBuiltin(Builtins::BIT_NOT, JUMP_FUNCTION);
-      break;
-    default:
-      UNREACHABLE();
-  }
-}
-
-
// Generates code to call a C function to do a double operation.
// This code never falls through, but returns with a heap number containing
// the result in r0.
// Register heapnumber_result must be a heap number in which the
// result of the operation will be stored.
// Requires the following layout on entry:
// d0: Left value.
// d1: Right value.
// If soft float ABI, use also r0, r1, r2, r3.
static void CallCCodeForDoubleOperation(MacroAssembler* masm,
(...skipping 1421 matching lines...)
}


void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) {
  CEntryStub::GenerateAheadOfTime(isolate);
  WriteInt32ToHeapNumberStub::GenerateFixedRegStubsAheadOfTime(isolate);
  StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate);
  StubFailureTrampolineStub::GenerateAheadOfTime(isolate);
  RecordWriteStub::GenerateFixedRegStubsAheadOfTime(isolate);
  ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate);
+  CreateAllocationSiteStub::GenerateAheadOfTime(isolate);
}


void CodeStub::GenerateFPStubs(Isolate* isolate) {
  SaveFPRegsMode mode = kSaveFPRegs;
  CEntryStub save_doubles(1, mode);
  StoreBufferOverflowStub stub(mode);
  // These stubs might already be in the snapshot, detect that and don't
  // regenerate, which would lead to code stub initialization state being
  // messed up.
(...skipping 56 matching lines...)
    __ ldr(r1, MemOperand(r0));
    __ add(r1, r1, Operand(1));
    __ str(r1, MemOperand(r0));
  }

  // Call C built-in.
  // r0 = argc, r1 = argv
  __ mov(r0, Operand(r4));
  __ mov(r1, Operand(r6));

-#if defined(V8_HOST_ARCH_ARM)
+#if V8_HOST_ARCH_ARM
  int frame_alignment = MacroAssembler::ActivationFrameAlignment();
  int frame_alignment_mask = frame_alignment - 1;
  if (FLAG_debug_code) {
    if (frame_alignment > kPointerSize) {
      Label alignment_as_expected;
      ASSERT(IsPowerOf2(frame_alignment));
      __ tst(sp, Operand(frame_alignment_mask));
      __ b(eq, &alignment_as_expected);
      // Don't use Check here, as it will call Runtime_Abort re-entering here.
      __ stop("Unexpected alignment");
(...skipping 83 matching lines...)


void CEntryStub::Generate(MacroAssembler* masm) {
  // Called from JavaScript; parameters are on stack as if calling JS function
  // r0: number of arguments including receiver
  // r1: pointer to builtin function
  // fp: frame pointer (restored after C call)
  // sp: stack pointer (restored as callee's sp after C call)
  // cp: current context (C callee-saved)

+  ProfileEntryHookStub::MaybeCallEntryHook(masm);
+
  // Result returned in r0 or r0+r1 by default.

  // NOTE: Invocations of builtins may return failure objects
  // instead of a proper result. The builtin entry handles
  // this by performing a garbage collection and retrying the
  // builtin once.

  // Compute the argv pointer in a callee-saved register.
  __ add(r6, sp, Operand(r0, LSL, kPointerSizeLog2));
  __ sub(r6, r6, Operand(kPointerSize));
(...skipping 70 matching lines...)

void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
  // r0: code entry
  // r1: function
  // r2: receiver
  // r3: argc
  // [sp+0]: argv

  Label invoke, handler_entry, exit;

+  ProfileEntryHookStub::MaybeCallEntryHook(masm);
+
  // Called from C, so do not pop argc and args on exit (preserve sp)
  // No need to save register-passed args
  // Save callee-saved registers (incl. cp and fp), sp, and lr
  __ stm(db_w, sp, kCalleeSaved | lr.bit());

  // Save callee-saved vfp registers.
  __ vstm(db_w, sp, kFirstCalleeSavedDoubleReg, kLastCalleeSavedDoubleReg);
  // Set up the reserved register for 0.0.
  __ vmov(kDoubleRegZero, 0.0);
  __ VFPEnsureFPSCRState(r4);
(...skipping 1354 matching lines...)
  ASSERT_EQ(*TypeFeedbackCells::UninitializedSentinel(masm->isolate()),
            masm->isolate()->heap()->the_hole_value());

  // Load the cache state into r3.
  __ ldr(r3, FieldMemOperand(r2, Cell::kValueOffset));

  // A monomorphic cache hit or an already megamorphic state: invoke the
  // function without changing the state.
  __ cmp(r3, r1);
  __ b(eq, &done);
-  __ CompareRoot(r3, Heap::kUndefinedValueRootIndex);
-  __ b(eq, &done);

-  // Special handling of the Array() function, which caches not only the
-  // monomorphic Array function but the initial ElementsKind with special
-  // sentinels
-  Handle<Object> terminal_kind_sentinel =
-      TypeFeedbackCells::MonomorphicArraySentinel(masm->isolate(),
-                                                  LAST_FAST_ELEMENTS_KIND);
-  __ JumpIfNotSmi(r3, &miss);
-  __ cmp(r3, Operand(terminal_kind_sentinel));
-  __ b(gt, &miss);
+  // If we came here, we need to see if we are the array function.
+  // If we didn't have a matching function, and we didn't find the megamorph
+  // sentinel, then we have in the cell either some other function or an
+  // AllocationSite. Do a map check on the object in ecx.
+  Handle<Map> allocation_site_map(
+      masm->isolate()->heap()->allocation_site_map(),
+      masm->isolate());
+  __ ldr(r5, FieldMemOperand(r3, 0));
+  __ CompareRoot(r5, Heap::kAllocationSiteMapRootIndex);
+  __ b(ne, &miss);
+
  // Make sure the function is the Array() function
  __ LoadArrayFunction(r3);
  __ cmp(r1, r3);
  __ b(ne, &megamorphic);
  __ jmp(&done);

  __ bind(&miss);

  // A monomorphic miss (i.e, here the cache is not uninitialized) goes
  // megamorphic.
  __ CompareRoot(r3, Heap::kTheHoleValueRootIndex);
  __ b(eq, &initialize);
  // MegamorphicSentinel is an immortal immovable object (undefined) so no
  // write-barrier is needed.
  __ bind(&megamorphic);
  __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
  __ str(ip, FieldMemOperand(r2, Cell::kValueOffset));
  __ jmp(&done);

  // An uninitialized cache is patched with the function or sentinel to
  // indicate the ElementsKind if function is the Array constructor.
  __ bind(&initialize);
  // Make sure the function is the Array() function
  __ LoadArrayFunction(r3);
  __ cmp(r1, r3);
  __ b(ne, &not_array_function);

-  // The target function is the Array constructor, install a sentinel value in
-  // the constructor's type info cell that will track the initial ElementsKind
-  // that should be used for the array when its constructed.
-  Handle<Object> initial_kind_sentinel =
-      TypeFeedbackCells::MonomorphicArraySentinel(masm->isolate(),
-                                                  GetInitialFastElementsKind());
-  __ mov(r3, Operand(initial_kind_sentinel));
-  __ str(r3, FieldMemOperand(r2, Cell::kValueOffset));
+  // The target function is the Array constructor,
+  // Create an AllocationSite if we don't already have it, store it in the cell
+  {
+    FrameScope scope(masm, StackFrame::INTERNAL);
+
+    __ push(r0);
+    __ push(r1);
+    __ push(r2);
+
+    CreateAllocationSiteStub create_stub;
+    __ CallStub(&create_stub);
+
+    __ pop(r2);
+    __ pop(r1);
+    __ pop(r0);
+  }
  __ b(&done);

  __ bind(&not_array_function);
  __ str(r1, FieldMemOperand(r2, Cell::kValueOffset));
  // No need for a write barrier here - cells are rescanned.

  __ bind(&done);
}


(...skipping 1992 matching lines...)
  __ mov(result, Operand::Zero());
  __ Ret();
}


struct AheadOfTimeWriteBarrierStubList {
  Register object, value, address;
  RememberedSetAction action;
};

+
#define REG(Name) { kRegister_ ## Name ## _Code }

static const AheadOfTimeWriteBarrierStubList kAheadOfTime[] = {
  // Used in RegExpExecStub.
  { REG(r6), REG(r4), REG(r7), EMIT_REMEMBERED_SET },
  // Used in CompileArrayPushCall.
  // Also used in StoreIC::GenerateNormal via GenerateDictionaryStore.
  // Also used in KeyedStoreIC::GenerateGeneric.
  { REG(r3), REG(r4), REG(r5), EMIT_REMEMBERED_SET },
  // Used in CompileStoreGlobal.
(...skipping 347 matching lines...)
    __ add(r1, r1, Operand(1));
  }
  masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE);
  __ mov(r1, Operand(r1, LSL, kPointerSizeLog2));
  __ add(sp, sp, r1);
  __ Ret();
}


void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) {
-  if (entry_hook_ != NULL) {
+  if (masm->isolate()->function_entry_hook() != NULL) {
    PredictableCodeSizeScope predictable(masm, 4 * Assembler::kInstrSize);
+    AllowStubCallsScope allow_stub_calls(masm, true);
    ProfileEntryHookStub stub;
    __ push(lr);
    __ CallStub(&stub);
    __ pop(lr);
  }
}


void ProfileEntryHookStub::Generate(MacroAssembler* masm) {
  // The entry hook is a "push lr" instruction, followed by a call.
  const int32_t kReturnAddressDistanceFromFunctionStart =
      3 * Assembler::kInstrSize;

-  // Save live volatile registers.
-  __ Push(lr, r5, r1);
-  const int32_t kNumSavedRegs = 3;
+  // This should contain all kCallerSaved registers.
+  const RegList kSavedRegs =
+      1 << 0 |  // r0
+      1 << 1 |  // r1
+      1 << 2 |  // r2
+      1 << 3 |  // r3
+      1 << 5 |  // r5
+      1 << 9;   // r9
+  // We also save lr, so the count here is one higher than the mask indicates.
+  const int32_t kNumSavedRegs = 7;
+
+  ASSERT((kCallerSaved & kSavedRegs) == kCallerSaved);
+
+  // Save all caller-save registers as this may be called from anywhere.
+  __ stm(db_w, sp, kSavedRegs | lr.bit());

  // Compute the function's address for the first argument.
  __ sub(r0, lr, Operand(kReturnAddressDistanceFromFunctionStart));

  // The caller's return address is above the saved temporaries.
  // Grab that for the second argument to the hook.
  __ add(r1, sp, Operand(kNumSavedRegs * kPointerSize));

  // Align the stack if necessary.
  int frame_alignment = masm->ActivationFrameAlignment();
  if (frame_alignment > kPointerSize) {
    __ mov(r5, sp);
    ASSERT(IsPowerOf2(frame_alignment));
    __ and_(sp, sp, Operand(-frame_alignment));
  }

-#if defined(V8_HOST_ARCH_ARM)
-  __ mov(ip, Operand(reinterpret_cast<int32_t>(&entry_hook_)));
-  __ ldr(ip, MemOperand(ip));
+#if V8_HOST_ARCH_ARM
+  int32_t entry_hook =
+      reinterpret_cast<int32_t>(masm->isolate()->function_entry_hook());
+  __ mov(ip, Operand(entry_hook));
#else
  // Under the simulator we need to indirect the entry hook through a
  // trampoline function at a known address.
-  Address trampoline_address = reinterpret_cast<Address>(
-      reinterpret_cast<intptr_t>(EntryHookTrampoline));
-  ApiFunction dispatcher(trampoline_address);
+  ApiFunction dispatcher(FUNCTION_ADDR(EntryHookTrampoline));
  __ mov(ip, Operand(ExternalReference(&dispatcher,
                                       ExternalReference::BUILTIN_CALL,
                                       masm->isolate())));
#endif
  __ Call(ip);

  // Restore the stack pointer if needed.
  if (frame_alignment > kPointerSize) {
    __ mov(sp, r5);
  }

-  __ Pop(lr, r5, r1);
-  __ Ret();
+  // Also pop pc to get Ret(0).
+  __ ldm(ia_w, sp, kSavedRegs | pc.bit());
}


template<class T>
static void CreateArrayDispatch(MacroAssembler* masm) {
  int last_index = GetSequenceIndexFromFastElementsKind(
      TERMINAL_FAST_ELEMENTS_KIND);
  for (int i = 0; i <= last_index; ++i) {
    Label next;
    ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
(...skipping 15 matching lines...)
  // r0 - number of arguments
  // r1 - constructor?
  // sp[0] - last argument
  ASSERT(FAST_SMI_ELEMENTS == 0);
  ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
  ASSERT(FAST_ELEMENTS == 2);
  ASSERT(FAST_HOLEY_ELEMENTS == 3);
  ASSERT(FAST_DOUBLE_ELEMENTS == 4);
  ASSERT(FAST_HOLEY_DOUBLE_ELEMENTS == 5);

-  Handle<Object> undefined_sentinel(
-      masm->isolate()->heap()->undefined_value(),
-      masm->isolate());
-
  // is the low bit set? If so, we are holey and that is good.
  __ tst(r3, Operand(1));
  Label normal_sequence;
  __ b(ne, &normal_sequence);

  // look at the first argument
  __ ldr(r5, MemOperand(sp, 0));
  __ cmp(r5, Operand::Zero());
  __ b(eq, &normal_sequence);

  // We are going to create a holey array, but our kind is non-holey.
-  // Fix kind and retry
+  // Fix kind and retry (only if we have an allocation site in the cell).
  __ add(r3, r3, Operand(1));
-  __ cmp(r2, Operand(undefined_sentinel));
+  __ CompareRoot(r2, Heap::kUndefinedValueRootIndex);
  __ b(eq, &normal_sequence);
+  __ ldr(r5, FieldMemOperand(r2, Cell::kValueOffset));
+  __ ldr(r5, FieldMemOperand(r5, 0));
+  __ CompareRoot(r5, Heap::kAllocationSiteMapRootIndex);
+  __ b(ne, &normal_sequence);

  // Save the resulting elements kind in type info
  __ SmiTag(r3);
-  __ str(r3, FieldMemOperand(r2, kPointerSize));
+  __ ldr(r5, FieldMemOperand(r2, Cell::kValueOffset));
+  __ str(r3, FieldMemOperand(r5, AllocationSite::kPayloadOffset));
  __ SmiUntag(r3);

  __ bind(&normal_sequence);
  int last_index = GetSequenceIndexFromFastElementsKind(
      TERMINAL_FAST_ELEMENTS_KIND);
  for (int i = 0; i <= last_index; ++i) {
    Label next;
    ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
    __ cmp(r3, Operand(kind));
    __ b(ne, &next);
    ArraySingleArgumentConstructorStub stub(kind);
    __ TailCallStub(&stub);
    __ bind(&next);
  }

  // If we reached this point there is a problem.
  __ Abort("Unexpected ElementsKind in array constructor");
}


template<class T>
static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) {
  int to_index = GetSequenceIndexFromFastElementsKind(
      TERMINAL_FAST_ELEMENTS_KIND);
  for (int i = 0; i <= to_index; ++i) {
    ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
    T stub(kind);
    stub.GetCode(isolate)->set_is_pregenerated(true);
-    if (AllocationSiteInfo::GetMode(kind) != DONT_TRACK_ALLOCATION_SITE) {
-      T stub1(kind, true);
+    if (AllocationSite::GetMode(kind) != DONT_TRACK_ALLOCATION_SITE) {
+      T stub1(kind, CONTEXT_CHECK_REQUIRED, DISABLE_ALLOCATION_SITES);
      stub1.GetCode(isolate)->set_is_pregenerated(true);
    }
  }
}


void ArrayConstructorStubBase::GenerateStubsAheadOfTime(Isolate* isolate) {
  ArrayConstructorStubAheadOfTimeHelper<ArrayNoArgumentConstructorStub>(
      isolate);
  ArrayConstructorStubAheadOfTimeHelper<ArraySingleArgumentConstructorStub>(
(...skipping 19 matching lines...)


void ArrayConstructorStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0 : argc (only if argument_count_ == ANY)
  //  -- r1 : constructor
  //  -- r2 : type info cell
  //  -- sp[0] : return address
  //  -- sp[4] : last argument
  // -----------------------------------
-  Handle<Object> undefined_sentinel(
-      masm->isolate()->heap()->undefined_value(),
-      masm->isolate());
-
  if (FLAG_debug_code) {
    // The array construct code is only set for the global and natives
    // builtin Array functions which always have maps.

    // Initial map for the builtin Array function should be a map.
    __ ldr(r3, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
    // Will both indicate a NULL and a Smi.
    __ tst(r3, Operand(kSmiTagMask));
    __ Assert(ne, "Unexpected initial map for Array function");
    __ CompareObjectType(r3, r3, r4, MAP_TYPE);
    __ Assert(eq, "Unexpected initial map for Array function");

    // We should either have undefined in ebx or a valid cell
    Label okay_here;
    Handle<Map> cell_map = masm->isolate()->factory()->cell_map();
-    __ cmp(r2, Operand(undefined_sentinel));
+    __ CompareRoot(r2, Heap::kUndefinedValueRootIndex);
    __ b(eq, &okay_here);
    __ ldr(r3, FieldMemOperand(r2, 0));
    __ cmp(r3, Operand(cell_map));
    __ Assert(eq, "Expected property cell in register ebx");
    __ bind(&okay_here);
  }

  Label no_info, switch_ready;
  // Get the elements kind and case on that.
-  __ cmp(r2, Operand(undefined_sentinel));
+  __ CompareRoot(r2, Heap::kUndefinedValueRootIndex);
  __ b(eq, &no_info);
  __ ldr(r3, FieldMemOperand(r2, Cell::kValueOffset));
-  __ JumpIfNotSmi(r3, &no_info);
+
+  // The type cell may have undefined in its value.
+  __ CompareRoot(r3, Heap::kUndefinedValueRootIndex);
+  __ b(eq, &no_info);
+
+  // We should have an allocation site object
+  if (FLAG_debug_code) {
+    __ push(r3);
+    __ ldr(r3, FieldMemOperand(r3, 0));
+    __ CompareRoot(r3, Heap::kAllocationSiteMapRootIndex);
+    __ Assert(eq, "Expected AllocationSite object in register edx");
+  }
+
+  __ ldr(r3, FieldMemOperand(r3, AllocationSite::kPayloadOffset));
  __ SmiUntag(r3);
  __ jmp(&switch_ready);
  __ bind(&no_info);
  __ mov(r3, Operand(GetInitialFastElementsKind()));
  __ bind(&switch_ready);

  if (argument_count_ == ANY) {
    Label not_zero_case, not_one_case;
    __ tst(r0, r0);
    __ b(ne, &not_zero_case);
(...skipping 101 matching lines...)
  __ bind(&fast_elements_case);
  GenerateCase(masm, FAST_ELEMENTS);
}


#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_ARM
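
Note on the feedback protocol ported here: before this sync, the call-target recording code (GenerateRecordCallTarget in upstream V8) marked Array() call sites with Smi sentinels in the type feedback cell; after it, the cell holds an AllocationSite object, recognized by comparing its map against the allocation-site map root, with the site's payload tracking the ElementsKind. The sketch below is a rough, hypothetical C++ rendering of the cell state machine the ARM assembly above implements; it is illustrative only, not code from this patch, and the helper is_array_constructor is an invented name.

    // Rough sketch of the recorded-call-target logic (illustrative only).
    // 'cell' is the type feedback cell (r2), 'function' the callee (r1),
    // 'state' the cached value loaded from the cell (r3).
    Object* state = cell->value();
    if (state == function || state->IsUndefined()) {
      // Monomorphic hit, or already megamorphic: invoke without changing state.
    } else if (state->IsAllocationSite() && is_array_constructor(function)) {
      // Array() feedback: the AllocationSite's payload tracks the ElementsKind.
    } else if (state->IsTheHole()) {
      // Uninitialized: record the function, creating an AllocationSite
      // (via CreateAllocationSiteStub) when the callee is the Array constructor.
    } else {
      // Monomorphic miss: go megamorphic by storing undefined. Undefined is
      // immortal and immovable, so no write barrier is needed.
    }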