| OLD | NEW |
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 1320 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1331 } | 1331 } |
| 1332 // No fall-through from this generated code. | 1332 // No fall-through from this generated code. |
| 1333 if (FLAG_debug_code) { | 1333 if (FLAG_debug_code) { |
| 1334 __ Abort("Unexpected fall-through in " | 1334 __ Abort("Unexpected fall-through in " |
| 1335 "TypeRecordingBinaryStub::GenerateFloatingPointCode."); | 1335 "TypeRecordingBinaryStub::GenerateFloatingPointCode."); |
| 1336 } | 1336 } |
| 1337 } | 1337 } |
| 1338 | 1338 |
| 1339 | 1339 |
| 1340 void TypeRecordingBinaryOpStub::GenerateStringAddCode(MacroAssembler* masm) { | 1340 void TypeRecordingBinaryOpStub::GenerateStringAddCode(MacroAssembler* masm) { |
| 1341 ASSERT(op_ == Token::ADD); |
| 1342 NearLabel left_not_string, call_runtime; |
| 1343 |
| 1344 // Registers containing left and right operands respectively. |
| 1345 Register left = rdx; |
| 1346 Register right = rax; |
| 1347 |
| 1348 // Test if left operand is a string. |
| 1349 __ JumpIfSmi(left, &left_not_string); |
| 1350 __ CmpObjectType(left, FIRST_NONSTRING_TYPE, rcx); |
| 1351 __ j(above_equal, &left_not_string); |
| 1352 StringAddStub string_add_left_stub(NO_STRING_CHECK_LEFT_IN_STUB); |
| 1341 GenerateRegisterArgsPush(masm); | 1353 GenerateRegisterArgsPush(masm); |
| 1342 // Registers containing left and right operands respectively. | 1354 __ TailCallStub(&string_add_left_stub); |
| 1343 Register lhs = rdx; | |
| 1344 Register rhs = rax; | |
| 1345 | 1355 |
| 1346 // Test for string arguments before calling runtime. | 1356 // Left operand is not a string, test right. |
| 1347 Label not_strings, both_strings, not_string1, string1, string1_smi2; | 1357 __ bind(&left_not_string); |
| 1358 __ JumpIfSmi(right, &call_runtime); |
| 1359 __ CmpObjectType(right, FIRST_NONSTRING_TYPE, rcx); |
| 1360 __ j(above_equal, &call_runtime); |
| 1348 | 1361 |
| 1349 __ JumpIfNotString(lhs, r8, ¬_string1); | 1362 StringAddStub string_add_right_stub(NO_STRING_CHECK_RIGHT_IN_STUB); |
| 1363 GenerateRegisterArgsPush(masm); |
| 1364 __ TailCallStub(&string_add_right_stub); |
| 1350 | 1365 |
| 1351 // First argument is a a string, test second. | |
| 1352 __ JumpIfSmi(rhs, &string1_smi2); | |
| 1353 __ CmpObjectType(rhs, FIRST_NONSTRING_TYPE, r9); | |
| 1354 __ j(above_equal, &string1); | |
| 1355 | |
| 1356 // First and second argument are strings. | |
| 1357 StringAddStub string_add_stub(NO_STRING_CHECK_IN_STUB); | |
| 1358 __ TailCallStub(&string_add_stub); | |
| 1359 | |
| 1360 __ bind(&string1_smi2); | |
| 1361 // First argument is a string, second is a smi. Try to lookup the number | |
| 1362 // string for the smi in the number string cache. | |
| 1363 NumberToStringStub::GenerateLookupNumberStringCache( | |
| 1364 masm, rhs, rbx, rcx, r8, true, &string1); | |
| 1365 | |
| 1366 // Replace second argument on stack and tailcall string add stub to make | |
| 1367 // the result. | |
| 1368 __ movq(Operand(rsp, 1 * kPointerSize), rbx); | |
| 1369 __ TailCallStub(&string_add_stub); | |
| 1370 | |
| 1371 // Only first argument is a string. | |
| 1372 __ bind(&string1); | |
| 1373 __ InvokeBuiltin(Builtins::STRING_ADD_LEFT, JUMP_FUNCTION); | |
| 1374 | |
| 1375 // First argument was not a string, test second. | |
| 1376 __ bind(¬_string1); | |
| 1377 __ JumpIfNotString(rhs, rhs, ¬_strings); | |
| 1378 | |
| 1379 // Only second argument is a string. | |
| 1380 __ InvokeBuiltin(Builtins::STRING_ADD_RIGHT, JUMP_FUNCTION); | |
| 1381 | |
| 1382 __ bind(¬_strings); | |
| 1383 // Neither argument is a string. | 1366 // Neither argument is a string. |
| 1384 // Pop arguments, because CallRuntimeCode wants to push them again. | 1367 __ bind(&call_runtime); |
| 1385 __ pop(rcx); | |
| 1386 __ pop(rax); | |
| 1387 __ pop(rdx); | |
| 1388 __ push(rcx); | |
| 1389 } | 1368 } |
| 1390 | 1369 |
| 1391 | 1370 |
| 1392 void TypeRecordingBinaryOpStub::GenerateCallRuntimeCode(MacroAssembler* masm) { | 1371 void TypeRecordingBinaryOpStub::GenerateCallRuntimeCode(MacroAssembler* masm) { |
| 1393 GenerateRegisterArgsPush(masm); | 1372 GenerateRegisterArgsPush(masm); |
| 1394 switch (op_) { | 1373 switch (op_) { |
| 1395 case Token::ADD: | 1374 case Token::ADD: |
| 1396 __ InvokeBuiltin(Builtins::ADD, JUMP_FUNCTION); | 1375 __ InvokeBuiltin(Builtins::ADD, JUMP_FUNCTION); |
| 1397 break; | 1376 break; |
| 1398 case Token::SUB: | 1377 case Token::SUB: |
| (...skipping 36 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1435 Label not_smi; | 1414 Label not_smi; |
| 1436 | 1415 |
| 1437 GenerateSmiCode(masm, ¬_smi, NO_HEAPNUMBER_RESULTS); | 1416 GenerateSmiCode(masm, ¬_smi, NO_HEAPNUMBER_RESULTS); |
| 1438 | 1417 |
| 1439 __ bind(¬_smi); | 1418 __ bind(¬_smi); |
| 1440 GenerateTypeTransition(masm); | 1419 GenerateTypeTransition(masm); |
| 1441 } | 1420 } |
| 1442 | 1421 |
| 1443 | 1422 |
| 1444 void TypeRecordingBinaryOpStub::GenerateStringStub(MacroAssembler* masm) { | 1423 void TypeRecordingBinaryOpStub::GenerateStringStub(MacroAssembler* masm) { |
| 1424 ASSERT(operands_type_ == TRBinaryOpIC::STRING); |
| 1445 ASSERT(op_ == Token::ADD); | 1425 ASSERT(op_ == Token::ADD); |
| 1446 GenerateStringAddCode(masm); | 1426 GenerateStringAddCode(masm); |
| 1447 | 1427 // Try to add arguments as strings, otherwise, transition to the generic |
| 1428 // TRBinaryOpIC type. |
| 1448 GenerateTypeTransition(masm); | 1429 GenerateTypeTransition(masm); |
| 1449 } | 1430 } |
| 1450 | 1431 |
| 1451 | 1432 |
| 1452 void TypeRecordingBinaryOpStub::GenerateHeapNumberStub(MacroAssembler* masm) { | 1433 void TypeRecordingBinaryOpStub::GenerateHeapNumberStub(MacroAssembler* masm) { |
| 1453 Label gc_required, not_number; | 1434 Label gc_required, not_number; |
| 1454 GenerateFloatingPointCode(masm, &gc_required, ¬_number); | 1435 GenerateFloatingPointCode(masm, &gc_required, ¬_number); |
| 1455 | 1436 |
| 1456 __ bind(¬_number); | 1437 __ bind(¬_number); |
| 1457 GenerateTypeTransition(masm); | 1438 GenerateTypeTransition(masm); |
| (...skipping 62 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1520 | 1501 |
| 1521 void TypeRecordingBinaryOpStub::GenerateRegisterArgsPush(MacroAssembler* masm) { | 1502 void TypeRecordingBinaryOpStub::GenerateRegisterArgsPush(MacroAssembler* masm) { |
| 1522 __ pop(rcx); | 1503 __ pop(rcx); |
| 1523 __ push(rdx); | 1504 __ push(rdx); |
| 1524 __ push(rax); | 1505 __ push(rax); |
| 1525 __ push(rcx); | 1506 __ push(rcx); |
| 1526 } | 1507 } |
| 1527 | 1508 |
| 1528 | 1509 |
| 1529 void TranscendentalCacheStub::Generate(MacroAssembler* masm) { | 1510 void TranscendentalCacheStub::Generate(MacroAssembler* masm) { |
| 1530 // Input on stack: | 1511 // TAGGED case: |
| 1531 // rsp[8]: argument (should be number). | 1512 // Input: |
| 1532 // rsp[0]: return address. | 1513 // rsp[8]: argument (should be number). |
| 1514 // rsp[0]: return address. |
| 1515 // Output: |
| 1516 // rax: tagged double result. |
| 1517 // UNTAGGED case: |
| 1518 // Input:: |
| 1519 // rsp[0]: return address. |
| 1520 // xmm1: untagged double input argument |
| 1521 // Output: |
| 1522 // xmm1: untagged double result. |
| 1523 |
| 1533 Label runtime_call; | 1524 Label runtime_call; |
| 1534 Label runtime_call_clear_stack; | 1525 Label runtime_call_clear_stack; |
| 1535 Label input_not_smi; | 1526 Label skip_cache; |
| 1536 NearLabel loaded; | 1527 const bool tagged = (argument_type_ == TAGGED); |
| 1537 // Test that rax is a number. | 1528 if (tagged) { |
| 1538 __ movq(rax, Operand(rsp, kPointerSize)); | 1529 NearLabel input_not_smi; |
| 1539 __ JumpIfNotSmi(rax, &input_not_smi); | 1530 NearLabel loaded; |
| 1540 // Input is a smi. Untag and load it onto the FPU stack. | 1531 // Test that rax is a number. |
| 1541 // Then load the bits of the double into rbx. | 1532 __ movq(rax, Operand(rsp, kPointerSize)); |
| 1542 __ SmiToInteger32(rax, rax); | 1533 __ JumpIfNotSmi(rax, &input_not_smi); |
| 1543 __ subq(rsp, Immediate(kPointerSize)); | 1534 // Input is a smi. Untag and load it onto the FPU stack. |
| 1544 __ cvtlsi2sd(xmm1, rax); | 1535 // Then load the bits of the double into rbx. |
| 1545 __ movsd(Operand(rsp, 0), xmm1); | 1536 __ SmiToInteger32(rax, rax); |
| 1546 __ movq(rbx, xmm1); | 1537 __ subq(rsp, Immediate(kDoubleSize)); |
| 1547 __ movq(rdx, xmm1); | 1538 __ cvtlsi2sd(xmm1, rax); |
| 1548 __ fld_d(Operand(rsp, 0)); | 1539 __ movsd(Operand(rsp, 0), xmm1); |
| 1549 __ addq(rsp, Immediate(kPointerSize)); | 1540 __ movq(rbx, xmm1); |
| 1550 __ jmp(&loaded); | 1541 __ movq(rdx, xmm1); |
| 1542 __ fld_d(Operand(rsp, 0)); |
| 1543 __ addq(rsp, Immediate(kDoubleSize)); |
| 1544 __ jmp(&loaded); |
| 1551 | 1545 |
| 1552 __ bind(&input_not_smi); | 1546 __ bind(&input_not_smi); |
| 1553 // Check if input is a HeapNumber. | 1547 // Check if input is a HeapNumber. |
| 1554 __ Move(rbx, FACTORY->heap_number_map()); | 1548 __ LoadRoot(rbx, Heap::kHeapNumberMapRootIndex); |
| 1555 __ cmpq(rbx, FieldOperand(rax, HeapObject::kMapOffset)); | 1549 __ cmpq(rbx, FieldOperand(rax, HeapObject::kMapOffset)); |
| 1556 __ j(not_equal, &runtime_call); | 1550 __ j(not_equal, &runtime_call); |
| 1557 // Input is a HeapNumber. Push it on the FPU stack and load its | 1551 // Input is a HeapNumber. Push it on the FPU stack and load its |
| 1558 // bits into rbx. | 1552 // bits into rbx. |
| 1559 __ fld_d(FieldOperand(rax, HeapNumber::kValueOffset)); | 1553 __ fld_d(FieldOperand(rax, HeapNumber::kValueOffset)); |
| 1560 __ movq(rbx, FieldOperand(rax, HeapNumber::kValueOffset)); | 1554 __ movq(rbx, FieldOperand(rax, HeapNumber::kValueOffset)); |
| 1561 __ movq(rdx, rbx); | 1555 __ movq(rdx, rbx); |
| 1562 __ bind(&loaded); | 1556 |
| 1563 // ST[0] == double value | 1557 __ bind(&loaded); |
| 1558 } else { // UNTAGGED. |
| 1559 __ movq(rbx, xmm1); |
| 1560 __ movq(rdx, xmm1); |
| 1561 } |
| 1562 |
| 1563 // ST[0] == double value, if TAGGED. |
| 1564 // rbx = bits of double value. | 1564 // rbx = bits of double value. |
| 1565 // rdx = also bits of double value. | 1565 // rdx = also bits of double value. |
| 1566 // Compute hash (h is 32 bits, bits are 64 and the shifts are arithmetic): | 1566 // Compute hash (h is 32 bits, bits are 64 and the shifts are arithmetic): |
| 1567 // h = h0 = bits ^ (bits >> 32); | 1567 // h = h0 = bits ^ (bits >> 32); |
| 1568 // h ^= h >> 16; | 1568 // h ^= h >> 16; |
| 1569 // h ^= h >> 8; | 1569 // h ^= h >> 8; |
| 1570 // h = h & (cacheSize - 1); | 1570 // h = h & (cacheSize - 1); |
| 1571 // or h = (h0 ^ (h0 >> 8) ^ (h0 >> 16) ^ (h0 >> 24)) & (cacheSize - 1) | 1571 // or h = (h0 ^ (h0 >> 8) ^ (h0 >> 16) ^ (h0 >> 24)) & (cacheSize - 1) |
| 1572 __ sar(rdx, Immediate(32)); | 1572 __ sar(rdx, Immediate(32)); |
| 1573 __ xorl(rdx, rbx); | 1573 __ xorl(rdx, rbx); |
| (...skipping 12 matching lines...) Expand all Loading... |
| 1586 // ST[0] == double value. | 1586 // ST[0] == double value. |
| 1587 // rbx = bits of double value. | 1587 // rbx = bits of double value. |
| 1588 // rcx = TranscendentalCache::hash(double value). | 1588 // rcx = TranscendentalCache::hash(double value). |
| 1589 __ movq(rax, ExternalReference::transcendental_cache_array_address()); | 1589 __ movq(rax, ExternalReference::transcendental_cache_array_address()); |
| 1590 // rax points to cache array. | 1590 // rax points to cache array. |
| 1591 __ movq(rax, Operand(rax, type_ * sizeof( | 1591 __ movq(rax, Operand(rax, type_ * sizeof( |
| 1592 Isolate::Current()->transcendental_cache()->caches_[0]))); | 1592 Isolate::Current()->transcendental_cache()->caches_[0]))); |
| 1593 // rax points to the cache for the type type_. | 1593 // rax points to the cache for the type type_. |
| 1594 // If NULL, the cache hasn't been initialized yet, so go through runtime. | 1594 // If NULL, the cache hasn't been initialized yet, so go through runtime. |
| 1595 __ testq(rax, rax); | 1595 __ testq(rax, rax); |
| 1596 __ j(zero, &runtime_call_clear_stack); | 1596 __ j(zero, &runtime_call_clear_stack); // Only clears stack if TAGGED. |
| 1597 #ifdef DEBUG | 1597 #ifdef DEBUG |
| 1598 // Check that the layout of cache elements match expectations. | 1598 // Check that the layout of cache elements match expectations. |
| 1599 { // NOLINT - doesn't like a single brace on a line. | 1599 { // NOLINT - doesn't like a single brace on a line. |
| 1600 TranscendentalCache::SubCache::Element test_elem[2]; | 1600 TranscendentalCache::SubCache::Element test_elem[2]; |
| 1601 char* elem_start = reinterpret_cast<char*>(&test_elem[0]); | 1601 char* elem_start = reinterpret_cast<char*>(&test_elem[0]); |
| 1602 char* elem2_start = reinterpret_cast<char*>(&test_elem[1]); | 1602 char* elem2_start = reinterpret_cast<char*>(&test_elem[1]); |
| 1603 char* elem_in0 = reinterpret_cast<char*>(&(test_elem[0].in[0])); | 1603 char* elem_in0 = reinterpret_cast<char*>(&(test_elem[0].in[0])); |
| 1604 char* elem_in1 = reinterpret_cast<char*>(&(test_elem[0].in[1])); | 1604 char* elem_in1 = reinterpret_cast<char*>(&(test_elem[0].in[1])); |
| 1605 char* elem_out = reinterpret_cast<char*>(&(test_elem[0].output)); | 1605 char* elem_out = reinterpret_cast<char*>(&(test_elem[0].output)); |
| 1606 // Two uint_32's and a pointer per element. | 1606 // Two uint_32's and a pointer per element. |
| 1607 CHECK_EQ(16, static_cast<int>(elem2_start - elem_start)); | 1607 CHECK_EQ(16, static_cast<int>(elem2_start - elem_start)); |
| 1608 CHECK_EQ(0, static_cast<int>(elem_in0 - elem_start)); | 1608 CHECK_EQ(0, static_cast<int>(elem_in0 - elem_start)); |
| 1609 CHECK_EQ(kIntSize, static_cast<int>(elem_in1 - elem_start)); | 1609 CHECK_EQ(kIntSize, static_cast<int>(elem_in1 - elem_start)); |
| 1610 CHECK_EQ(2 * kIntSize, static_cast<int>(elem_out - elem_start)); | 1610 CHECK_EQ(2 * kIntSize, static_cast<int>(elem_out - elem_start)); |
| 1611 } | 1611 } |
| 1612 #endif | 1612 #endif |
| 1613 // Find the address of the rcx'th entry in the cache, i.e., &rax[rcx*16]. | 1613 // Find the address of the rcx'th entry in the cache, i.e., &rax[rcx*16]. |
| 1614 __ addl(rcx, rcx); | 1614 __ addl(rcx, rcx); |
| 1615 __ lea(rcx, Operand(rax, rcx, times_8, 0)); | 1615 __ lea(rcx, Operand(rax, rcx, times_8, 0)); |
| 1616 // Check if cache matches: Double value is stored in uint32_t[2] array. | 1616 // Check if cache matches: Double value is stored in uint32_t[2] array. |
| 1617 NearLabel cache_miss; | 1617 NearLabel cache_miss; |
| 1618 __ cmpq(rbx, Operand(rcx, 0)); | 1618 __ cmpq(rbx, Operand(rcx, 0)); |
| 1619 __ j(not_equal, &cache_miss); | 1619 __ j(not_equal, &cache_miss); |
| 1620 // Cache hit! | 1620 // Cache hit! |
| 1621 __ movq(rax, Operand(rcx, 2 * kIntSize)); | 1621 __ movq(rax, Operand(rcx, 2 * kIntSize)); |
| 1622 __ fstp(0); // Clear FPU stack. | 1622 if (tagged) { |
| 1623 __ ret(kPointerSize); | 1623 __ fstp(0); // Clear FPU stack. |
| 1624 __ ret(kPointerSize); |
| 1625 } else { // UNTAGGED. |
| 1626 __ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset)); |
| 1627 __ Ret(); |
| 1628 } |
| 1624 | 1629 |
| 1625 __ bind(&cache_miss); | 1630 __ bind(&cache_miss); |
| 1626 // Update cache with new value. | 1631 // Update cache with new value. |
| 1627 Label nan_result; | 1632 if (tagged) { |
| 1628 GenerateOperation(masm, &nan_result); | |
| 1629 __ AllocateHeapNumber(rax, rdi, &runtime_call_clear_stack); | 1633 __ AllocateHeapNumber(rax, rdi, &runtime_call_clear_stack); |
| 1634 } else { // UNTAGGED. |
| 1635 __ AllocateHeapNumber(rax, rdi, &skip_cache); |
| 1636 __ movsd(FieldOperand(rax, HeapNumber::kValueOffset), xmm1); |
| 1637 __ fld_d(FieldOperand(rax, HeapNumber::kValueOffset)); |
| 1638 } |
| 1639 GenerateOperation(masm); |
| 1630 __ movq(Operand(rcx, 0), rbx); | 1640 __ movq(Operand(rcx, 0), rbx); |
| 1631 __ movq(Operand(rcx, 2 * kIntSize), rax); | 1641 __ movq(Operand(rcx, 2 * kIntSize), rax); |
| 1632 __ fstp_d(FieldOperand(rax, HeapNumber::kValueOffset)); | 1642 __ fstp_d(FieldOperand(rax, HeapNumber::kValueOffset)); |
| 1633 __ ret(kPointerSize); | 1643 if (tagged) { |
| 1644 __ ret(kPointerSize); |
| 1645 } else { // UNTAGGED. |
| 1646 __ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset)); |
| 1647 __ Ret(); |
| 1634 | 1648 |
| 1635 __ bind(&runtime_call_clear_stack); | 1649 // Skip cache and return answer directly, only in untagged case. |
| 1636 __ fstp(0); | 1650 __ bind(&skip_cache); |
| 1637 __ bind(&runtime_call); | 1651 __ subq(rsp, Immediate(kDoubleSize)); |
| 1638 __ TailCallExternalReference(ExternalReference(RuntimeFunction()), 1, 1); | 1652 __ movsd(Operand(rsp, 0), xmm1); |
| 1653 __ fld_d(Operand(rsp, 0)); |
| 1654 GenerateOperation(masm); |
| 1655 __ fstp_d(Operand(rsp, 0)); |
| 1656 __ movsd(xmm1, Operand(rsp, 0)); |
| 1657 __ addq(rsp, Immediate(kDoubleSize)); |
| 1658 // We return the value in xmm1 without adding it to the cache, but |
| 1659 // we cause a scavenging GC so that future allocations will succeed. |
| 1660 __ EnterInternalFrame(); |
| 1661 // Allocate an unused object bigger than a HeapNumber. |
| 1662 __ Push(Smi::FromInt(2 * kDoubleSize)); |
| 1663 __ CallRuntimeSaveDoubles(Runtime::kAllocateInNewSpace); |
| 1664 __ LeaveInternalFrame(); |
| 1665 __ Ret(); |
| 1666 } |
| 1639 | 1667 |
| 1640 __ bind(&nan_result); | 1668 // Call runtime, doing whatever allocation and cleanup is necessary. |
| 1641 __ fstp(0); // Remove argument from FPU stack. | 1669 if (tagged) { |
| 1642 __ LoadRoot(rax, Heap::kNanValueRootIndex); | 1670 __ bind(&runtime_call_clear_stack); |
| 1643 __ movq(Operand(rcx, 0), rbx); | 1671 __ fstp(0); |
| 1644 __ movq(Operand(rcx, 2 * kIntSize), rax); | 1672 __ bind(&runtime_call); |
| 1645 __ ret(kPointerSize); | 1673 __ TailCallExternalReference(ExternalReference(RuntimeFunction()), 1, 1); |
| 1674 } else { // UNTAGGED. |
| 1675 __ bind(&runtime_call_clear_stack); |
| 1676 __ bind(&runtime_call); |
| 1677 __ AllocateHeapNumber(rax, rdi, &skip_cache); |
| 1678 __ movsd(FieldOperand(rax, HeapNumber::kValueOffset), xmm1); |
| 1679 __ EnterInternalFrame(); |
| 1680 __ push(rax); |
| 1681 __ CallRuntime(RuntimeFunction(), 1); |
| 1682 __ LeaveInternalFrame(); |
| 1683 __ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset)); |
| 1684 __ Ret(); |
| 1685 } |
| 1646 } | 1686 } |
| 1647 | 1687 |
| 1648 | 1688 |
| 1649 Runtime::FunctionId TranscendentalCacheStub::RuntimeFunction() { | 1689 Runtime::FunctionId TranscendentalCacheStub::RuntimeFunction() { |
| 1650 switch (type_) { | 1690 switch (type_) { |
| 1651 // Add more cases when necessary. | 1691 // Add more cases when necessary. |
| 1652 case TranscendentalCache::SIN: return Runtime::kMath_sin; | 1692 case TranscendentalCache::SIN: return Runtime::kMath_sin; |
| 1653 case TranscendentalCache::COS: return Runtime::kMath_cos; | 1693 case TranscendentalCache::COS: return Runtime::kMath_cos; |
| 1654 case TranscendentalCache::LOG: return Runtime::kMath_log; | 1694 case TranscendentalCache::LOG: return Runtime::kMath_log; |
| 1655 default: | 1695 default: |
| 1656 UNIMPLEMENTED(); | 1696 UNIMPLEMENTED(); |
| 1657 return Runtime::kAbort; | 1697 return Runtime::kAbort; |
| 1658 } | 1698 } |
| 1659 } | 1699 } |
| 1660 | 1700 |
| 1661 | 1701 |
| 1662 void TranscendentalCacheStub::GenerateOperation(MacroAssembler* masm, | 1702 void TranscendentalCacheStub::GenerateOperation(MacroAssembler* masm) { |
| 1663 Label* on_nan_result) { | |
| 1664 // Registers: | 1703 // Registers: |
| 1704 // rax: Newly allocated HeapNumber, which must be preserved. |
| 1665 // rbx: Bits of input double. Must be preserved. | 1705 // rbx: Bits of input double. Must be preserved. |
| 1666 // rcx: Pointer to cache entry. Must be preserved. | 1706 // rcx: Pointer to cache entry. Must be preserved. |
| 1667 // st(0): Input double | 1707 // st(0): Input double |
| 1668 Label done; | 1708 Label done; |
| 1669 if (type_ == TranscendentalCache::SIN || type_ == TranscendentalCache::COS) { | 1709 if (type_ == TranscendentalCache::SIN || type_ == TranscendentalCache::COS) { |
| 1670 // Both fsin and fcos require arguments in the range +/-2^63 and | 1710 // Both fsin and fcos require arguments in the range +/-2^63 and |
| 1671 // return NaN for infinities and NaN. They can share all code except | 1711 // return NaN for infinities and NaN. They can share all code except |
| 1672 // the actual fsin/fcos operation. | 1712 // the actual fsin/fcos operation. |
| 1673 Label in_range; | 1713 Label in_range; |
| 1674 // If argument is outside the range -2^63..2^63, fsin/cos doesn't | 1714 // If argument is outside the range -2^63..2^63, fsin/cos doesn't |
| 1675 // work. We must reduce it to the appropriate range. | 1715 // work. We must reduce it to the appropriate range. |
| 1676 __ movq(rdi, rbx); | 1716 __ movq(rdi, rbx); |
| 1677 // Move exponent and sign bits to low bits. | 1717 // Move exponent and sign bits to low bits. |
| 1678 __ shr(rdi, Immediate(HeapNumber::kMantissaBits)); | 1718 __ shr(rdi, Immediate(HeapNumber::kMantissaBits)); |
| 1679 // Remove sign bit. | 1719 // Remove sign bit. |
| 1680 __ andl(rdi, Immediate((1 << HeapNumber::kExponentBits) - 1)); | 1720 __ andl(rdi, Immediate((1 << HeapNumber::kExponentBits) - 1)); |
| 1681 int supported_exponent_limit = (63 + HeapNumber::kExponentBias); | 1721 int supported_exponent_limit = (63 + HeapNumber::kExponentBias); |
| 1682 __ cmpl(rdi, Immediate(supported_exponent_limit)); | 1722 __ cmpl(rdi, Immediate(supported_exponent_limit)); |
| 1683 __ j(below, &in_range); | 1723 __ j(below, &in_range); |
| 1684 // Check for infinity and NaN. Both return NaN for sin. | 1724 // Check for infinity and NaN. Both return NaN for sin. |
| 1685 __ cmpl(rdi, Immediate(0x7ff)); | 1725 __ cmpl(rdi, Immediate(0x7ff)); |
| 1686 __ j(equal, on_nan_result); | 1726 NearLabel non_nan_result; |
| 1727 __ j(not_equal, &non_nan_result); |
| 1728 // Input is +/-Infinity or NaN. Result is NaN. |
| 1729 __ fstp(0); |
| 1730 __ LoadRoot(kScratchRegister, Heap::kNanValueRootIndex); |
| 1731 __ fld_d(FieldOperand(kScratchRegister, HeapNumber::kValueOffset)); |
| 1732 __ jmp(&done); |
| 1733 |
| 1734 __ bind(&non_nan_result); |
| 1687 | 1735 |
| 1688 // Use fpmod to restrict argument to the range +/-2*PI. | 1736 // Use fpmod to restrict argument to the range +/-2*PI. |
| 1737 __ movq(rdi, rax); // Save rax before using fnstsw_ax. |
| 1689 __ fldpi(); | 1738 __ fldpi(); |
| 1690 __ fadd(0); | 1739 __ fadd(0); |
| 1691 __ fld(1); | 1740 __ fld(1); |
| 1692 // FPU Stack: input, 2*pi, input. | 1741 // FPU Stack: input, 2*pi, input. |
| 1693 { | 1742 { |
| 1694 Label no_exceptions; | 1743 Label no_exceptions; |
| 1695 __ fwait(); | 1744 __ fwait(); |
| 1696 __ fnstsw_ax(); | 1745 __ fnstsw_ax(); |
| 1697 // Clear if Illegal Operand or Zero Division exceptions are set. | 1746 // Clear if Illegal Operand or Zero Division exceptions are set. |
| 1698 __ testl(rax, Immediate(5)); // #IO and #ZD flags of FPU status word. | 1747 __ testl(rax, Immediate(5)); // #IO and #ZD flags of FPU status word. |
| (...skipping 12 matching lines...) Expand all Loading... |
| 1711 __ testl(rax, Immediate(0x400)); // Check C2 bit of FPU status word. | 1760 __ testl(rax, Immediate(0x400)); // Check C2 bit of FPU status word. |
| 1712 // If C2 is set, computation only has partial result. Loop to | 1761 // If C2 is set, computation only has partial result. Loop to |
| 1713 // continue computation. | 1762 // continue computation. |
| 1714 __ j(not_zero, &partial_remainder_loop); | 1763 __ j(not_zero, &partial_remainder_loop); |
| 1715 } | 1764 } |
| 1716 // FPU Stack: input, 2*pi, input % 2*pi | 1765 // FPU Stack: input, 2*pi, input % 2*pi |
| 1717 __ fstp(2); | 1766 __ fstp(2); |
| 1718 // FPU Stack: input % 2*pi, 2*pi, | 1767 // FPU Stack: input % 2*pi, 2*pi, |
| 1719 __ fstp(0); | 1768 __ fstp(0); |
| 1720 // FPU Stack: input % 2*pi | 1769 // FPU Stack: input % 2*pi |
| 1770 __ movq(rax, rdi); // Restore rax, pointer to the new HeapNumber. |
| 1721 __ bind(&in_range); | 1771 __ bind(&in_range); |
| 1722 switch (type_) { | 1772 switch (type_) { |
| 1723 case TranscendentalCache::SIN: | 1773 case TranscendentalCache::SIN: |
| 1724 __ fsin(); | 1774 __ fsin(); |
| 1725 break; | 1775 break; |
| 1726 case TranscendentalCache::COS: | 1776 case TranscendentalCache::COS: |
| 1727 __ fcos(); | 1777 __ fcos(); |
| 1728 break; | 1778 break; |
| 1729 default: | 1779 default: |
| 1730 UNREACHABLE(); | 1780 UNREACHABLE(); |
| (...skipping 232 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1963 } | 2013 } |
| 1964 __ SmiNeg(rax, rax, &done); | 2014 __ SmiNeg(rax, rax, &done); |
| 1965 __ jmp(&slow); // zero, if not handled above, and Smi::kMinValue. | 2015 __ jmp(&slow); // zero, if not handled above, and Smi::kMinValue. |
| 1966 | 2016 |
| 1967 // Try floating point case. | 2017 // Try floating point case. |
| 1968 __ bind(&try_float); | 2018 __ bind(&try_float); |
| 1969 } else if (FLAG_debug_code) { | 2019 } else if (FLAG_debug_code) { |
| 1970 __ AbortIfSmi(rax); | 2020 __ AbortIfSmi(rax); |
| 1971 } | 2021 } |
| 1972 | 2022 |
| 1973 __ movq(rdx, FieldOperand(rax, HeapObject::kMapOffset)); | 2023 __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset), |
| 1974 __ CompareRoot(rdx, Heap::kHeapNumberMapRootIndex); | 2024 Heap::kHeapNumberMapRootIndex); |
| 1975 __ j(not_equal, &slow); | 2025 __ j(not_equal, &slow); |
| 1976 // Operand is a float, negate its value by flipping sign bit. | 2026 // Operand is a float, negate its value by flipping sign bit. |
| 1977 __ movq(rdx, FieldOperand(rax, HeapNumber::kValueOffset)); | 2027 __ movq(rdx, FieldOperand(rax, HeapNumber::kValueOffset)); |
| 1978 __ movq(kScratchRegister, Immediate(0x01)); | 2028 __ movq(kScratchRegister, Immediate(0x01)); |
| 1979 __ shl(kScratchRegister, Immediate(63)); | 2029 __ shl(kScratchRegister, Immediate(63)); |
| 1980 __ xor_(rdx, kScratchRegister); // Flip sign. | 2030 __ xor_(rdx, kScratchRegister); // Flip sign. |
| 1981 // rdx is value to store. | 2031 // rdx is value to store. |
| 1982 if (overwrite_ == UNARY_OVERWRITE) { | 2032 if (overwrite_ == UNARY_OVERWRITE) { |
| 1983 __ movq(FieldOperand(rax, HeapNumber::kValueOffset), rdx); | 2033 __ movq(FieldOperand(rax, HeapNumber::kValueOffset), rdx); |
| 1984 } else { | 2034 } else { |
| 1985 __ AllocateHeapNumber(rcx, rbx, &slow); | 2035 __ AllocateHeapNumber(rcx, rbx, &slow); |
| 1986 // rcx: allocated 'empty' number | 2036 // rcx: allocated 'empty' number |
| 1987 __ movq(FieldOperand(rcx, HeapNumber::kValueOffset), rdx); | 2037 __ movq(FieldOperand(rcx, HeapNumber::kValueOffset), rdx); |
| 1988 __ movq(rax, rcx); | 2038 __ movq(rax, rcx); |
| 1989 } | 2039 } |
| 1990 } else if (op_ == Token::BIT_NOT) { | 2040 } else if (op_ == Token::BIT_NOT) { |
| 1991 if (include_smi_code_) { | 2041 if (include_smi_code_) { |
| 1992 Label try_float; | 2042 Label try_float; |
| 1993 __ JumpIfNotSmi(rax, &try_float); | 2043 __ JumpIfNotSmi(rax, &try_float); |
| 1994 __ SmiNot(rax, rax); | 2044 __ SmiNot(rax, rax); |
| 1995 __ jmp(&done); | 2045 __ jmp(&done); |
| 1996 // Try floating point case. | 2046 // Try floating point case. |
| 1997 __ bind(&try_float); | 2047 __ bind(&try_float); |
| 1998 } else if (FLAG_debug_code) { | 2048 } else if (FLAG_debug_code) { |
| 1999 __ AbortIfSmi(rax); | 2049 __ AbortIfSmi(rax); |
| 2000 } | 2050 } |
| 2001 | 2051 |
| 2002 // Check if the operand is a heap number. | 2052 // Check if the operand is a heap number. |
| 2003 __ movq(rdx, FieldOperand(rax, HeapObject::kMapOffset)); | 2053 __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset), |
| 2004 __ CompareRoot(rdx, Heap::kHeapNumberMapRootIndex); | 2054 Heap::kHeapNumberMapRootIndex); |
| 2005 __ j(not_equal, &slow); | 2055 __ j(not_equal, &slow); |
| 2006 | 2056 |
| 2007 // Convert the heap number in rax to an untagged integer in rcx. | 2057 // Convert the heap number in rax to an untagged integer in rcx. |
| 2008 IntegerConvert(masm, rax, rax); | 2058 IntegerConvert(masm, rax, rax); |
| 2009 | 2059 |
| 2010 // Do the bitwise operation and smi tag the result. | 2060 // Do the bitwise operation and smi tag the result. |
| 2011 __ notl(rax); | 2061 __ notl(rax); |
| 2012 __ Integer32ToSmi(rax, rax); | 2062 __ Integer32ToSmi(rax, rax); |
| 2013 } | 2063 } |
| 2014 | 2064 |
| (...skipping 12 matching lines...) Expand all Loading... |
| 2027 break; | 2077 break; |
| 2028 case Token::BIT_NOT: | 2078 case Token::BIT_NOT: |
| 2029 __ InvokeBuiltin(Builtins::BIT_NOT, JUMP_FUNCTION); | 2079 __ InvokeBuiltin(Builtins::BIT_NOT, JUMP_FUNCTION); |
| 2030 break; | 2080 break; |
| 2031 default: | 2081 default: |
| 2032 UNREACHABLE(); | 2082 UNREACHABLE(); |
| 2033 } | 2083 } |
| 2034 } | 2084 } |
| 2035 | 2085 |
| 2036 | 2086 |
| 2087 void MathPowStub::Generate(MacroAssembler* masm) { |
| 2088 // Registers are used as follows: |
| 2089 // rdx = base |
| 2090 // rax = exponent |
| 2091 // rcx = temporary, result |
| 2092 |
| 2093 Label allocate_return, call_runtime; |
| 2094 |
| 2095 // Load input parameters. |
| 2096 __ movq(rdx, Operand(rsp, 2 * kPointerSize)); |
| 2097 __ movq(rax, Operand(rsp, 1 * kPointerSize)); |
| 2098 |
| 2099 // Save 1 in xmm3 - we need this several times later on. |
| 2100 __ movl(rcx, Immediate(1)); |
| 2101 __ cvtlsi2sd(xmm3, rcx); |
| 2102 |
| 2103 Label exponent_nonsmi; |
| 2104 Label base_nonsmi; |
| 2105 // If the exponent is a heap number go to that specific case. |
| 2106 __ JumpIfNotSmi(rax, &exponent_nonsmi); |
| 2107 __ JumpIfNotSmi(rdx, &base_nonsmi); |
| 2108 |
| 2109 // Optimized version when both exponent and base are smis. |
| 2110 Label powi; |
| 2111 __ SmiToInteger32(rdx, rdx); |
| 2112 __ cvtlsi2sd(xmm0, rdx); |
| 2113 __ jmp(&powi); |
| 2114 // Exponent is a smi and base is a heapnumber. |
| 2115 __ bind(&base_nonsmi); |
| 2116 __ CompareRoot(FieldOperand(rdx, HeapObject::kMapOffset), |
| 2117 Heap::kHeapNumberMapRootIndex); |
| 2118 __ j(not_equal, &call_runtime); |
| 2119 |
| 2120 __ movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset)); |
| 2121 |
| 2122 // Optimized version of pow if exponent is a smi. |
| 2123 // xmm0 contains the base. |
| 2124 __ bind(&powi); |
| 2125 __ SmiToInteger32(rax, rax); |
| 2126 |
| 2127 // Save exponent in base as we need to check if exponent is negative later. |
| 2128 // We know that base and exponent are in different registers. |
| 2129 __ movq(rdx, rax); |
| 2130 |
| 2131 // Get absolute value of exponent. |
| 2132 NearLabel no_neg; |
| 2133 __ cmpl(rax, Immediate(0)); |
| 2134 __ j(greater_equal, &no_neg); |
| 2135 __ negl(rax); |
| 2136 __ bind(&no_neg); |
| 2137 |
| 2138 // Load xmm1 with 1. |
| 2139 __ movsd(xmm1, xmm3); |
| 2140 NearLabel while_true; |
| 2141 NearLabel no_multiply; |
| 2142 |
| 2143 __ bind(&while_true); |
| 2144 __ shrl(rax, Immediate(1)); |
| 2145 __ j(not_carry, &no_multiply); |
| 2146 __ mulsd(xmm1, xmm0); |
| 2147 __ bind(&no_multiply); |
| 2148 __ mulsd(xmm0, xmm0); |
| 2149 __ j(not_zero, &while_true); |
| 2150 |
| 2151 // Base has the original value of the exponent - if the exponent is |
| 2152 // negative return 1/result. |
| 2153 __ testl(rdx, rdx); |
| 2154 __ j(positive, &allocate_return); |
| 2155 // Special case if xmm1 has reached infinity. |
| 2156 __ divsd(xmm3, xmm1); |
| 2157 __ movsd(xmm1, xmm3); |
| 2158 __ xorpd(xmm0, xmm0); |
| 2159 __ ucomisd(xmm0, xmm1); |
| 2160 __ j(equal, &call_runtime); |
| 2161 |
| 2162 __ jmp(&allocate_return); |
| 2163 |
| 2164 // Exponent (or both) is a heapnumber - no matter what we should now work |
| 2165 // on doubles. |
| 2166 __ bind(&exponent_nonsmi); |
| 2167 __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset), |
| 2168 Heap::kHeapNumberMapRootIndex); |
| 2169 __ j(not_equal, &call_runtime); |
| 2170 __ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset)); |
| 2171 // Test if exponent is nan. |
| 2172 __ ucomisd(xmm1, xmm1); |
| 2173 __ j(parity_even, &call_runtime); |
| 2174 |
| 2175 NearLabel base_not_smi; |
| 2176 NearLabel handle_special_cases; |
| 2177 __ JumpIfNotSmi(rdx, &base_not_smi); |
| 2178 __ SmiToInteger32(rdx, rdx); |
| 2179 __ cvtlsi2sd(xmm0, rdx); |
| 2180 __ jmp(&handle_special_cases); |
| 2181 |
| 2182 __ bind(&base_not_smi); |
| 2183 __ CompareRoot(FieldOperand(rdx, HeapObject::kMapOffset), |
| 2184 Heap::kHeapNumberMapRootIndex); |
| 2185 __ j(not_equal, &call_runtime); |
| 2186 __ movl(rcx, FieldOperand(rdx, HeapNumber::kExponentOffset)); |
| 2187 __ andl(rcx, Immediate(HeapNumber::kExponentMask)); |
| 2188 __ cmpl(rcx, Immediate(HeapNumber::kExponentMask)); |
| 2189 // base is NaN or +/-Infinity |
| 2190 __ j(greater_equal, &call_runtime); |
| 2191 __ movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset)); |
| 2192 |
| 2193 // base is in xmm0 and exponent is in xmm1. |
| 2194 __ bind(&handle_special_cases); |
| 2195 NearLabel not_minus_half; |
| 2196 // Test for -0.5. |
| 2197 // Load xmm2 with -0.5. |
| 2198 __ movq(rcx, V8_UINT64_C(0xBFE0000000000000), RelocInfo::NONE); |
| 2199 __ movq(xmm2, rcx); |
| 2200 // xmm2 now has -0.5. |
| 2201 __ ucomisd(xmm2, xmm1); |
| 2202 __ j(not_equal, ¬_minus_half); |
| 2203 |
| 2204 // Calculates reciprocal of square root. |
| 2205 // sqrtsd returns -0 when input is -0. ECMA spec requires +0. |
| 2206 __ xorpd(xmm1, xmm1); |
| 2207 __ addsd(xmm1, xmm0); |
| 2208 __ sqrtsd(xmm1, xmm1); |
| 2209 __ divsd(xmm3, xmm1); |
| 2210 __ movsd(xmm1, xmm3); |
| 2211 __ jmp(&allocate_return); |
| 2212 |
| 2213 // Test for 0.5. |
| 2214 __ bind(¬_minus_half); |
| 2215 // Load xmm2 with 0.5. |
| 2216 // Since xmm3 is 1 and xmm2 is -0.5 this is simply xmm2 + xmm3. |
| 2217 __ addsd(xmm2, xmm3); |
| 2218 // xmm2 now has 0.5. |
| 2219 __ ucomisd(xmm2, xmm1); |
| 2220 __ j(not_equal, &call_runtime); |
| 2221 // Calculates square root. |
| 2222 // sqrtsd returns -0 when input is -0. ECMA spec requires +0. |
| 2223 __ xorpd(xmm1, xmm1); |
| 2224 __ addsd(xmm1, xmm0); |
| 2225 __ sqrtsd(xmm1, xmm1); |
| 2226 |
| 2227 __ bind(&allocate_return); |
| 2228 __ AllocateHeapNumber(rcx, rax, &call_runtime); |
| 2229 __ movsd(FieldOperand(rcx, HeapNumber::kValueOffset), xmm1); |
| 2230 __ movq(rax, rcx); |
| 2231 __ ret(2 * kPointerSize); |
| 2232 |
| 2233 __ bind(&call_runtime); |
| 2234 __ TailCallRuntime(Runtime::kMath_pow_cfunction, 2, 1); |
| 2235 } |
| 2236 |
| 2237 |
| 2037 void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) { | 2238 void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) { |
| 2038 // The key is in rdx and the parameter count is in rax. | 2239 // The key is in rdx and the parameter count is in rax. |
| 2039 | 2240 |
| 2040 // The displacement is used for skipping the frame pointer on the | 2241 // The displacement is used for skipping the frame pointer on the |
| 2041 // stack. It is the offset of the last parameter (if any) relative | 2242 // stack. It is the offset of the last parameter (if any) relative |
| 2042 // to the frame pointer. | 2243 // to the frame pointer. |
| 2043 static const int kDisplacement = 1 * kPointerSize; | 2244 static const int kDisplacement = 1 * kPointerSize; |
| 2044 | 2245 |
| 2045 // Check that the key is a smi. | 2246 // Check that the key is a smi. |
| 2046 Label slow; | 2247 Label slow; |
| (...skipping 217 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2264 FieldOperand(rcx, JSRegExp::kIrregexpCaptureCountOffset)); | 2465 FieldOperand(rcx, JSRegExp::kIrregexpCaptureCountOffset)); |
| 2265 // Calculate number of capture registers (number_of_captures + 1) * 2. | 2466 // Calculate number of capture registers (number_of_captures + 1) * 2. |
| 2266 __ leal(rdx, Operand(rdx, rdx, times_1, 2)); | 2467 __ leal(rdx, Operand(rdx, rdx, times_1, 2)); |
| 2267 // Check that the static offsets vector buffer is large enough. | 2468 // Check that the static offsets vector buffer is large enough. |
| 2268 __ cmpl(rdx, Immediate(OffsetsVector::kStaticOffsetsVectorSize)); | 2469 __ cmpl(rdx, Immediate(OffsetsVector::kStaticOffsetsVectorSize)); |
| 2269 __ j(above, &runtime); | 2470 __ j(above, &runtime); |
| 2270 | 2471 |
| 2271 // rcx: RegExp data (FixedArray) | 2472 // rcx: RegExp data (FixedArray) |
| 2272 // rdx: Number of capture registers | 2473 // rdx: Number of capture registers |
| 2273 // Check that the second argument is a string. | 2474 // Check that the second argument is a string. |
| 2274 __ movq(rax, Operand(rsp, kSubjectOffset)); | 2475 __ movq(rdi, Operand(rsp, kSubjectOffset)); |
| 2275 __ JumpIfSmi(rax, &runtime); | 2476 __ JumpIfSmi(rdi, &runtime); |
| 2276 Condition is_string = masm->IsObjectStringType(rax, rbx, rbx); | 2477 Condition is_string = masm->IsObjectStringType(rdi, rbx, rbx); |
| 2277 __ j(NegateCondition(is_string), &runtime); | 2478 __ j(NegateCondition(is_string), &runtime); |
| 2278 | 2479 |
| 2279 // rax: Subject string. | 2480 // rdi: Subject string. |
| 2280 // rcx: RegExp data (FixedArray). | 2481 // rax: RegExp data (FixedArray). |
| 2281 // rdx: Number of capture registers. | 2482 // rdx: Number of capture registers. |
| 2282 // Check that the third argument is a positive smi less than the string | 2483 // Check that the third argument is a positive smi less than the string |
| 2283 // length. A negative value will be greater (unsigned comparison). | 2484 // length. A negative value will be greater (unsigned comparison). |
| 2284 __ movq(rbx, Operand(rsp, kPreviousIndexOffset)); | 2485 __ movq(rbx, Operand(rsp, kPreviousIndexOffset)); |
| 2285 __ JumpIfNotSmi(rbx, &runtime); | 2486 __ JumpIfNotSmi(rbx, &runtime); |
| 2286 __ SmiCompare(rbx, FieldOperand(rax, String::kLengthOffset)); | 2487 __ SmiCompare(rbx, FieldOperand(rdi, String::kLengthOffset)); |
| 2287 __ j(above_equal, &runtime); | 2488 __ j(above_equal, &runtime); |
| 2288 | 2489 |
| 2289 // rcx: RegExp data (FixedArray) | 2490 // rax: RegExp data (FixedArray) |
| 2290 // rdx: Number of capture registers | 2491 // rdx: Number of capture registers |
| 2291 // Check that the fourth object is a JSArray object. | 2492 // Check that the fourth object is a JSArray object. |
| 2292 __ movq(rax, Operand(rsp, kLastMatchInfoOffset)); | 2493 __ movq(rdi, Operand(rsp, kLastMatchInfoOffset)); |
| 2293 __ JumpIfSmi(rax, &runtime); | 2494 __ JumpIfSmi(rdi, &runtime); |
| 2294 __ CmpObjectType(rax, JS_ARRAY_TYPE, kScratchRegister); | 2495 __ CmpObjectType(rdi, JS_ARRAY_TYPE, kScratchRegister); |
| 2295 __ j(not_equal, &runtime); | 2496 __ j(not_equal, &runtime); |
| 2296 // Check that the JSArray is in fast case. | 2497 // Check that the JSArray is in fast case. |
| 2297 __ movq(rbx, FieldOperand(rax, JSArray::kElementsOffset)); | 2498 __ movq(rbx, FieldOperand(rdi, JSArray::kElementsOffset)); |
| 2298 __ movq(rax, FieldOperand(rbx, HeapObject::kMapOffset)); | 2499 __ movq(rdi, FieldOperand(rbx, HeapObject::kMapOffset)); |
| 2299 __ Cmp(rax, FACTORY->fixed_array_map()); | 2500 __ Cmp(rdi, FACTORY->fixed_array_map()); |
| 2300 __ j(not_equal, &runtime); | 2501 __ j(not_equal, &runtime); |
| 2301 // Check that the last match info has space for the capture registers and the | 2502 // Check that the last match info has space for the capture registers and the |
| 2302 // additional information. Ensure no overflow in add. | 2503 // additional information. Ensure no overflow in add. |
| 2303 STATIC_ASSERT(FixedArray::kMaxLength < kMaxInt - FixedArray::kLengthOffset); | 2504 STATIC_ASSERT(FixedArray::kMaxLength < kMaxInt - FixedArray::kLengthOffset); |
| 2304 __ SmiToInteger32(rax, FieldOperand(rbx, FixedArray::kLengthOffset)); | 2505 __ SmiToInteger32(rdi, FieldOperand(rbx, FixedArray::kLengthOffset)); |
| 2305 __ addl(rdx, Immediate(RegExpImpl::kLastMatchOverhead)); | 2506 __ addl(rdx, Immediate(RegExpImpl::kLastMatchOverhead)); |
| 2306 __ cmpl(rdx, rax); | 2507 __ cmpl(rdx, rdi); |
| 2307 __ j(greater, &runtime); | 2508 __ j(greater, &runtime); |
| 2308 | 2509 |
| 2309 // rcx: RegExp data (FixedArray) | 2510 // rax: RegExp data (FixedArray) |
| 2310 // Check the representation and encoding of the subject string. | 2511 // Check the representation and encoding of the subject string. |
| 2311 NearLabel seq_ascii_string, seq_two_byte_string, check_code; | 2512 NearLabel seq_ascii_string, seq_two_byte_string, check_code; |
| 2312 __ movq(rax, Operand(rsp, kSubjectOffset)); | 2513 __ movq(rdi, Operand(rsp, kSubjectOffset)); |
| 2313 __ movq(rbx, FieldOperand(rax, HeapObject::kMapOffset)); | 2514 __ movq(rbx, FieldOperand(rdi, HeapObject::kMapOffset)); |
| 2314 __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset)); | 2515 __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset)); |
| 2315 // First check for flat two byte string. | 2516 // First check for flat two byte string. |
| 2316 __ andb(rbx, Immediate( | 2517 __ andb(rbx, Immediate( |
| 2317 kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask)); | 2518 kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask)); |
| 2318 STATIC_ASSERT((kStringTag | kSeqStringTag | kTwoByteStringTag) == 0); | 2519 STATIC_ASSERT((kStringTag | kSeqStringTag | kTwoByteStringTag) == 0); |
| 2319 __ j(zero, &seq_two_byte_string); | 2520 __ j(zero, &seq_two_byte_string); |
| 2320 // Any other flat string must be a flat ascii string. | 2521 // Any other flat string must be a flat ascii string. |
| 2321 __ testb(rbx, Immediate(kIsNotStringMask | kStringRepresentationMask)); | 2522 __ testb(rbx, Immediate(kIsNotStringMask | kStringRepresentationMask)); |
| 2322 __ j(zero, &seq_ascii_string); | 2523 __ j(zero, &seq_ascii_string); |
| 2323 | 2524 |
| 2324 // Check for flat cons string. | 2525 // Check for flat cons string. |
| 2325 // A flat cons string is a cons string where the second part is the empty | 2526 // A flat cons string is a cons string where the second part is the empty |
| 2326 // string. In that case the subject string is just the first part of the cons | 2527 // string. In that case the subject string is just the first part of the cons |
| 2327 // string. Also in this case the first part of the cons string is known to be | 2528 // string. Also in this case the first part of the cons string is known to be |
| 2328 // a sequential string or an external string. | 2529 // a sequential string or an external string. |
| 2329 STATIC_ASSERT(kExternalStringTag !=0); | 2530 STATIC_ASSERT(kExternalStringTag !=0); |
| 2330 STATIC_ASSERT((kConsStringTag & kExternalStringTag) == 0); | 2531 STATIC_ASSERT((kConsStringTag & kExternalStringTag) == 0); |
| 2331 __ testb(rbx, Immediate(kIsNotStringMask | kExternalStringTag)); | 2532 __ testb(rbx, Immediate(kIsNotStringMask | kExternalStringTag)); |
| 2332 __ j(not_zero, &runtime); | 2533 __ j(not_zero, &runtime); |
| 2333 // String is a cons string. | 2534 // String is a cons string. |
| 2334 __ movq(rdx, FieldOperand(rax, ConsString::kSecondOffset)); | 2535 __ movq(rdx, FieldOperand(rdi, ConsString::kSecondOffset)); |
| 2335 __ Cmp(rdx, FACTORY->empty_string()); | 2536 __ Cmp(rdx, FACTORY->empty_string()); |
| 2336 __ j(not_equal, &runtime); | 2537 __ j(not_equal, &runtime); |
| 2337 __ movq(rax, FieldOperand(rax, ConsString::kFirstOffset)); | 2538 __ movq(rdi, FieldOperand(rdi, ConsString::kFirstOffset)); |
| 2338 __ movq(rbx, FieldOperand(rax, HeapObject::kMapOffset)); | 2539 __ movq(rbx, FieldOperand(rdi, HeapObject::kMapOffset)); |
| 2339 // String is a cons string with empty second part. | 2540 // String is a cons string with empty second part. |
| 2340 // rax: first part of cons string. | 2541 // rdi: first part of cons string. |
| 2341 // rbx: map of first part of cons string. | 2542 // rbx: map of first part of cons string. |
| 2342 // Is first part a flat two byte string? | 2543 // Is first part a flat two byte string? |
| 2343 __ testb(FieldOperand(rbx, Map::kInstanceTypeOffset), | 2544 __ testb(FieldOperand(rbx, Map::kInstanceTypeOffset), |
| 2344 Immediate(kStringRepresentationMask | kStringEncodingMask)); | 2545 Immediate(kStringRepresentationMask | kStringEncodingMask)); |
| 2345 STATIC_ASSERT((kSeqStringTag | kTwoByteStringTag) == 0); | 2546 STATIC_ASSERT((kSeqStringTag | kTwoByteStringTag) == 0); |
| 2346 __ j(zero, &seq_two_byte_string); | 2547 __ j(zero, &seq_two_byte_string); |
| 2347 // Any other flat string must be ascii. | 2548 // Any other flat string must be ascii. |
| 2348 __ testb(FieldOperand(rbx, Map::kInstanceTypeOffset), | 2549 __ testb(FieldOperand(rbx, Map::kInstanceTypeOffset), |
| 2349 Immediate(kStringRepresentationMask)); | 2550 Immediate(kStringRepresentationMask)); |
| 2350 __ j(not_zero, &runtime); | 2551 __ j(not_zero, &runtime); |
| 2351 | 2552 |
| 2352 __ bind(&seq_ascii_string); | 2553 __ bind(&seq_ascii_string); |
| 2353 // rax: subject string (sequential ascii) | 2554 // rdi: subject string (sequential ascii) |
| 2354 // rcx: RegExp data (FixedArray) | 2555 // rax: RegExp data (FixedArray) |
| 2355 __ movq(r11, FieldOperand(rcx, JSRegExp::kDataAsciiCodeOffset)); | 2556 __ movq(r11, FieldOperand(rax, JSRegExp::kDataAsciiCodeOffset)); |
| 2356 __ Set(rdi, 1); // Type is ascii. | 2557 __ Set(rcx, 1); // Type is ascii. |
| 2357 __ jmp(&check_code); | 2558 __ jmp(&check_code); |
| 2358 | 2559 |
| 2359 __ bind(&seq_two_byte_string); | 2560 __ bind(&seq_two_byte_string); |
| 2360 // rax: subject string (flat two-byte) | 2561 // rdi: subject string (flat two-byte) |
| 2361 // rcx: RegExp data (FixedArray) | 2562 // rax: RegExp data (FixedArray) |
| 2362 __ movq(r11, FieldOperand(rcx, JSRegExp::kDataUC16CodeOffset)); | 2563 __ movq(r11, FieldOperand(rax, JSRegExp::kDataUC16CodeOffset)); |
| 2363 __ Set(rdi, 0); // Type is two byte. | 2564 __ Set(rcx, 0); // Type is two byte. |
| 2364 | 2565 |
| 2365 __ bind(&check_code); | 2566 __ bind(&check_code); |
| 2366 // Check that the irregexp code has been generated for the actual string | 2567 // Check that the irregexp code has been generated for the actual string |
| 2367 // encoding. If it has, the field contains a code object otherwise it contains | 2568 // encoding. If it has, the field contains a code object otherwise it contains |
| 2368 // the hole. | 2569 // the hole. |
| 2369 __ CmpObjectType(r11, CODE_TYPE, kScratchRegister); | 2570 __ CmpObjectType(r11, CODE_TYPE, kScratchRegister); |
| 2370 __ j(not_equal, &runtime); | 2571 __ j(not_equal, &runtime); |
| 2371 | 2572 |
| 2372 // rax: subject string | 2573 // rdi: subject string |
| 2373 // rdi: encoding of subject string (1 if ascii, 0 if two_byte); | 2574 // rcx: encoding of subject string (1 if ascii, 0 if two_byte); |
| 2374 // r11: code | 2575 // r11: code |
| 2375 // Load used arguments before starting to push arguments for call to native | 2576 // Load used arguments before starting to push arguments for call to native |
| 2376 // RegExp code to avoid handling changing stack height. | 2577 // RegExp code to avoid handling changing stack height. |
| 2377 __ SmiToInteger64(rbx, Operand(rsp, kPreviousIndexOffset)); | 2578 __ SmiToInteger64(rbx, Operand(rsp, kPreviousIndexOffset)); |
| 2378 | 2579 |
| 2379 // rax: subject string | 2580 // rdi: subject string |
| 2380 // rbx: previous index | 2581 // rbx: previous index |
| 2381 // rdi: encoding of subject string (1 if ascii 0 if two_byte); | 2582 // rcx: encoding of subject string (1 if ascii 0 if two_byte); |
| 2382 // r11: code | 2583 // r11: code |
| 2383 // All checks done. Now push arguments for native regexp code. | 2584 // All checks done. Now push arguments for native regexp code. |
| 2384 __ IncrementCounter(COUNTERS->regexp_entry_native(), 1); | 2585 __ IncrementCounter(COUNTERS->regexp_entry_native(), 1); |
| 2385 | 2586 |
| 2386 // rsi is caller save on Windows and used to pass parameter on Linux. | 2587 // Isolates: note we add an additional parameter here (isolate pointer). |
| 2387 __ push(rsi); | 2588 static const int kRegExpExecuteArguments = 8; |
| 2388 | |
| 2389 static const int kRegExpExecuteArguments = 7; | |
| 2390 __ PrepareCallCFunction(kRegExpExecuteArguments); | |
| 2391 int argument_slots_on_stack = | 2589 int argument_slots_on_stack = |
| 2392 masm->ArgumentStackSlotsForCFunctionCall(kRegExpExecuteArguments); | 2590 masm->ArgumentStackSlotsForCFunctionCall(kRegExpExecuteArguments); |
| 2591 __ EnterApiExitFrame(argument_slots_on_stack); // Clobbers rax! |
| 2592 |
| 2593 // Argument 8: Pass current isolate address. |
| 2594 // __ movq(Operand(rsp, (argument_slots_on_stack - 1) * kPointerSize), |
| 2595 // Immediate(ExternalReference::isolate_address())); |
| 2596 __ movq(kScratchRegister, ExternalReference::isolate_address()); |
| 2597 __ movq(Operand(rsp, (argument_slots_on_stack - 1) * kPointerSize), |
| 2598 kScratchRegister); |
| 2393 | 2599 |
| 2394 // Argument 7: Indicate that this is a direct call from JavaScript. | 2600 // Argument 7: Indicate that this is a direct call from JavaScript. |
| 2395 __ movq(Operand(rsp, (argument_slots_on_stack - 1) * kPointerSize), | 2601 __ movq(Operand(rsp, (argument_slots_on_stack - 2) * kPointerSize), |
| 2396 Immediate(1)); | 2602 Immediate(1)); |
| 2397 | 2603 |
| 2398 // Argument 6: Start (high end) of backtracking stack memory area. | 2604 // Argument 6: Start (high end) of backtracking stack memory area. |
| 2399 __ movq(kScratchRegister, address_of_regexp_stack_memory_address); | 2605 __ movq(kScratchRegister, address_of_regexp_stack_memory_address); |
| 2400 __ movq(r9, Operand(kScratchRegister, 0)); | 2606 __ movq(r9, Operand(kScratchRegister, 0)); |
| 2401 __ movq(kScratchRegister, address_of_regexp_stack_memory_size); | 2607 __ movq(kScratchRegister, address_of_regexp_stack_memory_size); |
| 2402 __ addq(r9, Operand(kScratchRegister, 0)); | 2608 __ addq(r9, Operand(kScratchRegister, 0)); |
| 2403 // Argument 6 passed in r9 on Linux and on the stack on Windows. | 2609 // Argument 6 passed in r9 on Linux and on the stack on Windows. |
| 2404 #ifdef _WIN64 | 2610 #ifdef _WIN64 |
| 2405 __ movq(Operand(rsp, (argument_slots_on_stack - 2) * kPointerSize), r9); | 2611 __ movq(Operand(rsp, (argument_slots_on_stack - 3) * kPointerSize), r9); |
| 2406 #endif | 2612 #endif |
| 2407 | 2613 |
| 2408 // Argument 5: static offsets vector buffer. | 2614 // Argument 5: static offsets vector buffer. |
| 2409 __ movq(r8, ExternalReference::address_of_static_offsets_vector()); | 2615 __ movq(r8, ExternalReference::address_of_static_offsets_vector()); |
| 2410 // Argument 5 passed in r8 on Linux and on the stack on Windows. | 2616 // Argument 5 passed in r8 on Linux and on the stack on Windows. |
| 2411 #ifdef _WIN64 | 2617 #ifdef _WIN64 |
| 2412 __ movq(Operand(rsp, (argument_slots_on_stack - 3) * kPointerSize), r8); | 2618 __ movq(Operand(rsp, (argument_slots_on_stack - 4) * kPointerSize), r8); |
| 2413 #endif | 2619 #endif |
| 2414 | 2620 |
| 2415 // First four arguments are passed in registers on both Linux and Windows. | 2621 // First four arguments are passed in registers on both Linux and Windows. |
| 2416 #ifdef _WIN64 | 2622 #ifdef _WIN64 |
| 2417 Register arg4 = r9; | 2623 Register arg4 = r9; |
| 2418 Register arg3 = r8; | 2624 Register arg3 = r8; |
| 2419 Register arg2 = rdx; | 2625 Register arg2 = rdx; |
| 2420 Register arg1 = rcx; | 2626 Register arg1 = rcx; |
| 2421 #else | 2627 #else |
| 2422 Register arg4 = rcx; | 2628 Register arg4 = rcx; |
| 2423 Register arg3 = rdx; | 2629 Register arg3 = rdx; |
| 2424 Register arg2 = rsi; | 2630 Register arg2 = rsi; |
| 2425 Register arg1 = rdi; | 2631 Register arg1 = rdi; |
| 2426 #endif | 2632 #endif |
| 2427 | 2633 |
| 2428 // Keep track on aliasing between argX defined above and the registers used. | 2634 // Keep track on aliasing between argX defined above and the registers used. |
| 2429 // rax: subject string | 2635 // rdi: subject string |
| 2430 // rbx: previous index | 2636 // rbx: previous index |
| 2431 // rdi: encoding of subject string (1 if ascii 0 if two_byte); | 2637 // rcx: encoding of subject string (1 if ascii 0 if two_byte); |
| 2432 // r11: code | 2638 // r11: code |
| 2433 | 2639 |
| 2434 // Argument 4: End of string data | 2640 // Argument 4: End of string data |
| 2435 // Argument 3: Start of string data | 2641 // Argument 3: Start of string data |
| 2436 NearLabel setup_two_byte, setup_rest; | 2642 NearLabel setup_two_byte, setup_rest; |
| 2437 __ testb(rdi, rdi); | 2643 __ testb(rcx, rcx); // Last use of rcx as encoding of subject string. |
| 2438 __ j(zero, &setup_two_byte); | 2644 __ j(zero, &setup_two_byte); |
| 2439 __ SmiToInteger32(rdi, FieldOperand(rax, String::kLengthOffset)); | 2645 __ SmiToInteger32(rcx, FieldOperand(rdi, String::kLengthOffset)); |
| 2440 __ lea(arg4, FieldOperand(rax, rdi, times_1, SeqAsciiString::kHeaderSize)); | 2646 __ lea(arg4, FieldOperand(rdi, rcx, times_1, SeqAsciiString::kHeaderSize)); |
| 2441 __ lea(arg3, FieldOperand(rax, rbx, times_1, SeqAsciiString::kHeaderSize)); | 2647 __ lea(arg3, FieldOperand(rdi, rbx, times_1, SeqAsciiString::kHeaderSize)); |
| 2442 __ jmp(&setup_rest); | 2648 __ jmp(&setup_rest); |
| 2443 __ bind(&setup_two_byte); | 2649 __ bind(&setup_two_byte); |
| 2444 __ SmiToInteger32(rdi, FieldOperand(rax, String::kLengthOffset)); | 2650 __ SmiToInteger32(rcx, FieldOperand(rdi, String::kLengthOffset)); |
| 2445 __ lea(arg4, FieldOperand(rax, rdi, times_2, SeqTwoByteString::kHeaderSize)); | 2651 __ lea(arg4, FieldOperand(rdi, rcx, times_2, SeqTwoByteString::kHeaderSize)); |
| 2446 __ lea(arg3, FieldOperand(rax, rbx, times_2, SeqTwoByteString::kHeaderSize)); | 2652 __ lea(arg3, FieldOperand(rdi, rbx, times_2, SeqTwoByteString::kHeaderSize)); |
| 2447 | 2653 |
| 2448 __ bind(&setup_rest); | 2654 __ bind(&setup_rest); |
| 2449 // Argument 2: Previous index. | 2655 // Argument 2: Previous index. |
| 2450 __ movq(arg2, rbx); | 2656 __ movq(arg2, rbx); |
| 2451 | 2657 |
| 2452 // Argument 1: Subject string. | 2658 // Argument 1: Subject string. |
| 2453 __ movq(arg1, rax); | 2659 #ifdef WIN64_ |
| 2660 __ movq(arg1, rdi); |
| 2661 #else |
| 2662 // Already there in AMD64 calling convention. |
| 2663 ASSERT(arg1.is(rdi)); |
| 2664 #endif |
| 2454 | 2665 |
| 2455 // Locate the code entry and call it. | 2666 // Locate the code entry and call it. |
| 2456 __ addq(r11, Immediate(Code::kHeaderSize - kHeapObjectTag)); | 2667 __ addq(r11, Immediate(Code::kHeaderSize - kHeapObjectTag)); |
| 2457 __ CallCFunction(r11, kRegExpExecuteArguments); | 2668 __ call(r11); |
| 2458 | 2669 |
| 2459 // rsi is caller save, as it is used to pass parameter. | 2670 __ LeaveApiExitFrame(); |
| 2460 __ pop(rsi); | |
| 2461 | 2671 |
| 2462 // Check the result. | 2672 // Check the result. |
| 2463 NearLabel success; | 2673 NearLabel success; |
| 2674 Label exception; |
| 2464 __ cmpl(rax, Immediate(NativeRegExpMacroAssembler::SUCCESS)); | 2675 __ cmpl(rax, Immediate(NativeRegExpMacroAssembler::SUCCESS)); |
| 2465 __ j(equal, &success); | 2676 __ j(equal, &success); |
| 2466 NearLabel failure; | 2677 __ cmpl(rax, Immediate(NativeRegExpMacroAssembler::EXCEPTION)); |
| 2678 __ j(equal, &exception); |
| 2467 __ cmpl(rax, Immediate(NativeRegExpMacroAssembler::FAILURE)); | 2679 __ cmpl(rax, Immediate(NativeRegExpMacroAssembler::FAILURE)); |
| 2468 __ j(equal, &failure); | 2680 // If none of the above, it can only be retry. |
| 2469 __ cmpl(rax, Immediate(NativeRegExpMacroAssembler::EXCEPTION)); | 2681 // Handle that in the runtime system. |
| 2470 // If not exception it can only be retry. Handle that in the runtime system. | |
| 2471 __ j(not_equal, &runtime); | 2682 __ j(not_equal, &runtime); |
| 2472 // Result must now be exception. If there is no pending exception already a | 2683 |
| 2473 // stack overflow (on the backtrack stack) was detected in RegExp code but | 2684 // For failure return null. |
| 2474 // haven't created the exception yet. Handle that in the runtime system. | 2685 __ LoadRoot(rax, Heap::kNullValueRootIndex); |
| 2475 // TODO(592): Rerunning the RegExp to get the stack overflow exception. | |
| 2476 ExternalReference pending_exception_address( | |
| 2477 Isolate::k_pending_exception_address); | |
| 2478 __ movq(kScratchRegister, pending_exception_address); | |
| 2479 __ Cmp(kScratchRegister, FACTORY->the_hole_value()); | |
| 2480 __ j(equal, &runtime); | |
| 2481 __ bind(&failure); | |
| 2482 // For failure and exception return null. | |
| 2483 __ Move(rax, FACTORY->null_value()); | |
| 2484 __ ret(4 * kPointerSize); | 2686 __ ret(4 * kPointerSize); |
| 2485 | 2687 |
| 2486 // Load RegExp data. | 2688 // Load RegExp data. |
| 2487 __ bind(&success); | 2689 __ bind(&success); |
| 2488 __ movq(rax, Operand(rsp, kJSRegExpOffset)); | 2690 __ movq(rax, Operand(rsp, kJSRegExpOffset)); |
| 2489 __ movq(rcx, FieldOperand(rax, JSRegExp::kDataOffset)); | 2691 __ movq(rcx, FieldOperand(rax, JSRegExp::kDataOffset)); |
| 2490 __ SmiToInteger32(rax, | 2692 __ SmiToInteger32(rax, |
| 2491 FieldOperand(rcx, JSRegExp::kIrregexpCaptureCountOffset)); | 2693 FieldOperand(rcx, JSRegExp::kIrregexpCaptureCountOffset)); |
| 2492 // Calculate number of capture registers (number_of_captures + 1) * 2. | 2694 // Calculate number of capture registers (number_of_captures + 1) * 2. |
| 2493 __ leal(rdx, Operand(rax, rax, times_1, 2)); | 2695 __ leal(rdx, Operand(rax, rax, times_1, 2)); |
| (...skipping 40 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2534 times_pointer_size, | 2736 times_pointer_size, |
| 2535 RegExpImpl::kFirstCaptureOffset), | 2737 RegExpImpl::kFirstCaptureOffset), |
| 2536 rdi); | 2738 rdi); |
| 2537 __ jmp(&next_capture); | 2739 __ jmp(&next_capture); |
| 2538 __ bind(&done); | 2740 __ bind(&done); |
| 2539 | 2741 |
| 2540 // Return last match info. | 2742 // Return last match info. |
| 2541 __ movq(rax, Operand(rsp, kLastMatchInfoOffset)); | 2743 __ movq(rax, Operand(rsp, kLastMatchInfoOffset)); |
| 2542 __ ret(4 * kPointerSize); | 2744 __ ret(4 * kPointerSize); |
| 2543 | 2745 |
| 2746 __ bind(&exception); |
| 2747 // Result must now be exception. If there is no pending exception already a |
| 2748 // stack overflow (on the backtrack stack) was detected in RegExp code but |
| 2749 // haven't created the exception yet. Handle that in the runtime system. |
| 2750 // TODO(592): Rerunning the RegExp to get the stack overflow exception. |
| 2751 ExternalReference pending_exception_address( |
| 2752 Isolate::k_pending_exception_address); |
| 2753 __ movq(rbx, pending_exception_address); |
| 2754 __ movq(rax, Operand(rbx, 0)); |
| 2755 __ LoadRoot(rdx, Heap::kTheHoleValueRootIndex); |
| 2756 __ cmpq(rax, rdx); |
| 2757 __ j(equal, &runtime); |
| 2758 __ movq(Operand(rbx, 0), rdx); |
| 2759 |
| 2760 __ CompareRoot(rax, Heap::kTerminationExceptionRootIndex); |
| 2761 NearLabel termination_exception; |
| 2762 __ j(equal, &termination_exception); |
| 2763 __ Throw(rax); |
| 2764 |
| 2765 __ bind(&termination_exception); |
| 2766 __ ThrowUncatchable(TERMINATION, rax); |
| 2767 |
| 2544 // Do the runtime call to execute the regexp. | 2768 // Do the runtime call to execute the regexp. |
| 2545 __ bind(&runtime); | 2769 __ bind(&runtime); |
| 2546 __ TailCallRuntime(Runtime::kRegExpExec, 4, 1); | 2770 __ TailCallRuntime(Runtime::kRegExpExec, 4, 1); |
| 2547 #endif // V8_INTERPRETED_REGEXP | 2771 #endif // V8_INTERPRETED_REGEXP |
| 2548 } | 2772 } |
| 2549 | 2773 |
| 2550 | 2774 |
| 2551 void RegExpConstructResultStub::Generate(MacroAssembler* masm) { | 2775 void RegExpConstructResultStub::Generate(MacroAssembler* masm) { |
| 2552 const int kMaxInlineLength = 100; | 2776 const int kMaxInlineLength = 100; |
| 2553 Label slowcase; | 2777 Label slowcase; |
| (...skipping 529 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 3083 __ Set(rax, argc_); | 3307 __ Set(rax, argc_); |
| 3084 __ Set(rbx, 0); | 3308 __ Set(rbx, 0); |
| 3085 __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION); | 3309 __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION); |
| 3086 Handle<Code> adaptor(Isolate::Current()->builtins()->builtin( | 3310 Handle<Code> adaptor(Isolate::Current()->builtins()->builtin( |
| 3087 Builtins::ArgumentsAdaptorTrampoline)); | 3311 Builtins::ArgumentsAdaptorTrampoline)); |
| 3088 __ Jump(adaptor, RelocInfo::CODE_TARGET); | 3312 __ Jump(adaptor, RelocInfo::CODE_TARGET); |
| 3089 } | 3313 } |
| 3090 | 3314 |
| 3091 | 3315 |
| 3092 void CEntryStub::GenerateThrowTOS(MacroAssembler* masm) { | 3316 void CEntryStub::GenerateThrowTOS(MacroAssembler* masm) { |
| 3093 // Check that stack should contain next handler, frame pointer, state and | 3317 // Throw exception in eax. |
| 3094 // return address in that order. | 3318 __ Throw(rax); |
| 3095 STATIC_ASSERT(StackHandlerConstants::kFPOffset + kPointerSize == | |
| 3096 StackHandlerConstants::kStateOffset); | |
| 3097 STATIC_ASSERT(StackHandlerConstants::kStateOffset + kPointerSize == | |
| 3098 StackHandlerConstants::kPCOffset); | |
| 3099 | |
| 3100 ExternalReference handler_address(Isolate::k_handler_address); | |
| 3101 __ movq(kScratchRegister, handler_address); | |
| 3102 __ movq(rsp, Operand(kScratchRegister, 0)); | |
| 3103 // get next in chain | |
| 3104 __ pop(rcx); | |
| 3105 __ movq(Operand(kScratchRegister, 0), rcx); | |
| 3106 __ pop(rbp); // pop frame pointer | |
| 3107 __ pop(rdx); // remove state | |
| 3108 | |
| 3109 // Before returning we restore the context from the frame pointer if not NULL. | |
| 3110 // The frame pointer is NULL in the exception handler of a JS entry frame. | |
| 3111 __ Set(rsi, 0); // Tentatively set context pointer to NULL | |
| 3112 NearLabel skip; | |
| 3113 __ cmpq(rbp, Immediate(0)); | |
| 3114 __ j(equal, &skip); | |
| 3115 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); | |
| 3116 __ bind(&skip); | |
| 3117 __ ret(0); | |
| 3118 } | 3319 } |
| 3119 | 3320 |
| 3120 | 3321 |
| 3121 void CEntryStub::GenerateCore(MacroAssembler* masm, | 3322 void CEntryStub::GenerateCore(MacroAssembler* masm, |
| 3122 Label* throw_normal_exception, | 3323 Label* throw_normal_exception, |
| 3123 Label* throw_termination_exception, | 3324 Label* throw_termination_exception, |
| 3124 Label* throw_out_of_memory_exception, | 3325 Label* throw_out_of_memory_exception, |
| 3125 bool do_gc, | 3326 bool do_gc, |
| 3126 bool always_allocate_scope) { | 3327 bool always_allocate_scope) { |
| 3127 // rax: result parameter for PerformGC, if any. | 3328 // rax: result parameter for PerformGC, if any. |
| (...skipping 125 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 3253 // Handle normal exception. | 3454 // Handle normal exception. |
| 3254 __ jmp(throw_normal_exception); | 3455 __ jmp(throw_normal_exception); |
| 3255 | 3456 |
| 3256 // Retry. | 3457 // Retry. |
| 3257 __ bind(&retry); | 3458 __ bind(&retry); |
| 3258 } | 3459 } |
| 3259 | 3460 |
| 3260 | 3461 |
| 3261 void CEntryStub::GenerateThrowUncatchable(MacroAssembler* masm, | 3462 void CEntryStub::GenerateThrowUncatchable(MacroAssembler* masm, |
| 3262 UncatchableExceptionType type) { | 3463 UncatchableExceptionType type) { |
| 3263 // Fetch top stack handler. | 3464 __ ThrowUncatchable(type, rax); |
| 3264 ExternalReference handler_address(Isolate::k_handler_address); | |
| 3265 __ movq(kScratchRegister, handler_address); | |
| 3266 __ movq(rsp, Operand(kScratchRegister, 0)); | |
| 3267 | |
| 3268 // Unwind the handlers until the ENTRY handler is found. | |
| 3269 NearLabel loop, done; | |
| 3270 __ bind(&loop); | |
| 3271 // Load the type of the current stack handler. | |
| 3272 const int kStateOffset = StackHandlerConstants::kStateOffset; | |
| 3273 __ cmpq(Operand(rsp, kStateOffset), Immediate(StackHandler::ENTRY)); | |
| 3274 __ j(equal, &done); | |
| 3275 // Fetch the next handler in the list. | |
| 3276 const int kNextOffset = StackHandlerConstants::kNextOffset; | |
| 3277 __ movq(rsp, Operand(rsp, kNextOffset)); | |
| 3278 __ jmp(&loop); | |
| 3279 __ bind(&done); | |
| 3280 | |
| 3281 // Set the top handler address to next handler past the current ENTRY handler. | |
| 3282 __ movq(kScratchRegister, handler_address); | |
| 3283 __ pop(Operand(kScratchRegister, 0)); | |
| 3284 | |
| 3285 if (type == OUT_OF_MEMORY) { | |
| 3286 // Set external caught exception to false. | |
| 3287 ExternalReference external_caught( | |
| 3288 Isolate::k_external_caught_exception_address); | |
| 3289 __ movq(rax, Immediate(false)); | |
| 3290 __ store_rax(external_caught); | |
| 3291 | |
| 3292 // Set pending exception and rax to out of memory exception. | |
| 3293 ExternalReference pending_exception(Isolate::k_pending_exception_address); | |
| 3294 __ movq(rax, Failure::OutOfMemoryException(), RelocInfo::NONE); | |
| 3295 __ store_rax(pending_exception); | |
| 3296 } | |
| 3297 | |
| 3298 // Clear the context pointer. | |
| 3299 __ Set(rsi, 0); | |
| 3300 | |
| 3301 // Restore registers from handler. | |
| 3302 STATIC_ASSERT(StackHandlerConstants::kNextOffset + kPointerSize == | |
| 3303 StackHandlerConstants::kFPOffset); | |
| 3304 __ pop(rbp); // FP | |
| 3305 STATIC_ASSERT(StackHandlerConstants::kFPOffset + kPointerSize == | |
| 3306 StackHandlerConstants::kStateOffset); | |
| 3307 __ pop(rdx); // State | |
| 3308 | |
| 3309 STATIC_ASSERT(StackHandlerConstants::kStateOffset + kPointerSize == | |
| 3310 StackHandlerConstants::kPCOffset); | |
| 3311 __ ret(0); | |
| 3312 } | 3465 } |
| 3313 | 3466 |
| 3314 | 3467 |
| 3315 void CEntryStub::Generate(MacroAssembler* masm) { | 3468 void CEntryStub::Generate(MacroAssembler* masm) { |
| 3316 // rax: number of arguments including receiver | 3469 // rax: number of arguments including receiver |
| 3317 // rbx: pointer to C function (C callee-saved) | 3470 // rbx: pointer to C function (C callee-saved) |
| 3318 // rbp: frame pointer of calling JS frame (restored after C call) | 3471 // rbp: frame pointer of calling JS frame (restored after C call) |
| 3319 // rsp: stack pointer (restored after C call) | 3472 // rsp: stack pointer (restored after C call) |
| 3320 // rsi: current context (restored) | 3473 // rsi: current context (restored) |
| 3321 | 3474 |
| (...skipping 197 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 3519 void InstanceofStub::Generate(MacroAssembler* masm) { | 3672 void InstanceofStub::Generate(MacroAssembler* masm) { |
| 3520 // Implements "value instanceof function" operator. | 3673 // Implements "value instanceof function" operator. |
| 3521 // Expected input state: | 3674 // Expected input state: |
| 3522 // rsp[0] : return address | 3675 // rsp[0] : return address |
| 3523 // rsp[1] : function pointer | 3676 // rsp[1] : function pointer |
| 3524 // rsp[2] : value | 3677 // rsp[2] : value |
| 3525 // Returns a bitwise zero to indicate that the value | 3678 // Returns a bitwise zero to indicate that the value |
| 3526 // is an instance of the function and anything else to | 3679 // is an instance of the function and anything else to |
| 3527 // indicate that the value is not an instance. | 3680 // indicate that the value is not an instance. |
| 3528 | 3681 |
| 3682 // None of the flags are supported on X64. |
| 3683 ASSERT(flags_ == kNoFlags); |
| 3684 |
| 3529 // Get the object - go slow case if it's a smi. | 3685 // Get the object - go slow case if it's a smi. |
| 3530 Label slow; | 3686 Label slow; |
| 3531 __ movq(rax, Operand(rsp, 2 * kPointerSize)); | 3687 __ movq(rax, Operand(rsp, 2 * kPointerSize)); |
| 3532 __ JumpIfSmi(rax, &slow); | 3688 __ JumpIfSmi(rax, &slow); |
| 3533 | 3689 |
| 3534 // Check that the left hand is a JS object. Leave its map in rax. | 3690 // Check that the left hand is a JS object. Leave its map in rax. |
| 3535 __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rax); | 3691 __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rax); |
| 3536 __ j(below, &slow); | 3692 __ j(below, &slow); |
| 3537 __ CmpInstanceType(rax, LAST_JS_OBJECT_TYPE); | 3693 __ CmpInstanceType(rax, LAST_JS_OBJECT_TYPE); |
| 3538 __ j(above, &slow); | 3694 __ j(above, &slow); |
| (...skipping 55 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 3594 // We have to store a non-zero value in the cache. | 3750 // We have to store a non-zero value in the cache. |
| 3595 __ StoreRoot(kScratchRegister, Heap::kInstanceofCacheAnswerRootIndex); | 3751 __ StoreRoot(kScratchRegister, Heap::kInstanceofCacheAnswerRootIndex); |
| 3596 __ ret(2 * kPointerSize); | 3752 __ ret(2 * kPointerSize); |
| 3597 | 3753 |
| 3598 // Slow-case: Go through the JavaScript implementation. | 3754 // Slow-case: Go through the JavaScript implementation. |
| 3599 __ bind(&slow); | 3755 __ bind(&slow); |
| 3600 __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION); | 3756 __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION); |
| 3601 } | 3757 } |
| 3602 | 3758 |
| 3603 | 3759 |
| 3604 Register InstanceofStub::left() { return rax; } | 3760 // Passing arguments in registers is not supported. |
| 3761 Register InstanceofStub::left() { return no_reg; } |
| 3605 | 3762 |
| 3606 | 3763 |
| 3607 Register InstanceofStub::right() { return rdx; } | 3764 Register InstanceofStub::right() { return no_reg; } |
| 3608 | 3765 |
| 3609 | 3766 |
| 3610 int CompareStub::MinorKey() { | 3767 int CompareStub::MinorKey() { |
| 3611 // Encode the three parameters in a unique 16 bit value. To avoid duplicate | 3768 // Encode the three parameters in a unique 16 bit value. To avoid duplicate |
| 3612 // stubs the never NaN NaN condition is only taken into account if the | 3769 // stubs the never NaN NaN condition is only taken into account if the |
| 3613 // condition is equals. | 3770 // condition is equals. |
| 3614 ASSERT(static_cast<unsigned>(cc_) < (1 << 12)); | 3771 ASSERT(static_cast<unsigned>(cc_) < (1 << 12)); |
| 3615 ASSERT(lhs_.is(no_reg) && rhs_.is(no_reg)); | 3772 ASSERT(lhs_.is(no_reg) && rhs_.is(no_reg)); |
| 3616 return ConditionField::encode(static_cast<unsigned>(cc_)) | 3773 return ConditionField::encode(static_cast<unsigned>(cc_)) |
| 3617 | RegisterField::encode(false) // lhs_ and rhs_ are not used | 3774 | RegisterField::encode(false) // lhs_ and rhs_ are not used |
| (...skipping 239 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 3857 | 4014 |
| 3858 | 4015 |
| 3859 void StringCharAtGenerator::GenerateSlow( | 4016 void StringCharAtGenerator::GenerateSlow( |
| 3860 MacroAssembler* masm, const RuntimeCallHelper& call_helper) { | 4017 MacroAssembler* masm, const RuntimeCallHelper& call_helper) { |
| 3861 char_code_at_generator_.GenerateSlow(masm, call_helper); | 4018 char_code_at_generator_.GenerateSlow(masm, call_helper); |
| 3862 char_from_code_generator_.GenerateSlow(masm, call_helper); | 4019 char_from_code_generator_.GenerateSlow(masm, call_helper); |
| 3863 } | 4020 } |
| 3864 | 4021 |
| 3865 | 4022 |
| 3866 void StringAddStub::Generate(MacroAssembler* masm) { | 4023 void StringAddStub::Generate(MacroAssembler* masm) { |
| 3867 Label string_add_runtime; | 4024 Label string_add_runtime, call_builtin; |
| 4025 Builtins::JavaScript builtin_id = Builtins::ADD; |
| 3868 | 4026 |
| 3869 // Load the two arguments. | 4027 // Load the two arguments. |
| 3870 __ movq(rax, Operand(rsp, 2 * kPointerSize)); // First argument. | 4028 __ movq(rax, Operand(rsp, 2 * kPointerSize)); // First argument (left). |
| 3871 __ movq(rdx, Operand(rsp, 1 * kPointerSize)); // Second argument. | 4029 __ movq(rdx, Operand(rsp, 1 * kPointerSize)); // Second argument (right). |
| 3872 | 4030 |
| 3873 // Make sure that both arguments are strings if not known in advance. | 4031 // Make sure that both arguments are strings if not known in advance. |
| 3874 if (string_check_) { | 4032 if (flags_ == NO_STRING_ADD_FLAGS) { |
| 3875 Condition is_smi; | 4033 Condition is_smi; |
| 3876 is_smi = masm->CheckSmi(rax); | 4034 is_smi = masm->CheckSmi(rax); |
| 3877 __ j(is_smi, &string_add_runtime); | 4035 __ j(is_smi, &string_add_runtime); |
| 3878 __ CmpObjectType(rax, FIRST_NONSTRING_TYPE, r8); | 4036 __ CmpObjectType(rax, FIRST_NONSTRING_TYPE, r8); |
| 3879 __ j(above_equal, &string_add_runtime); | 4037 __ j(above_equal, &string_add_runtime); |
| 3880 | 4038 |
| 3881 // First argument is a string, test second. | 4039 // First argument is a string, test second. |
| 3882 is_smi = masm->CheckSmi(rdx); | 4040 is_smi = masm->CheckSmi(rdx); |
| 3883 __ j(is_smi, &string_add_runtime); | 4041 __ j(is_smi, &string_add_runtime); |
| 3884 __ CmpObjectType(rdx, FIRST_NONSTRING_TYPE, r9); | 4042 __ CmpObjectType(rdx, FIRST_NONSTRING_TYPE, r9); |
| 3885 __ j(above_equal, &string_add_runtime); | 4043 __ j(above_equal, &string_add_runtime); |
| 4044 } else { |
| 4045 // Here at least one of the arguments is definitely a string. |
| 4046 // We convert the one that is not known to be a string. |
| 4047 if ((flags_ & NO_STRING_CHECK_LEFT_IN_STUB) == 0) { |
| 4048 ASSERT((flags_ & NO_STRING_CHECK_RIGHT_IN_STUB) != 0); |
| 4049 GenerateConvertArgument(masm, 2 * kPointerSize, rax, rbx, rcx, rdi, |
| 4050 &call_builtin); |
| 4051 builtin_id = Builtins::STRING_ADD_RIGHT; |
| 4052 } else if ((flags_ & NO_STRING_CHECK_RIGHT_IN_STUB) == 0) { |
| 4053 ASSERT((flags_ & NO_STRING_CHECK_LEFT_IN_STUB) != 0); |
| 4054 GenerateConvertArgument(masm, 1 * kPointerSize, rdx, rbx, rcx, rdi, |
| 4055 &call_builtin); |
| 4056 builtin_id = Builtins::STRING_ADD_LEFT; |
| 4057 } |
| 3886 } | 4058 } |
| 3887 | 4059 |
| 3888 // Both arguments are strings. | 4060 // Both arguments are strings. |
| 3889 // rax: first string | 4061 // rax: first string |
| 3890 // rdx: second string | 4062 // rdx: second string |
| 3891 // Check if either of the strings are empty. In that case return the other. | 4063 // Check if either of the strings are empty. In that case return the other. |
| 3892 NearLabel second_not_zero_length, both_not_zero_length; | 4064 NearLabel second_not_zero_length, both_not_zero_length; |
| 3893 __ movq(rcx, FieldOperand(rdx, String::kLengthOffset)); | 4065 __ movq(rcx, FieldOperand(rdx, String::kLengthOffset)); |
| 3894 __ SmiTest(rcx); | 4066 __ SmiTest(rcx); |
| 3895 __ j(not_zero, &second_not_zero_length); | 4067 __ j(not_zero, &second_not_zero_length); |
| 3896 // Second string is empty, result is first string which is already in rax. | 4068 // Second string is empty, result is first string which is already in rax. |
| 3897 __ IncrementCounter(COUNTERS->string_add_native(), 1); | 4069 __ IncrementCounter(COUNTERS->string_add_native(), 1); |
| 3898 __ ret(2 * kPointerSize); | 4070 __ ret(2 * kPointerSize); |
| 3899 __ bind(&second_not_zero_length); | 4071 __ bind(&second_not_zero_length); |
| 3900 __ movq(rbx, FieldOperand(rax, String::kLengthOffset)); | 4072 __ movq(rbx, FieldOperand(rax, String::kLengthOffset)); |
| 3901 __ SmiTest(rbx); | 4073 __ SmiTest(rbx); |
| 3902 __ j(not_zero, &both_not_zero_length); | 4074 __ j(not_zero, &both_not_zero_length); |
| 3903 // First string is empty, result is second string which is in rdx. | 4075 // First string is empty, result is second string which is in rdx. |
| 3904 __ movq(rax, rdx); | 4076 __ movq(rax, rdx); |
| 3905 __ IncrementCounter(COUNTERS->string_add_native(), 1); | 4077 __ IncrementCounter(COUNTERS->string_add_native(), 1); |
| 3906 __ ret(2 * kPointerSize); | 4078 __ ret(2 * kPointerSize); |
| 3907 | 4079 |
| 3908 // Both strings are non-empty. | 4080 // Both strings are non-empty. |
| 3909 // rax: first string | 4081 // rax: first string |
| 3910 // rbx: length of first string | 4082 // rbx: length of first string |
| 3911 // rcx: length of second string | 4083 // rcx: length of second string |
| 3912 // rdx: second string | 4084 // rdx: second string |
| 3913 // r8: map of first string if string check was performed above | 4085 // r8: map of first string (if flags_ == NO_STRING_ADD_FLAGS) |
| 3914 // r9: map of second string if string check was performed above | 4086 // r9: map of second string (if flags_ == NO_STRING_ADD_FLAGS) |
| 3915 Label string_add_flat_result, longer_than_two; | 4087 Label string_add_flat_result, longer_than_two; |
| 3916 __ bind(&both_not_zero_length); | 4088 __ bind(&both_not_zero_length); |
| 3917 | 4089 |
| 3918 // If arguments were known to be strings, maps are not loaded to r8 and r9 | 4090 // If arguments were known to be strings, maps are not loaded to r8 and r9 |
| 3919 // by the code above. | 4091 // by the code above. |
| 3920 if (!string_check_) { | 4092 if (flags_ != NO_STRING_ADD_FLAGS) { |
| 3921 __ movq(r8, FieldOperand(rax, HeapObject::kMapOffset)); | 4093 __ movq(r8, FieldOperand(rax, HeapObject::kMapOffset)); |
| 3922 __ movq(r9, FieldOperand(rdx, HeapObject::kMapOffset)); | 4094 __ movq(r9, FieldOperand(rdx, HeapObject::kMapOffset)); |
| 3923 } | 4095 } |
| 3924 // Get the instance types of the two strings as they will be needed soon. | 4096 // Get the instance types of the two strings as they will be needed soon. |
| 3925 __ movzxbl(r8, FieldOperand(r8, Map::kInstanceTypeOffset)); | 4097 __ movzxbl(r8, FieldOperand(r8, Map::kInstanceTypeOffset)); |
| 3926 __ movzxbl(r9, FieldOperand(r9, Map::kInstanceTypeOffset)); | 4098 __ movzxbl(r9, FieldOperand(r9, Map::kInstanceTypeOffset)); |
| 3927 | 4099 |
| 3928 // Look at the length of the result of adding the two strings. | 4100 // Look at the length of the result of adding the two strings. |
| 3929 STATIC_ASSERT(String::kMaxLength <= Smi::kMaxValue / 2); | 4101 STATIC_ASSERT(String::kMaxLength <= Smi::kMaxValue / 2); |
| 3930 __ SmiAdd(rbx, rbx, rcx); | 4102 __ SmiAdd(rbx, rbx, rcx); |
| (...skipping 165 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 4096 // rdx: first char of second argument | 4268 // rdx: first char of second argument |
| 4097 // rdi: length of second argument | 4269 // rdi: length of second argument |
| 4098 StringHelper::GenerateCopyCharacters(masm, rcx, rdx, rdi, false); | 4270 StringHelper::GenerateCopyCharacters(masm, rcx, rdx, rdi, false); |
| 4099 __ movq(rax, rbx); | 4271 __ movq(rax, rbx); |
| 4100 __ IncrementCounter(COUNTERS->string_add_native(), 1); | 4272 __ IncrementCounter(COUNTERS->string_add_native(), 1); |
| 4101 __ ret(2 * kPointerSize); | 4273 __ ret(2 * kPointerSize); |
| 4102 | 4274 |
| 4103 // Just jump to runtime to add the two strings. | 4275 // Just jump to runtime to add the two strings. |
| 4104 __ bind(&string_add_runtime); | 4276 __ bind(&string_add_runtime); |
| 4105 __ TailCallRuntime(Runtime::kStringAdd, 2, 1); | 4277 __ TailCallRuntime(Runtime::kStringAdd, 2, 1); |
| 4278 |
| 4279 if (call_builtin.is_linked()) { |
| 4280 __ bind(&call_builtin); |
| 4281 __ InvokeBuiltin(builtin_id, JUMP_FUNCTION); |
| 4282 } |
| 4283 } |
| 4284 |
| 4285 |
| 4286 void StringAddStub::GenerateConvertArgument(MacroAssembler* masm, |
| 4287 int stack_offset, |
| 4288 Register arg, |
| 4289 Register scratch1, |
| 4290 Register scratch2, |
| 4291 Register scratch3, |
| 4292 Label* slow) { |
| 4293 // First check if the argument is already a string. |
| 4294 Label not_string, done; |
| 4295 __ JumpIfSmi(arg, ¬_string); |
| 4296 __ CmpObjectType(arg, FIRST_NONSTRING_TYPE, scratch1); |
| 4297 __ j(below, &done); |
| 4298 |
| 4299 // Check the number to string cache. |
| 4300 Label not_cached; |
| 4301 __ bind(¬_string); |
| 4302 // Puts the cached result into scratch1. |
| 4303 NumberToStringStub::GenerateLookupNumberStringCache(masm, |
| 4304 arg, |
| 4305 scratch1, |
| 4306 scratch2, |
| 4307 scratch3, |
| 4308 false, |
| 4309 ¬_cached); |
| 4310 __ movq(arg, scratch1); |
| 4311 __ movq(Operand(rsp, stack_offset), arg); |
| 4312 __ jmp(&done); |
| 4313 |
| 4314 // Check if the argument is a safe string wrapper. |
| 4315 __ bind(¬_cached); |
| 4316 __ JumpIfSmi(arg, slow); |
| 4317 __ CmpObjectType(arg, JS_VALUE_TYPE, scratch1); // map -> scratch1. |
| 4318 __ j(not_equal, slow); |
| 4319 __ testb(FieldOperand(scratch1, Map::kBitField2Offset), |
| 4320 Immediate(1 << Map::kStringWrapperSafeForDefaultValueOf)); |
| 4321 __ j(zero, slow); |
| 4322 __ movq(arg, FieldOperand(arg, JSValue::kValueOffset)); |
| 4323 __ movq(Operand(rsp, stack_offset), arg); |
| 4324 |
| 4325 __ bind(&done); |
| 4106 } | 4326 } |
| 4107 | 4327 |
| 4108 | 4328 |
| 4109 void StringHelper::GenerateCopyCharacters(MacroAssembler* masm, | 4329 void StringHelper::GenerateCopyCharacters(MacroAssembler* masm, |
| 4110 Register dest, | 4330 Register dest, |
| 4111 Register src, | 4331 Register src, |
| 4112 Register count, | 4332 Register count, |
| 4113 bool ascii) { | 4333 bool ascii) { |
| 4114 Label loop; | 4334 Label loop; |
| 4115 __ bind(&loop); | 4335 __ bind(&loop); |
| (...skipping 508 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 4624 __ addq(rsp, Immediate(2 * kPointerSize)); | 4844 __ addq(rsp, Immediate(2 * kPointerSize)); |
| 4625 __ push(rcx); | 4845 __ push(rcx); |
| 4626 GenerateCompareFlatAsciiStrings(masm, rdx, rax, rcx, rbx, rdi, r8); | 4846 GenerateCompareFlatAsciiStrings(masm, rdx, rax, rcx, rbx, rdi, r8); |
| 4627 | 4847 |
| 4628 // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater) | 4848 // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater) |
| 4629 // tagged as a small integer. | 4849 // tagged as a small integer. |
| 4630 __ bind(&runtime); | 4850 __ bind(&runtime); |
| 4631 __ TailCallRuntime(Runtime::kStringCompare, 2, 1); | 4851 __ TailCallRuntime(Runtime::kStringCompare, 2, 1); |
| 4632 } | 4852 } |
| 4633 | 4853 |
| 4854 |
| 4855 void StringCharAtStub::Generate(MacroAssembler* masm) { |
| 4856 // Expects two arguments (object, index) on the stack: |
| 4857 |
| 4858 // Stack frame on entry. |
| 4859 // rsp[0]: return address |
| 4860 // rsp[8]: index |
| 4861 // rsp[16]: object |
| 4862 |
| 4863 Register object = rbx; |
| 4864 Register index = rax; |
| 4865 Register scratch1 = rcx; |
| 4866 Register scratch2 = rdx; |
| 4867 Register result = rax; |
| 4868 |
| 4869 __ pop(scratch1); // Return address. |
| 4870 __ pop(index); |
| 4871 __ pop(object); |
| 4872 __ push(scratch1); |
| 4873 |
| 4874 Label need_conversion; |
| 4875 Label index_out_of_range; |
| 4876 Label done; |
| 4877 StringCharAtGenerator generator(object, |
| 4878 index, |
| 4879 scratch1, |
| 4880 scratch2, |
| 4881 result, |
| 4882 &need_conversion, |
| 4883 &need_conversion, |
| 4884 &index_out_of_range, |
| 4885 STRING_INDEX_IS_NUMBER); |
| 4886 generator.GenerateFast(masm); |
| 4887 __ jmp(&done); |
| 4888 |
| 4889 __ bind(&index_out_of_range); |
| 4890 // When the index is out of range, the spec requires us to return |
| 4891 // the empty string. |
| 4892 __ Move(result, FACTORY->empty_string()); |
| 4893 __ jmp(&done); |
| 4894 |
| 4895 __ bind(&need_conversion); |
| 4896 // Move smi zero into the result register, which will trigger |
| 4897 // conversion. |
| 4898 __ Move(result, Smi::FromInt(0)); |
| 4899 __ jmp(&done); |
| 4900 |
| 4901 StubRuntimeCallHelper call_helper; |
| 4902 generator.GenerateSlow(masm, call_helper); |
| 4903 |
| 4904 __ bind(&done); |
| 4905 __ ret(0); |
| 4906 } |
| 4907 |
| 4908 |
| 4634 void ICCompareStub::GenerateSmis(MacroAssembler* masm) { | 4909 void ICCompareStub::GenerateSmis(MacroAssembler* masm) { |
| 4635 ASSERT(state_ == CompareIC::SMIS); | 4910 ASSERT(state_ == CompareIC::SMIS); |
| 4636 NearLabel miss; | 4911 NearLabel miss; |
| 4637 __ JumpIfNotBothSmi(rdx, rax, &miss); | 4912 __ JumpIfNotBothSmi(rdx, rax, &miss); |
| 4638 | 4913 |
| 4639 if (GetCondition() == equal) { | 4914 if (GetCondition() == equal) { |
| 4640 // For equality we do not care about the sign of the result. | 4915 // For equality we do not care about the sign of the result. |
| 4641 __ subq(rax, rdx); | 4916 __ subq(rax, rdx); |
| 4642 } else { | 4917 } else { |
| 4643 NearLabel done; | 4918 NearLabel done; |
| (...skipping 127 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 4771 // to ensure the key is a smi must be added. | 5046 // to ensure the key is a smi must be added. |
| 4772 if (key_not_smi != NULL) { | 5047 if (key_not_smi != NULL) { |
| 4773 __ JumpIfNotSmi(key, key_not_smi); | 5048 __ JumpIfNotSmi(key, key_not_smi); |
| 4774 } else { | 5049 } else { |
| 4775 if (FLAG_debug_code) { | 5050 if (FLAG_debug_code) { |
| 4776 __ AbortIfNotSmi(key); | 5051 __ AbortIfNotSmi(key); |
| 4777 } | 5052 } |
| 4778 } | 5053 } |
| 4779 __ SmiToInteger32(untagged_key, key); | 5054 __ SmiToInteger32(untagged_key, key); |
| 4780 | 5055 |
| 4781 // Verify that the receiver has pixel array elements. | |
| 4782 __ movq(elements, FieldOperand(receiver, JSObject::kElementsOffset)); | 5056 __ movq(elements, FieldOperand(receiver, JSObject::kElementsOffset)); |
| 4783 __ CheckMap(elements, FACTORY->pixel_array_map(), not_pixel_array, true); | 5057 // By passing NULL as not_pixel_array, callers signal that they have already |
| 5058 // verified that the receiver has pixel array elements. |
| 5059 if (not_pixel_array != NULL) { |
| 5060 __ CheckMap(elements, FACTORY->pixel_array_map(), not_pixel_array, true); |
| 5061 } else { |
| 5062 if (FLAG_debug_code) { |
| 5063 // Map check should have already made sure that elements is a pixel array. |
| 5064 __ Cmp(FieldOperand(elements, HeapObject::kMapOffset), |
| 5065 FACTORY->pixel_array_map()); |
| 5066 __ Assert(equal, "Elements isn't a pixel array"); |
| 5067 } |
| 5068 } |
| 4784 | 5069 |
| 4785 // Check that the smi is in range. | 5070 // Check that the smi is in range. |
| 4786 __ cmpl(untagged_key, FieldOperand(elements, PixelArray::kLengthOffset)); | 5071 __ cmpl(untagged_key, FieldOperand(elements, PixelArray::kLengthOffset)); |
| 4787 __ j(above_equal, out_of_range); // unsigned check handles negative keys. | 5072 __ j(above_equal, out_of_range); // unsigned check handles negative keys. |
| 4788 | 5073 |
| 4789 // Load and tag the element as a smi. | 5074 // Load and tag the element as a smi. |
| 4790 __ movq(elements, FieldOperand(elements, PixelArray::kExternalPointerOffset)); | 5075 __ movq(elements, FieldOperand(elements, PixelArray::kExternalPointerOffset)); |
| 4791 __ movzxbq(result, Operand(elements, untagged_key, times_1, 0)); | 5076 __ movzxbq(result, Operand(elements, untagged_key, times_1, 0)); |
| 4792 __ Integer32ToSmi(result, result); | 5077 __ Integer32ToSmi(result, result); |
| 4793 __ ret(0); | 5078 __ ret(0); |
| 4794 } | 5079 } |
| 4795 | 5080 |
| 4796 | 5081 |
| 5082 // Stores an indexed element into a pixel array, clamping the stored value. |
| 5083 void GenerateFastPixelArrayStore(MacroAssembler* masm, |
| 5084 Register receiver, |
| 5085 Register key, |
| 5086 Register value, |
| 5087 Register elements, |
| 5088 Register scratch1, |
| 5089 bool load_elements_from_receiver, |
| 5090 bool key_is_untagged, |
| 5091 Label* key_not_smi, |
| 5092 Label* value_not_smi, |
| 5093 Label* not_pixel_array, |
| 5094 Label* out_of_range) { |
| 5095 // Register use: |
| 5096 // receiver - holds the receiver and is unchanged. |
| 5097 // key - holds the key (must be a smi) and is unchanged. |
| 5098 // value - holds the value (must be a smi) and is unchanged. |
| 5099 // elements - holds the element object of the receiver on entry if |
| 5100 // load_elements_from_receiver is false, otherwise used |
| 5101 // internally to store the pixel arrays elements and |
| 5102 // external array pointer. |
| 5103 // |
| 5104 Register external_pointer = elements; |
| 5105 Register untagged_key = scratch1; |
| 5106 Register untagged_value = receiver; // Only set once success guaranteed. |
| 5107 |
| 5108 // Fetch the receiver's elements if the caller hasn't already done so. |
| 5109 if (load_elements_from_receiver) { |
| 5110 __ movq(elements, FieldOperand(receiver, JSObject::kElementsOffset)); |
| 5111 } |
| 5112 |
| 5113 // By passing NULL as not_pixel_array, callers signal that they have already |
| 5114 // verified that the receiver has pixel array elements. |
| 5115 if (not_pixel_array != NULL) { |
| 5116 __ CheckMap(elements, FACTORY->pixel_array_map(), not_pixel_array, true); |
| 5117 } else { |
| 5118 if (FLAG_debug_code) { |
| 5119 // Map check should have already made sure that elements is a pixel array. |
| 5120 __ Cmp(FieldOperand(elements, HeapObject::kMapOffset), |
| 5121 FACTORY->pixel_array_map()); |
| 5122 __ Assert(equal, "Elements isn't a pixel array"); |
| 5123 } |
| 5124 } |
| 5125 |
| 5126 // Key must be a smi and it must be in range. |
| 5127 if (key_is_untagged) { |
| 5128 untagged_key = key; |
| 5129 } else { |
| 5130 // Some callers already have verified that the key is a smi. key_not_smi is |
| 5131 // set to NULL as a sentinel for that case. Otherwise, add an explicit |
| 5132 // check to ensure the key is a smi. |
| 5133 if (key_not_smi != NULL) { |
| 5134 __ JumpIfNotSmi(key, key_not_smi); |
| 5135 } else { |
| 5136 if (FLAG_debug_code) { |
| 5137 __ AbortIfNotSmi(key); |
| 5138 } |
| 5139 } |
| 5140 __ SmiToInteger32(untagged_key, key); |
| 5141 } |
| 5142 __ cmpl(untagged_key, FieldOperand(elements, PixelArray::kLengthOffset)); |
| 5143 __ j(above_equal, out_of_range); // unsigned check handles negative keys. |
| 5144 |
| 5145 // Value must be a smi. |
| 5146 __ JumpIfNotSmi(value, value_not_smi); |
| 5147 __ SmiToInteger32(untagged_value, value); |
| 5148 |
| 5149 { // Clamp the value to [0..255]. |
| 5150 NearLabel done; |
| 5151 __ testl(untagged_value, Immediate(0xFFFFFF00)); |
| 5152 __ j(zero, &done); |
| 5153 __ setcc(negative, untagged_value); // 1 if negative, 0 if positive. |
| 5154 __ decb(untagged_value); // 0 if negative, 255 if positive. |
| 5155 __ bind(&done); |
| 5156 } |
| 5157 |
| 5158 __ movq(external_pointer, |
| 5159 FieldOperand(elements, PixelArray::kExternalPointerOffset)); |
| 5160 __ movb(Operand(external_pointer, untagged_key, times_1, 0), untagged_value); |
| 5161 __ ret(0); // Return value in rax. |
| 5162 } |
| 5163 |
| 4797 #undef __ | 5164 #undef __ |
| 4798 | 5165 |
| 4799 } } // namespace v8::internal | 5166 } } // namespace v8::internal |
| 4800 | 5167 |
| 4801 #endif // V8_TARGET_ARCH_X64 | 5168 #endif // V8_TARGET_ARCH_X64 |
| OLD | NEW |