| OLD | NEW |
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 34 matching lines...) |
| 45 StubCache::Table table, | 45 StubCache::Table table, |
| 46 Register name, | 46 Register name, |
| 47 Register offset) { | 47 Register offset) { |
| 48 ASSERT_EQ(8, kPointerSize); | 48 ASSERT_EQ(8, kPointerSize); |
| 49 ASSERT_EQ(16, sizeof(StubCache::Entry)); | 49 ASSERT_EQ(16, sizeof(StubCache::Entry)); |
| 50 // The offset register holds the entry offset times four (due to masking | 50 // The offset register holds the entry offset times four (due to masking |
| 51 // and shifting optimizations). | 51 // and shifting optimizations). |
| 52 ExternalReference key_offset(isolate->stub_cache()->key_reference(table)); | 52 ExternalReference key_offset(isolate->stub_cache()->key_reference(table)); |
| 53 Label miss; | 53 Label miss; |
| 54 | 54 |
| 55 __ movq(kScratchRegister, key_offset); | 55 __ LoadAddress(kScratchRegister, key_offset); |
| 56 // Check that the key in the entry matches the name. | 56 // Check that the key in the entry matches the name. |
| 57 // Multiply entry offset by 16 to get the entry address. Since the | 57 // Multiply entry offset by 16 to get the entry address. Since the |
| 58 // offset register already holds the entry offset times four, multiply | 58 // offset register already holds the entry offset times four, multiply |
| 59 // by a further four. | 59 // by a further four. |
| 60 __ cmpl(name, Operand(kScratchRegister, offset, times_4, 0)); | 60 __ cmpl(name, Operand(kScratchRegister, offset, times_4, 0)); |
| 61 __ j(not_equal, &miss); | 61 __ j(not_equal, &miss); |
| 62 // Get the code entry from the cache. | 62 // Get the code entry from the cache. |
| 63 // Use key_offset + kPointerSize, rather than loading value_offset. | 63 // Use key_offset + kPointerSize, rather than loading value_offset. |
| 64 __ movq(kScratchRegister, | 64 __ movq(kScratchRegister, |
| 65 Operand(kScratchRegister, offset, times_4, kPointerSize)); | 65 Operand(kScratchRegister, offset, times_4, kPointerSize)); |
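The scaling in the hunk above is easy to misread, so here is a minimal C++ sketch of the same address arithmetic, assuming only what the ASSERTs state (8-byte pointers, 16-byte StubCache::Entry records); the helper names are illustrative, not V8's:

    #include <cstdint>

    struct Entry { uintptr_t key; uintptr_t value; };  // 16 bytes on x64

    // 'offset' arrives pre-scaled to entry_index * 4 by the hash masking
    // code, so the times_4 operand scale yields entry_index * 16, i.e. a
    // byte offset of exactly one Entry per index.
    inline uintptr_t* KeySlot(uintptr_t key_base, uintptr_t offset) {
      return reinterpret_cast<uintptr_t*>(key_base + offset * 4);
    }

    // The value sits one pointer past the key, which is why the code loads
    // from key_offset + kPointerSize instead of a separate value_offset.
    inline uintptr_t* ValueSlot(uintptr_t key_base, uintptr_t offset) {
      return reinterpret_cast<uintptr_t*>(key_base + offset * 4 +
                                          sizeof(uintptr_t));
    }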
| (...skipping 325 matching lines...) |
| 391 Register receiver, | 391 Register receiver, |
| 392 Register holder, | 392 Register holder, |
| 393 Register name, | 393 Register name, |
| 394 JSObject* holder_obj) { | 394 JSObject* holder_obj) { |
| 395 PushInterceptorArguments(masm, receiver, holder, name, holder_obj); | 395 PushInterceptorArguments(masm, receiver, holder, name, holder_obj); |
| 396 | 396 |
| 397 ExternalReference ref = | 397 ExternalReference ref = |
| 398 ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorOnly), | 398 ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorOnly), |
| 399 masm->isolate()); | 399 masm->isolate()); |
| 400 __ movq(rax, Immediate(5)); | 400 __ movq(rax, Immediate(5)); |
| 401 __ movq(rbx, ref); | 401 __ LoadAddress(rbx, ref); |
| 402 | 402 |
| 403 CEntryStub stub(1); | 403 CEntryStub stub(1); |
| 404 __ CallStub(&stub); | 404 __ CallStub(&stub); |
| 405 } | 405 } |
| 406 | 406 |
| 407 | 407 |
| 408 // Number of pointers to be reserved on stack for fast API call. | 408 // Number of pointers to be reserved on stack for fast API call. |
| 409 static const int kFastApiCallArguments = 3; | 409 static const int kFastApiCallArguments = 3; |
| 410 | 410 |
| 411 | 411 |
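The substantive change in this hunk is the same as in the first one: a raw movq of an ExternalReference becomes LoadAddress (and, further down, Load/Store/ExternalOperand for reads and writes through such references). A plausible motivation, sketched below with hypothetical names, is that a macro gets to choose a cheaper encoding when one is available; treat the byte counts and the root-register trick as assumptions about this patch, not confirmed facts:

    #include <cstdint>

    // Hypothetical stand-in for a code address; the real V8 types differ.
    using Address = intptr_t;

    // A LoadAddress-style macro can pick its encoding: when the external
    // address lies within a 32-bit displacement of a register pinned to
    // the roots area, a 7-byte 'lea dst, [kRootRegister + disp32]' can
    // replace the 10-byte 'movq dst, imm64'.
    bool CanUseRootRelativeLea(Address target, Address roots_base) {
      intptr_t delta = target - roots_base;
      return delta == static_cast<int32_t>(delta);  // fits in disp32
    }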
| (...skipping 1072 matching lines...) |
| 1484 } | 1484 } |
| 1485 | 1485 |
| 1486 Isolate* isolate = masm()->isolate(); | 1486 Isolate* isolate = masm()->isolate(); |
| 1487 ExternalReference new_space_allocation_top = | 1487 ExternalReference new_space_allocation_top = |
| 1488 ExternalReference::new_space_allocation_top_address(isolate); | 1488 ExternalReference::new_space_allocation_top_address(isolate); |
| 1489 ExternalReference new_space_allocation_limit = | 1489 ExternalReference new_space_allocation_limit = |
| 1490 ExternalReference::new_space_allocation_limit_address(isolate); | 1490 ExternalReference::new_space_allocation_limit_address(isolate); |
| 1491 | 1491 |
| 1492 const int kAllocationDelta = 4; | 1492 const int kAllocationDelta = 4; |
| 1493 // Load top. | 1493 // Load top. |
| 1494 __ movq(rcx, new_space_allocation_top); | 1494 __ Load(rcx, new_space_allocation_top); |
| 1495 __ movq(rcx, Operand(rcx, 0)); | |
| 1496 | 1495 |
| 1497 // Check if it's the end of elements. | 1496 // Check if it's the end of elements. |
| 1498 __ lea(rdx, FieldOperand(rbx, | 1497 __ lea(rdx, FieldOperand(rbx, |
| 1499 rax, times_pointer_size, | 1498 rax, times_pointer_size, |
| 1500 FixedArray::kHeaderSize - argc * kPointerSize)); | 1499 FixedArray::kHeaderSize - argc * kPointerSize)); |
| 1501 __ cmpq(rdx, rcx); | 1500 __ cmpq(rdx, rcx); |
| 1502 __ j(not_equal, &call_builtin); | 1501 __ j(not_equal, &call_builtin); |
| 1503 __ addq(rcx, Immediate(kAllocationDelta * kPointerSize)); | 1502 __ addq(rcx, Immediate(kAllocationDelta * kPointerSize)); |
| 1504 __ movq(kScratchRegister, new_space_allocation_limit); | 1503 Operand limit_operand = |
| 1505 __ cmpq(rcx, Operand(kScratchRegister, 0)); | 1504 masm()->ExternalOperand(new_space_allocation_limit); |
| | 1505 __ cmpq(rcx, limit_operand); |
| 1506 __ j(above, &call_builtin); | 1506 __ j(above, &call_builtin); |
| 1507 | 1507 |
| 1508 // We fit and could grow elements. | 1508 // We fit and could grow elements. |
| 1509 __ movq(kScratchRegister, new_space_allocation_top); | 1509 __ Store(new_space_allocation_top, rcx); |
| 1510 __ movq(Operand(kScratchRegister, 0), rcx); | |
| 1511 __ movq(rcx, Operand(rsp, argc * kPointerSize)); | 1510 __ movq(rcx, Operand(rsp, argc * kPointerSize)); |
| 1512 | 1511 |
| 1513 // Push the argument... | 1512 // Push the argument... |
| 1514 __ movq(Operand(rdx, 0), rcx); | 1513 __ movq(Operand(rdx, 0), rcx); |
| 1515 // ... and fill the rest with holes. | 1514 // ... and fill the rest with holes. |
| 1516 __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex); | 1515 __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex); |
| 1517 for (int i = 1; i < kAllocationDelta; i++) { | 1516 for (int i = 1; i < kAllocationDelta; i++) { |
| 1518 __ movq(Operand(rdx, i * kPointerSize), kScratchRegister); | 1517 __ movq(Operand(rdx, i * kPointerSize), kScratchRegister); |
| 1519 } | 1518 } |
| 1520 | 1519 |
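The hunk above grows the elements backing store of Array.push in place. A minimal C++ sketch of the check it emits, with assumed names: the store can be extended only if it is the newest object in new space (its end equals the allocation top) and kAllocationDelta more slots still fit under the allocation limit.

    #include <cstdint>

    bool TryGrowInPlace(uintptr_t elements_end, uintptr_t* top,
                        uintptr_t limit, int delta_slots, int pointer_size) {
      if (elements_end != *top) return false;  // not the last allocation
      uintptr_t new_top = *top + delta_slots * pointer_size;
      if (new_top > limit) return false;  // new space exhausted: use builtin
      *top = new_top;                     // bump the allocation top
      return true;  // caller writes the pushed value and hole-fills the rest
    }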
| (...skipping 1922 matching lines...) |
| 3443 __ TailCallRuntime(Runtime::kSetProperty, 5, 1); | 3442 __ TailCallRuntime(Runtime::kSetProperty, 5, 1); |
| 3444 | 3443 |
| 3445 return GetCode(flags); | 3444 return GetCode(flags); |
| 3446 } | 3445 } |
| 3447 | 3446 |
| 3448 #undef __ | 3447 #undef __ |
| 3449 | 3448 |
| 3450 } } // namespace v8::internal | 3449 } } // namespace v8::internal |
| 3451 | 3450 |
| 3452 #endif // V8_TARGET_ARCH_X64 | 3451 #endif // V8_TARGET_ARCH_X64 |