Chromium Code Reviews

Diff: src/x64/stub-cache-x64.cc

Issue 6716018: X64: Optimize access to external references. (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Created 9 years, 9 months ago
 // Copyright 2011 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 34 matching lines...)
                        StubCache::Table table,
                        Register name,
                        Register offset) {
   ASSERT_EQ(8, kPointerSize);
   ASSERT_EQ(16, sizeof(StubCache::Entry));
   // The offset register holds the entry offset times four (due to masking
   // and shifting optimizations).
   ExternalReference key_offset(isolate->stub_cache()->key_reference(table));
   Label miss;

-  __ movq(kScratchRegister, key_offset);
+  __ LoadAddress(kScratchRegister, key_offset);
   // Check that the key in the entry matches the name.
   // Multiply entry offset by 16 to get the entry address. Since the
   // offset register already holds the entry offset times four, multiply
   // by a further four.
   __ cmpl(name, Operand(kScratchRegister, offset, times_4, 0));
   __ j(not_equal, &miss);
   // Get the code entry from the cache.
   // Use key_offset + kPointerSize, rather than loading value_offset.
   __ movq(kScratchRegister,
           Operand(kScratchRegister, offset, times_4, kPointerSize));
(...skipping 324 matching lines...)
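
A note on the scaling in the probe above: the offset register already holds the entry offset times four, so applying times_4 again in the operand covers the full 16-byte stride that ASSERT_EQ(16, sizeof(StubCache::Entry)) guards, and the value slot is reached by adding kPointerSize to the key slot's address instead of loading a separate value_offset reference. Below is a minimal standalone sketch of that address arithmetic, assuming a hypothetical two-pointer {key, value} entry layout (a stand-in, not the real StubCache::Entry):

    // Standalone sketch (not V8 code): reproduces the probe's address
    // arithmetic, assuming an entry of two 8-byte pointers {key, value}.
    #include <cassert>
    #include <cstdint>
    #include <cstdio>

    struct Entry {        // hypothetical stand-in for StubCache::Entry
      const char* key;    // name slot
      const char* value;  // code slot
    };

    int main() {
      static_assert(sizeof(void*) == 8, "sketch assumes the x64 pointer size");
      static_assert(sizeof(Entry) == 16, "mirrors ASSERT_EQ(16, sizeof(Entry))");

      Entry table[4] = {{"a", "A"}, {"b", "B"}, {"c", "C"}, {"d", "D"}};
      const uintptr_t base = reinterpret_cast<uintptr_t>(table);  // key_offset analogue
      const int kPointerSize = 8;

      const int entry_index = 2;
      // The masking/shifting in the fast path leaves the offset register
      // holding the entry offset times four.
      const uintptr_t offset = entry_index * 4;

      // Operand(base, offset, times_4, 0): base + offset * 4 == base + index * 16.
      const uintptr_t key_addr = base + offset * 4;
      // Operand(base, offset, times_4, kPointerSize): the value slot sits one
      // pointer past the key slot, so no separate value_offset load is needed.
      const uintptr_t value_addr = key_addr + kPointerSize;

      assert(key_addr == reinterpret_cast<uintptr_t>(&table[entry_index].key));
      assert(value_addr == reinterpret_cast<uintptr_t>(&table[entry_index].value));
      std::printf("entry %d: key slot at +%u, value slot at +%u\n", entry_index,
                  static_cast<unsigned>(key_addr - base),
                  static_cast<unsigned>(value_addr - base));
      return 0;
    }
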
 static void CompileCallLoadPropertyWithInterceptor(MacroAssembler* masm,
                                                    Register receiver,
                                                    Register holder,
                                                    Register name,
                                                    JSObject* holder_obj) {
   PushInterceptorArguments(masm, receiver, holder, name, holder_obj);

   ExternalReference ref =
       ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorOnly));
   __ movq(rax, Immediate(5));
-  __ movq(rbx, ref);
+  __ LoadAddress(rbx, ref);

   CEntryStub stub(1);
   __ CallStub(&stub);
 }


 // Number of pointers to be reserved on stack for fast API call.
 static const int kFastApiCallArguments = 3;


(...skipping 1068 matching lines...)
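
The functional change in this hunk, as in the one above, is only the switch from __ movq(reg, ext_ref) to __ LoadAddress(reg, ext_ref). Presumably the helper is free to pick a shorter encoding than a movq with a 64-bit immediate whenever the external address lies within a signed 32-bit displacement of a base the code generator already keeps in a register, which is the kind of saving the issue title ("Optimize access to external references") points at. A standalone sketch of that reachability test, with invented names (root_base, IsInt32Displacement) used purely for illustration:

    // Standalone sketch (not the V8 implementation): decides whether an
    // external address could be reached with a 32-bit displacement off a base
    // register, which would allow a shorter "lea dst, [base + disp32]" instead
    // of loading a 64-bit immediate.
    #include <cstdint>
    #include <cstdio>
    #include <limits>

    // Returns true if |target| is within a signed 32-bit displacement of |base|.
    bool IsInt32Displacement(uintptr_t base, uintptr_t target) {
      int64_t delta = static_cast<int64_t>(target) - static_cast<int64_t>(base);
      return delta >= std::numeric_limits<int32_t>::min() &&
             delta <= std::numeric_limits<int32_t>::max();
    }

    int main() {
      uintptr_t root_base = 0x0000555500000000ULL;    // pretend base-register value
      uintptr_t near_ref  = root_base + 0x12345;      // reachable: disp32 works
      uintptr_t far_ref   = root_base + (1ULL << 40); // not reachable: needs imm64

      std::printf("near_ref reachable: %d\n", IsInt32Displacement(root_base, near_ref));
      std::printf("far_ref  reachable: %d\n", IsInt32Displacement(root_base, far_ref));
      return 0;
    }
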
       __ jmp(&call_builtin);
     }

     ExternalReference new_space_allocation_top =
         ExternalReference::new_space_allocation_top_address();
     ExternalReference new_space_allocation_limit =
         ExternalReference::new_space_allocation_limit_address();

     const int kAllocationDelta = 4;
     // Load top.
-    __ movq(rcx, new_space_allocation_top);
-    __ movq(rcx, Operand(rcx, 0));
+    __ Load(rcx, new_space_allocation_top);

     // Check if it's the end of elements.
     __ lea(rdx, FieldOperand(rbx,
                              rax, times_pointer_size,
                              FixedArray::kHeaderSize - argc * kPointerSize));
     __ cmpq(rdx, rcx);
     __ j(not_equal, &call_builtin);
     __ addq(rcx, Immediate(kAllocationDelta * kPointerSize));
-    __ movq(kScratchRegister, new_space_allocation_limit);
-    __ cmpq(rcx, Operand(kScratchRegister, 0));
+    Operand limit_operand =
+        masm()->ExternalOperand(new_space_allocation_limit);
+    __ cmpq(rcx, limit_operand);
     __ j(above, &call_builtin);

     // We fit and could grow elements.
-    __ movq(kScratchRegister, new_space_allocation_top);
-    __ movq(Operand(kScratchRegister, 0), rcx);
+    __ Store(new_space_allocation_top, rcx);
     __ movq(rcx, Operand(rsp, argc * kPointerSize));

     // Push the argument...
     __ movq(Operand(rdx, 0), rcx);
     // ... and fill the rest with holes.
     __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
     for (int i = 1; i < kAllocationDelta; i++) {
       __ movq(Operand(rdx, i * kPointerSize), kScratchRegister);
     }

(...skipping 1938 matching lines...)
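
All three replacements in this hunk collapse the old two-instruction pattern (materialize the external address in a register, then dereference through it) into a single helper call: Load for reading through new_space_allocation_top, ExternalOperand for comparing against new_space_allocation_limit, and Store for writing the bumped top back. The underlying logic is a plain bump-pointer check. A small self-contained C++ sketch of that check follows; TryGrowInPlace and the top/limit variables are hypothetical stand-ins for the cells behind the two external references:

    // Standalone sketch (not V8 code): the bump-pointer growth check performed
    // by the hunk above, expressed in plain C++.
    #include <cstdint>
    #include <cstdio>

    static const int kPointerSize = 8;
    static const int kAllocationDelta = 4;  // matches the constant in the stub

    // Try to extend an array's backing store in place: this only works when
    // the current end of the elements is exactly the new-space allocation top,
    // and bumping top by kAllocationDelta pointers stays within the limit.
    bool TryGrowInPlace(uintptr_t elements_end, uintptr_t* new_space_top,
                        uintptr_t new_space_limit) {
      if (elements_end != *new_space_top) return false;  // not at top: bail out
      uintptr_t new_top = *new_space_top + kAllocationDelta * kPointerSize;
      if (new_top > new_space_limit) return false;       // would pass the limit
      *new_space_top = new_top;                          // commit the bump
      return true;
    }

    int main() {
      uintptr_t top = 0x10000;
      uintptr_t limit = 0x10020;  // room for exactly kAllocationDelta pointers
      std::printf("grow #1: %d\n", TryGrowInPlace(0x10000, &top, limit));
      std::printf("grow #2: %d\n", TryGrowInPlace(top, &top, limit));  // limit hit
      return 0;
    }
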
   __ TailCallRuntime(Runtime::kSetProperty, 5, 1);

   return GetCode(flags);
 }

 #undef __

 } }  // namespace v8::internal

 #endif  // V8_TARGET_ARCH_X64
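
Taken together, the patch routes every external-reference access in this file through four macro-assembler helpers: LoadAddress, Load, Store, and ExternalOperand. Their shapes below are inferred only from the call sites in this diff, with stub types so the snippet compiles on its own; the real declarations live in macro-assembler-x64.h and may differ:

    // Sketch only: helper shapes inferred from the call sites in this diff.
    #include <cstdio>

    struct Register { const char* name; };
    struct Operand { const char* text; };
    struct ExternalReference { const char* symbol; };

    struct MacroAssemblerSketch {
      // A raw movq(dst, imm64) always costs a full 64-bit immediate; routing
      // through helpers lets the assembler choose a shorter encoding when one
      // is available.
      void LoadAddress(Register dst, ExternalReference ext) {   // address of ext
        std::printf("load address of %s into %s\n", ext.symbol, dst.name);
      }
      void Load(Register dst, ExternalReference ext) {          // read through ext
        std::printf("load [%s] into %s\n", ext.symbol, dst.name);
      }
      void Store(ExternalReference ext, Register src) {         // write through ext
        std::printf("store %s into [%s]\n", src.name, ext.symbol);
      }
      Operand ExternalOperand(ExternalReference ext) {          // memory operand
        return Operand{ext.symbol};
      }
    };

    int main() {
      MacroAssemblerSketch masm;
      Register rcx{"rcx"};
      ExternalReference top{"new_space_allocation_top"};
      masm.Load(rcx, top);
      masm.Store(top, rcx);
      masm.LoadAddress(rcx, top);
      Operand limit = masm.ExternalOperand(top);
      std::printf("compare against [%s]\n", limit.text);
      return 0;
    }
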
