Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(42)

Side by Side Diff: src/x64/stub-cache-x64.cc

Issue 7104107: Incremental mode now works for x64. The only difference (Closed) Base URL: http://v8.googlecode.com/svn/branches/experimental/gc/
Patch Set: Created 9 years, 6 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
OLDNEW
1 // Copyright 2011 the V8 project authors. All rights reserved. 1 // Copyright 2011 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 1396 matching lines...) Expand 10 before | Expand all | Expand 10 after
1407 1407
1408 // Get the elements array of the object. 1408 // Get the elements array of the object.
1409 __ movq(rbx, FieldOperand(rdx, JSArray::kElementsOffset)); 1409 __ movq(rbx, FieldOperand(rdx, JSArray::kElementsOffset));
1410 1410
1411 // Check that the elements are in fast mode and writable. 1411 // Check that the elements are in fast mode and writable.
1412 __ Cmp(FieldOperand(rbx, HeapObject::kMapOffset), 1412 __ Cmp(FieldOperand(rbx, HeapObject::kMapOffset),
1413 factory()->fixed_array_map()); 1413 factory()->fixed_array_map());
1414 __ j(not_equal, &call_builtin); 1414 __ j(not_equal, &call_builtin);
1415 1415
1416 if (argc == 1) { // Otherwise fall through to call builtin. 1416 if (argc == 1) { // Otherwise fall through to call builtin.
1417 Label exit, attempt_to_grow_elements, with_write_barrier; 1417 Label attempt_to_grow_elements, with_write_barrier;
1418 1418
1419 // Get the array's length into rax and calculate new length. 1419 // Get the array's length into rax and calculate new length.
1420 __ SmiToInteger32(rax, FieldOperand(rdx, JSArray::kLengthOffset)); 1420 __ SmiToInteger32(rax, FieldOperand(rdx, JSArray::kLengthOffset));
1421 STATIC_ASSERT(FixedArray::kMaxLength < Smi::kMaxValue); 1421 STATIC_ASSERT(FixedArray::kMaxLength < Smi::kMaxValue);
1422 __ addl(rax, Immediate(argc)); 1422 __ addl(rax, Immediate(argc));
1423 1423
1424 // Get the elements' length into rcx. 1424 // Get the elements' length into rcx.
1425 __ SmiToInteger32(rcx, FieldOperand(rbx, FixedArray::kLengthOffset)); 1425 __ SmiToInteger32(rcx, FieldOperand(rbx, FixedArray::kLengthOffset));
1426 1426
1427 // Check if we could survive without allocation. 1427 // Check if we could survive without allocation.
1428 __ cmpl(rax, rcx); 1428 __ cmpl(rax, rcx);
1429 __ j(greater, &attempt_to_grow_elements); 1429 __ j(greater, &attempt_to_grow_elements);
1430 1430
1431 // Save new length. 1431 // Save new length.
1432 __ Integer32ToSmiField(FieldOperand(rdx, JSArray::kLengthOffset), rax); 1432 __ Integer32ToSmiField(FieldOperand(rdx, JSArray::kLengthOffset), rax);
1433 1433
1434 // Push the element. 1434 // Push the element.
1435 __ movq(rcx, Operand(rsp, argc * kPointerSize)); 1435 __ movq(rcx, Operand(rsp, argc * kPointerSize));
1436 __ lea(rdx, FieldOperand(rbx, 1436 __ lea(rdx, FieldOperand(rbx,
1437 rax, times_pointer_size, 1437 rax, times_pointer_size,
1438 FixedArray::kHeaderSize - argc * kPointerSize)); 1438 FixedArray::kHeaderSize - argc * kPointerSize));
1439 __ movq(Operand(rdx, 0), rcx); 1439 __ movq(Operand(rdx, 0), rcx);
1440 1440
1441 // Check if value is a smi. 1441 // Check if value is a smi.
1442 __ Integer32ToSmi(rax, rax); // Return new length as smi. 1442 __ Integer32ToSmi(rax, rax); // Return new length as smi.
1443 1443
1444 __ JumpIfNotSmi(rcx, &with_write_barrier); 1444 __ JumpIfNotSmi(rcx, &with_write_barrier);
1445 1445
1446 __ bind(&exit);
1447 __ ret((argc + 1) * kPointerSize); 1446 __ ret((argc + 1) * kPointerSize);
1448 1447
1449 __ bind(&with_write_barrier); 1448 __ bind(&with_write_barrier);
1450 1449
1451 __ RecordWrite( 1450 __ RecordWrite(
1452 rbx, rdx, rcx, kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK); 1451 rbx, rdx, rcx, kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
1453 1452
1454 __ ret((argc + 1) * kPointerSize); 1453 __ ret((argc + 1) * kPointerSize);
1455 1454
1456 __ bind(&attempt_to_grow_elements); 1455 __ bind(&attempt_to_grow_elements);
(...skipping 27 matching lines...) Expand all
1484 __ movq(rcx, Operand(rsp, argc * kPointerSize)); 1483 __ movq(rcx, Operand(rsp, argc * kPointerSize));
1485 1484
1486 // Push the argument... 1485 // Push the argument...
1487 __ movq(Operand(rdx, 0), rcx); 1486 __ movq(Operand(rdx, 0), rcx);
1488 // ... and fill the rest with holes. 1487 // ... and fill the rest with holes.
1489 __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex); 1488 __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
1490 for (int i = 1; i < kAllocationDelta; i++) { 1489 for (int i = 1; i < kAllocationDelta; i++) {
1491 __ movq(Operand(rdx, i * kPointerSize), kScratchRegister); 1490 __ movq(Operand(rdx, i * kPointerSize), kScratchRegister);
1492 } 1491 }
1493 1492
1493 // We know the elements array is in new space so we don't need the
1494 // remembered set, but we just pushed a value onto it so we may have to
1495 // tell the incremental marker to rescan the object that we just grew. We
1496 // don't need to worry about the holes because they are in old space and
1497 // already marked black.
1498 __ RecordWrite(rbx, rdx, rcx, kDontSaveFPRegs, OMIT_REMEMBERED_SET);
1499
1494 // Restore receiver to rdx as finish sequence assumes it's here. 1500 // Restore receiver to rdx as finish sequence assumes it's here.
1495 __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize)); 1501 __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
1496 1502
1497 // Increment element's and array's sizes. 1503 // Increment element's and array's sizes.
1498 __ SmiAddConstant(FieldOperand(rbx, FixedArray::kLengthOffset), 1504 __ SmiAddConstant(FieldOperand(rbx, FixedArray::kLengthOffset),
1499 Smi::FromInt(kAllocationDelta)); 1505 Smi::FromInt(kAllocationDelta));
1500 1506
1501 // Make new length a smi before returning it. 1507 // Make new length a smi before returning it.
1502 __ Integer32ToSmi(rax, rax); 1508 __ Integer32ToSmi(rax, rax);
1503 __ movq(FieldOperand(rdx, JSArray::kLengthOffset), rax); 1509 __ movq(FieldOperand(rdx, JSArray::kLengthOffset), rax);
1504 1510
1505 // Elements are in new space, so write barrier is not required.
1506 __ ret((argc + 1) * kPointerSize); 1511 __ ret((argc + 1) * kPointerSize);
1507 } 1512 }
1508 1513
1509 __ bind(&call_builtin); 1514 __ bind(&call_builtin);
1510 __ TailCallExternalReference(ExternalReference(Builtins::c_ArrayPush, 1515 __ TailCallExternalReference(ExternalReference(Builtins::c_ArrayPush,
1511 isolate()), 1516 isolate()),
1512 argc + 1, 1517 argc + 1,
1513 1); 1518 1);
1514 } 1519 }
1515 1520
(...skipping 918 matching lines...) Expand 10 before | Expand all | Expand 10 after
2434 // -- rdx : receiver 2439 // -- rdx : receiver
2435 // -- rsp[0] : return address 2440 // -- rsp[0] : return address
2436 // ----------------------------------- 2441 // -----------------------------------
2437 Label miss; 2442 Label miss;
2438 2443
2439 // Check that the map of the global has not changed. 2444 // Check that the map of the global has not changed.
2440 __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset), 2445 __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset),
2441 Handle<Map>(object->map())); 2446 Handle<Map>(object->map()));
2442 __ j(not_equal, &miss); 2447 __ j(not_equal, &miss);
2443 2448
2449 // Compute the cell operand to use.
2450 __ Move(rbx, Handle<JSGlobalPropertyCell>(cell));
2451 Operand cell_operand = FieldOperand(rbx, JSGlobalPropertyCell::kValueOffset);
2452
2444 // Check that the value in the cell is not the hole. If it is, this 2453 // Check that the value in the cell is not the hole. If it is, this
2445 // cell could have been deleted and reintroducing the global needs 2454 // cell could have been deleted and reintroducing the global needs
2446 // to update the property details in the property dictionary of the 2455 // to update the property details in the property dictionary of the
2447 // global object. We bail out to the runtime system to do that. 2456 // global object. We bail out to the runtime system to do that.
2448 __ Move(rbx, Handle<JSGlobalPropertyCell>(cell)); 2457 __ CompareRoot(cell_operand, Heap::kTheHoleValueRootIndex);
2449 __ CompareRoot(FieldOperand(rbx, JSGlobalPropertyCell::kValueOffset),
2450 Heap::kTheHoleValueRootIndex);
2451 __ j(equal, &miss); 2458 __ j(equal, &miss);
2452 2459
2453 // Store the value in the cell. 2460 // Store the value in the cell.
2454 __ movq(FieldOperand(rbx, JSGlobalPropertyCell::kValueOffset), rax); 2461 __ movq(cell_operand, rax);
2462 Label done;
2463 __ JumpIfSmi(rax, &done);
2464
2465 __ movq(rcx, rax);
2466 __ lea(rdx, cell_operand);
2467 // Cells are always in the remembered set.
2468 __ RecordWrite(rbx, // Object.
2469 rdx, // Address.
2470 rcx, // Value.
2471 kDontSaveFPRegs,
2472 OMIT_REMEMBERED_SET,
2473 OMIT_SMI_CHECK);
2474
2455 2475
2456 // Return the value (register rax). 2476 // Return the value (register rax).
2477 __ bind(&done);
2478
2457 Counters* counters = isolate()->counters(); 2479 Counters* counters = isolate()->counters();
2458 __ IncrementCounter(counters->named_store_global_inline(), 1); 2480 __ IncrementCounter(counters->named_store_global_inline(), 1);
2459 __ ret(0); 2481 __ ret(0);
2460 2482
2461 // Handle store cache miss. 2483 // Handle store cache miss.
2462 __ bind(&miss); 2484 __ bind(&miss);
2463 __ IncrementCounter(counters->named_store_global_inline_miss(), 1); 2485 __ IncrementCounter(counters->named_store_global_inline_miss(), 1);
2464 Handle<Code> ic = isolate()->builtins()->StoreIC_Miss(); 2486 Handle<Code> ic = isolate()->builtins()->StoreIC_Miss();
2465 __ Jump(ic, RelocInfo::CODE_TARGET); 2487 __ Jump(ic, RelocInfo::CODE_TARGET);
2466 2488
(...skipping 1136 matching lines...) Expand 10 before | Expand all | Expand 10 after
3603 masm->isolate()->builtins()->KeyedStoreIC_MissForceGeneric(); 3625 masm->isolate()->builtins()->KeyedStoreIC_MissForceGeneric();
3604 __ jmp(ic_force_generic, RelocInfo::CODE_TARGET); 3626 __ jmp(ic_force_generic, RelocInfo::CODE_TARGET);
3605 } 3627 }
3606 3628
3607 3629
3608 #undef __ 3630 #undef __
3609 3631
3610 } } // namespace v8::internal 3632 } } // namespace v8::internal
3611 3633
3612 #endif // V8_TARGET_ARCH_X64 3634 #endif // V8_TARGET_ARCH_X64
OLDNEW

Powered by Google App Engine
This is Rietveld 408576698