| OLD | NEW |
| 1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 2442 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2453 __ JumpIfSmi(object, &false_result); | 2453 __ JumpIfSmi(object, &false_result); |
| 2454 | 2454 |
| 2455 // This is the inlined call site instanceof cache. The two occurrences of the | 2455 // This is the inlined call site instanceof cache. The two occurrences of the |
| 2456 // hole value will be patched to the last map/result pair generated by the | 2456 // hole value will be patched to the last map/result pair generated by the |
| 2457 // instanceof stub. | 2457 // instanceof stub. |
| 2458 Label cache_miss; | 2458 Label cache_miss; |
| 2459 // Use a temp register to avoid memory operands with variable lengths. | 2459 // Use a temp register to avoid memory operands with variable lengths. |
| 2460 Register map = ToRegister(instr->temp()); | 2460 Register map = ToRegister(instr->temp()); |
| 2461 __ movq(map, FieldOperand(object, HeapObject::kMapOffset)); | 2461 __ movq(map, FieldOperand(object, HeapObject::kMapOffset)); |
| 2462 __ bind(deferred->map_check()); // Label for calculating code patching. | 2462 __ bind(deferred->map_check()); // Label for calculating code patching. |
| 2463 Handle<JSGlobalPropertyCell> cache_cell = | 2463 Handle<Cell> cache_cell = factory()->NewCell(factory()->the_hole_value()); |
| 2464 factory()->NewJSGlobalPropertyCell(factory()->the_hole_value()); | 2464 __ movq(kScratchRegister, cache_cell, RelocInfo::CELL); |
| 2465 __ movq(kScratchRegister, cache_cell, RelocInfo::GLOBAL_PROPERTY_CELL); | |
| 2466 __ cmpq(map, Operand(kScratchRegister, 0)); | 2465 __ cmpq(map, Operand(kScratchRegister, 0)); |
| 2467 __ j(not_equal, &cache_miss, Label::kNear); | 2466 __ j(not_equal, &cache_miss, Label::kNear); |
| 2468 // Patched to load either true or false. | 2467 // Patched to load either true or false. |
| 2469 __ LoadRoot(ToRegister(instr->result()), Heap::kTheHoleValueRootIndex); | 2468 __ LoadRoot(ToRegister(instr->result()), Heap::kTheHoleValueRootIndex); |
| 2470 #ifdef DEBUG | 2469 #ifdef DEBUG |
| 2471 // Check that the code size between patch label and patch sites is invariant. | 2470 // Check that the code size between patch label and patch sites is invariant. |
| 2472 Label end_of_patched_code; | 2471 Label end_of_patched_code; |
| 2473 __ bind(&end_of_patched_code); | 2472 __ bind(&end_of_patched_code); |
| 2474 ASSERT(true); | 2473 ASSERT(true); |
| 2475 #endif | 2474 #endif |
| (...skipping 148 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2624 __ Move(rcx, instr->name()); | 2623 __ Move(rcx, instr->name()); |
| 2625 RelocInfo::Mode mode = instr->for_typeof() ? RelocInfo::CODE_TARGET : | 2624 RelocInfo::Mode mode = instr->for_typeof() ? RelocInfo::CODE_TARGET : |
| 2626 RelocInfo::CODE_TARGET_CONTEXT; | 2625 RelocInfo::CODE_TARGET_CONTEXT; |
| 2627 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize(); | 2626 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize(); |
| 2628 CallCode(ic, mode, instr); | 2627 CallCode(ic, mode, instr); |
| 2629 } | 2628 } |
| 2630 | 2629 |
| 2631 | 2630 |
| 2632 void LCodeGen::DoStoreGlobalCell(LStoreGlobalCell* instr) { | 2631 void LCodeGen::DoStoreGlobalCell(LStoreGlobalCell* instr) { |
| 2633 Register value = ToRegister(instr->value()); | 2632 Register value = ToRegister(instr->value()); |
| 2634 Handle<JSGlobalPropertyCell> cell_handle = instr->hydrogen()->cell(); | 2633 Handle<Cell> cell_handle = instr->hydrogen()->cell(); |
| 2635 | 2634 |
| 2636 // If the cell we are storing to contains the hole it could have | 2635 // If the cell we are storing to contains the hole it could have |
| 2637 // been deleted from the property dictionary. In that case, we need | 2636 // been deleted from the property dictionary. In that case, we need |
| 2638 // to update the property details in the property dictionary to mark | 2637 // to update the property details in the property dictionary to mark |
| 2639 // it as no longer deleted. We deoptimize in that case. | 2638 // it as no longer deleted. We deoptimize in that case. |
| 2640 if (instr->hydrogen()->RequiresHoleCheck()) { | 2639 if (instr->hydrogen()->RequiresHoleCheck()) { |
| 2641 // We have a temp because CompareRoot might clobber kScratchRegister. | 2640 // We have a temp because CompareRoot might clobber kScratchRegister. |
| 2642 Register cell = ToRegister(instr->temp()); | 2641 Register cell = ToRegister(instr->temp()); |
| 2643 ASSERT(!value.is(cell)); | 2642 ASSERT(!value.is(cell)); |
| 2644 __ movq(cell, cell_handle, RelocInfo::GLOBAL_PROPERTY_CELL); | 2643 __ movq(cell, cell_handle, RelocInfo::CELL); |
| 2645 __ CompareRoot(Operand(cell, 0), Heap::kTheHoleValueRootIndex); | 2644 __ CompareRoot(Operand(cell, 0), Heap::kTheHoleValueRootIndex); |
| 2646 DeoptimizeIf(equal, instr->environment()); | 2645 DeoptimizeIf(equal, instr->environment()); |
| 2647 // Store the value. | 2646 // Store the value. |
| 2648 __ movq(Operand(cell, 0), value); | 2647 __ movq(Operand(cell, 0), value); |
| 2649 } else { | 2648 } else { |
| 2650 // Store the value. | 2649 // Store the value. |
| 2651 __ movq(kScratchRegister, cell_handle, RelocInfo::GLOBAL_PROPERTY_CELL); | 2650 __ movq(kScratchRegister, cell_handle, RelocInfo::CELL); |
| 2652 __ movq(Operand(kScratchRegister, 0), value); | 2651 __ movq(Operand(kScratchRegister, 0), value); |
| 2653 } | 2652 } |
| 2654 // Cells are always rescanned, so no write barrier here. | 2653 // Cells are always rescanned, so no write barrier here. |
| 2655 } | 2654 } |
| 2656 | 2655 |
| 2657 | 2656 |
| 2658 void LCodeGen::DoStoreGlobalGeneric(LStoreGlobalGeneric* instr) { | 2657 void LCodeGen::DoStoreGlobalGeneric(LStoreGlobalGeneric* instr) { |
| 2659 ASSERT(ToRegister(instr->global_object()).is(rdx)); | 2658 ASSERT(ToRegister(instr->global_object()).is(rdx)); |
| 2660 ASSERT(ToRegister(instr->value()).is(rax)); | 2659 ASSERT(ToRegister(instr->value()).is(rax)); |
| 2661 | 2660 |
| (...skipping 2935 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 5597 FixedArray::kHeaderSize - kPointerSize)); | 5596 FixedArray::kHeaderSize - kPointerSize)); |
| 5598 __ bind(&done); | 5597 __ bind(&done); |
| 5599 } | 5598 } |
| 5600 | 5599 |
| 5601 | 5600 |
| 5602 #undef __ | 5601 #undef __ |
| 5603 | 5602 |
| 5604 } } // namespace v8::internal | 5603 } } // namespace v8::internal |
| 5605 | 5604 |
| 5606 #endif // V8_TARGET_ARCH_X64 | 5605 #endif // V8_TARGET_ARCH_X64 |
| OLD | NEW |