| OLD | NEW |
| 1 // Copyright 2010 the V8 project authors. All rights reserved. | 1 // Copyright 2010 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 97 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 108 if (count > 0) { | 108 if (count > 0) { |
| 109 Comment cmnt(masm(), "[ Allocate space for locals"); | 109 Comment cmnt(masm(), "[ Allocate space for locals"); |
| 110 // The locals are initialized to a constant (the undefined value), but | 110 // The locals are initialized to a constant (the undefined value), but |
| 111 // we sync them with the actual frame to allocate space for spilling | 111 // we sync them with the actual frame to allocate space for spilling |
| 112 // them later. First sync everything above the stack pointer so we can | 112 // them later. First sync everything above the stack pointer so we can |
| 113 // use pushes to allocate and initialize the locals. | 113 // use pushes to allocate and initialize the locals. |
| 114 SyncRange(stack_pointer_ + 1, element_count() - 1); | 114 SyncRange(stack_pointer_ + 1, element_count() - 1); |
| 115 Handle<Object> undefined = Factory::undefined_value(); | 115 Handle<Object> undefined = Factory::undefined_value(); |
| 116 FrameElement initial_value = | 116 FrameElement initial_value = |
| 117 FrameElement::ConstantElement(undefined, FrameElement::SYNCED); | 117 FrameElement::ConstantElement(undefined, FrameElement::SYNCED); |
| 118 if (count == 1) { | 118 if (count < kLocalVarBound) { |
| 119 __ Push(undefined); | 119 // For fewer locals the unrolled loop is more compact. |
| 120 } else if (count < kLocalVarBound) { | 120 |
| 121 // For less locals the unrolled loop is more compact. | 121 // Hope for one of the first eight registers, where the push operation |
| 122 __ movq(kScratchRegister, undefined, RelocInfo::EMBEDDED_OBJECT); | 122 // takes only one byte (kScratchRegister needs the REX.B bit). |
| 123 Result tmp = cgen()->allocator()->Allocate(); |
| 124 ASSERT(tmp.is_valid()); |
| 125 __ movq(tmp.reg(), undefined, RelocInfo::EMBEDDED_OBJECT); |
| 123 for (int i = 0; i < count; i++) { | 126 for (int i = 0; i < count; i++) { |
| 124 __ push(kScratchRegister); | 127 __ push(tmp.reg()); |
| 125 } | 128 } |
| 126 } else { | 129 } else { |
| 127 // For more locals a loop in generated code is more compact. | 130 // For more locals a loop in generated code is more compact. |
| 128 Label alloc_locals_loop; | 131 Label alloc_locals_loop; |
| 129 Result cnt = cgen()->allocator()->Allocate(); | 132 Result cnt = cgen()->allocator()->Allocate(); |
| 130 ASSERT(cnt.is_valid()); | 133 ASSERT(cnt.is_valid()); |
| 131 __ movq(cnt.reg(), Immediate(count)); | |
| 132 __ movq(kScratchRegister, undefined, RelocInfo::EMBEDDED_OBJECT); | 134 __ movq(kScratchRegister, undefined, RelocInfo::EMBEDDED_OBJECT); |
| 135 #ifdef DEBUG |
| 136 Label loop_size; |
| 137 __ bind(&loop_size); |
| 138 #endif |
| 139 if (is_uint8(count)) { |
| 140 // Loading imm8 is shorter than loading imm32. |
| | 141 // Loading only a partial byte register, and using decb below. |
| 142 __ movb(cnt.reg(), Immediate(count)); |
| 143 } else { |
| 144 __ movl(cnt.reg(), Immediate(count)); |
| 145 } |
| 133 __ bind(&alloc_locals_loop); | 146 __ bind(&alloc_locals_loop); |
| 134 __ push(kScratchRegister); | 147 __ push(kScratchRegister); |
| 135 __ decl(cnt.reg()); | 148 if (is_uint8(count)) { |
| 149 __ decb(cnt.reg()); |
| 150 } else { |
| 151 __ decl(cnt.reg()); |
| 152 } |
| 136 __ j(not_zero, &alloc_locals_loop); | 153 __ j(not_zero, &alloc_locals_loop); |
| 154 #ifdef DEBUG |
| 155 CHECK(masm()->SizeOfCodeGeneratedSince(&loop_size) < kLocalVarBound); |
| 156 #endif |
| 137 } | 157 } |
| 138 for (int i = 0; i < count; i++) { | 158 for (int i = 0; i < count; i++) { |
| 139 elements_.Add(initial_value); | 159 elements_.Add(initial_value); |
| 140 stack_pointer_++; | 160 stack_pointer_++; |
| 141 } | 161 } |
| 142 } | 162 } |
| 143 } | 163 } |
| 144 | 164 |
| 145 | 165 |
| 146 void VirtualFrame::SaveContextRegister() { | 166 void VirtualFrame::SaveContextRegister() { |
| (...skipping 1069 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1216 Adjust(kHandlerSize - 1); | 1236 Adjust(kHandlerSize - 1); |
| 1217 __ PushTryHandler(IN_JAVASCRIPT, type); | 1237 __ PushTryHandler(IN_JAVASCRIPT, type); |
| 1218 } | 1238 } |
| 1219 | 1239 |
| 1220 | 1240 |
| 1221 #undef __ | 1241 #undef __ |
| 1222 | 1242 |
| 1223 } } // namespace v8::internal | 1243 } } // namespace v8::internal |
| 1224 | 1244 |
| 1225 #endif // V8_TARGET_ARCH_X64 | 1245 #endif // V8_TARGET_ARCH_X64 |
| OLD | NEW |