Chromium Code Reviews

Side by Side Diff: src/x64/virtual-frame-x64.cc

Issue 126198: X64 Implementation: Make codegen load literals and assign to local variables. (Closed) Base URL: http://v8.googlecode.com/svn/branches/bleeding_edge/
Patch Set: '' Created 11 years, 6 months ago
1 // Copyright 2009 the V8 project authors. All rights reserved. 1 // Copyright 2009 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 67 matching lines...)
78 __ movq(rbp, rsp); 78 __ movq(rbp, rsp);
79 79
80 // Store the context in the frame. The context is kept in rsi and a 80 // Store the context in the frame. The context is kept in rsi and a
81 // copy is stored in the frame. The external reference to rsi 81 // copy is stored in the frame. The external reference to rsi
82 // remains. 82 // remains.
83 EmitPush(rsi); 83 EmitPush(rsi);
84 84
85 // Store the function in the frame. The frame owns the register 85 // Store the function in the frame. The frame owns the register
86 // reference now (i.e., it can keep it in rdi or spill it later). 86 // reference now (i.e., it can keep it in rdi or spill it later).
87 Push(rdi); 87 Push(rdi);
88 // SyncElementAt(element_count() - 1); 88 SyncElementAt(element_count() - 1);
89 cgen()->allocator()->Unuse(rdi); 89 cgen()->allocator()->Unuse(rdi);
90 } 90 }
91 91
92 92
93 void VirtualFrame::Exit() { 93 void VirtualFrame::Exit() {
94 Comment cmnt(masm(), "[ Exit JS frame"); 94 Comment cmnt(masm(), "[ Exit JS frame");
95 // Record the location of the JS exit code for patching when setting 95 // Record the location of the JS exit code for patching when setting
96 // a breakpoint. 96 // a breakpoint.
97 __ RecordJSReturn(); 97 __ RecordJSReturn();
98 98
99 // Avoid using the leave instruction here, because it is too 99 // Avoid using the leave instruction here, because it is too
100 // short. We need the return sequence to be at least the size of a 100 // short. We need the return sequence to be at least the size of a
101 // call instruction to support patching the exit code in the 101 // call instruction to support patching the exit code in the
102 // debugger. See VisitReturnStatement for the full return sequence. 102 // debugger. See GenerateReturnSequence for the full return sequence.
103 // TODO(X64): A patched call will be very long now. Make sure we 103 // TODO(X64): A patched call will be very long now. Make sure we
104 // have enough room. 104 // have enough room.
105 __ movq(rsp, rbp); 105 __ movq(rsp, rbp);
106 stack_pointer_ = frame_pointer(); 106 stack_pointer_ = frame_pointer();
107 for (int i = element_count() - 1; i > stack_pointer_; i--) { 107 for (int i = element_count() - 1; i > stack_pointer_; i--) {
108 FrameElement last = elements_.RemoveLast(); 108 FrameElement last = elements_.RemoveLast();
109 if (last.is_register()) { 109 if (last.is_register()) {
110 Unuse(last.reg()); 110 Unuse(last.reg());
111 } 111 }
112 } 112 }
113 113
114 EmitPop(rbp); 114 EmitPop(rbp);
115 } 115 }
116 116
117 117
118 void VirtualFrame::AllocateStackSlots() {
119 int count = local_count();
120 if (count > 0) {
121 Comment cmnt(masm(), "[ Allocate space for locals");
122 // The locals are initialized to a constant (the undefined value), but
123 // we sync them with the actual frame to allocate space for spilling
124 // them later. First sync everything above the stack pointer so we can
125 // use pushes to allocate and initialize the locals.
126 SyncRange(stack_pointer_ + 1, element_count() - 1);
127 Handle<Object> undefined = Factory::undefined_value();
128 FrameElement initial_value =
129 FrameElement::ConstantElement(undefined, FrameElement::SYNCED);
130 __ movq(kScratchRegister, undefined, RelocInfo::EMBEDDED_OBJECT);
131 for (int i = 0; i < count; i++) {
132 elements_.Add(initial_value);
133 stack_pointer_++;
134 __ push(kScratchRegister);
135 }
136 }
137 }
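
The new AllocateStackSlots leans on a core virtual-frame invariant: pushes can only extend the hardware stack top, so everything below the new locals must be synced first, after which each local is both recorded virtually (as a SYNCED constant element) and pushed for real. A minimal sketch of that bookkeeping, using hypothetical simplified types (Element, Frame) rather than the real FrameElement/VirtualFrame classes:

    // sketch_allocate_locals.cc -- a simplified model of AllocateStackSlots:
    // sync everything up to the current stack top, then record each local
    // virtually and push it for real. Element and Frame are illustrative
    // stand-ins, not the real V8 types.
    #include <cstdio>
    #include <vector>

    struct Element {
      bool synced;  // true once the value is materialized at its stack address
    };

    struct Frame {
      std::vector<Element> elements;
      int stack_pointer = -1;  // index of the highest materialized element

      void AllocateStackSlots(int local_count) {
        // Pushes can only extend the hardware stack top, so every element up
        // to the top must be materialized first (the real code calls
        // SyncRange(stack_pointer_ + 1, element_count() - 1)).
        for (int i = stack_pointer + 1; i < static_cast<int>(elements.size()); i++) {
          elements[i].synced = true;  // stands in for the emitted sync code
        }
        stack_pointer = static_cast<int>(elements.size()) - 1;

        // Each local gets a virtual element and a real stack slot in one
        // step, so there is always a home address for spilling it later.
        for (int i = 0; i < local_count; i++) {
          elements.push_back({true});
          stack_pointer++;
          std::printf("push kScratchRegister  ; local %d = undefined\n", i);
        }
      }
    };

    int main() {
      Frame f;
      f.elements = {{true}, {true}};  // e.g. the saved context and function
      f.stack_pointer = 1;
      f.AllocateStackSlots(3);  // emits three pushes
    }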
138
139
118 void VirtualFrame::EmitPop(Register reg) { 140 void VirtualFrame::EmitPop(Register reg) {
119 ASSERT(stack_pointer_ == element_count() - 1); 141 ASSERT(stack_pointer_ == element_count() - 1);
120 stack_pointer_--; 142 stack_pointer_--;
121 elements_.RemoveLast(); 143 elements_.RemoveLast();
122 __ pop(reg); 144 __ pop(reg);
123 } 145 }
124 146
125 147
126 void VirtualFrame::EmitPop(const Operand& operand) { 148 void VirtualFrame::EmitPop(const Operand& operand) {
127 ASSERT(stack_pointer_ == element_count() - 1); 149 ASSERT(stack_pointer_ == element_count() - 1);
(...skipping 20 matching lines...)
148 170
149 171
150 void VirtualFrame::EmitPush(Immediate immediate) { 172 void VirtualFrame::EmitPush(Immediate immediate) {
151 ASSERT(stack_pointer_ == element_count() - 1); 173 ASSERT(stack_pointer_ == element_count() - 1);
152 elements_.Add(FrameElement::MemoryElement()); 174 elements_.Add(FrameElement::MemoryElement());
153 stack_pointer_++; 175 stack_pointer_++;
154 __ push(immediate); 176 __ push(immediate);
155 } 177 }
156 178
157 179
158 void VirtualFrame::Drop(int a) { 180 void VirtualFrame::Drop(int count) {
159 UNIMPLEMENTED(); 181 ASSERT(height() >= count);
160 } 182 int num_virtual_elements = (element_count() - 1) - stack_pointer_;
161 183
162 int VirtualFrame::InvalidateFrameSlotAt(int a) { 184 // Emit code to lower the stack pointer if necessary.
163 UNIMPLEMENTED(); 185 if (num_virtual_elements < count) {
164 return -1; 186 int num_dropped = count - num_virtual_elements;
165 } 187 stack_pointer_ -= num_dropped;
188 __ addq(rsp, Immediate(num_dropped * kPointerSize));
189 }
190
191 // Discard elements from the virtual frame and free any registers.
192 for (int i = 0; i < count; i++) {
193 FrameElement dropped = elements_.RemoveLast();
194 if (dropped.is_register()) {
195 Unuse(dropped.reg());
196 }
197 }
198 }
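
Drop now distinguishes purely virtual elements (those above stack_pointer_, which cost nothing to discard) from materialized ones, which need a single rsp adjustment. A toy model of that split, with assumed simplified types and printed stand-ins for the emitted instructions:

    // sketch_drop.cc -- toy model of VirtualFrame::Drop: elements above the
    // hardware stack pointer are virtual and free to drop; only materialized
    // ones require an rsp adjustment. Hypothetical, simplified types.
    #include <cstdio>

    constexpr int kPointerSize = 8;

    struct Frame {
      int element_count = 10;  // total elements in the virtual frame
      int stack_pointer = 5;   // index of the highest element on the real stack

      void Drop(int count) {
        int num_virtual = (element_count - 1) - stack_pointer;
        if (num_virtual < count) {
          // Some dropped elements live on the real stack: shrink it once.
          int num_dropped = count - num_virtual;
          stack_pointer -= num_dropped;
          std::printf("addq rsp, %d\n", num_dropped * kPointerSize);
        }
        element_count -= count;  // the virtual bookkeeping is just truncated
      }
    };

    int main() {
      Frame f;
      f.Drop(3);  // all three were virtual: no code emitted
      f.Drop(3);  // one virtual + two materialized: emits "addq rsp, 16"
    }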
199
200
201 int VirtualFrame::InvalidateFrameSlotAt(int index) {
202 FrameElement original = elements_[index];
203
204 // Is this element the backing store of any copies?
205 int new_backing_index = kIllegalIndex;
206 if (original.is_copied()) {
207 // Verify it is copied, and find the first copy.
208 for (int i = index + 1; i < element_count(); i++) {
209 if (elements_[i].is_copy() && elements_[i].index() == index) {
210 new_backing_index = i;
211 break;
212 }
213 }
214 }
215
216 if (new_backing_index == kIllegalIndex) {
217 // No copies found; return kIllegalIndex.
218 if (original.is_register()) {
219 Unuse(original.reg());
220 }
221 elements_[index] = FrameElement::InvalidElement();
222 return kIllegalIndex;
223 }
224
225 // This is the backing store of copies.
226 Register backing_reg;
227 if (original.is_memory()) {
228 Result fresh = cgen()->allocator()->Allocate();
229 ASSERT(fresh.is_valid());
230 Use(fresh.reg(), new_backing_index);
231 backing_reg = fresh.reg();
232 __ movq(backing_reg, Operand(rbp, fp_relative(index)));
233 } else {
234 // The original was in a register.
235 backing_reg = original.reg();
236 set_register_location(backing_reg, new_backing_index);
237 }
238 // Invalidate the element at index.
239 elements_[index] = FrameElement::InvalidElement();
240 // Set the new backing element.
241 if (elements_[new_backing_index].is_synced()) {
242 elements_[new_backing_index] =
243 FrameElement::RegisterElement(backing_reg, FrameElement::SYNCED);
244 } else {
245 elements_[new_backing_index] =
246 FrameElement::RegisterElement(backing_reg, FrameElement::NOT_SYNCED);
247 }
248 // Update the other copies.
249 for (int i = new_backing_index + 1; i < element_count(); i++) {
250 if (elements_[i].is_copy() && elements_[i].index() == index) {
251 elements_[i].set_index(new_backing_index);
252 elements_[new_backing_index].set_copied();
253 }
254 }
255 return new_backing_index;
256 }
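
The interesting case in InvalidateFrameSlotAt is the backing-store hand-off: when the invalidated slot backs copies, the first copy is promoted to the new backing element and every later copy is re-pointed at it. A self-contained sketch of just that pointer surgery, under a hypothetical Element type, with the register and memory handling elided:

    // sketch_invalidate.cc -- simplified model of the backing-store hand-off
    // in InvalidateFrameSlotAt. Hypothetical types; sync state and register
    // allocation are elided.
    #include <cstdio>
    #include <vector>

    struct Element {
      enum Kind { kValue, kCopy, kInvalid } kind = kValue;
      int index = -1;       // for kCopy: which slot backs this element
      bool copied = false;  // for kValue: some later element copies this slot
    };

    constexpr int kIllegalIndex = -1;

    int InvalidateFrameSlotAt(std::vector<Element>& elements, int index) {
      Element original = elements[index];

      // Find the first copy of this slot, if any.
      int new_backing_index = kIllegalIndex;
      if (original.copied) {
        for (int i = index + 1; i < static_cast<int>(elements.size()); i++) {
          if (elements[i].kind == Element::kCopy && elements[i].index == index) {
            new_backing_index = i;
            break;
          }
        }
      }

      elements[index] = {Element::kInvalid, -1, false};
      if (new_backing_index == kIllegalIndex) return kIllegalIndex;

      // Promote the first copy: it now holds the value itself.
      elements[new_backing_index] = {Element::kValue, -1, false};

      // Remaining copies of the old slot follow the value to its new home.
      for (int i = new_backing_index + 1; i < static_cast<int>(elements.size()); i++) {
        if (elements[i].kind == Element::kCopy && elements[i].index == index) {
          elements[i].index = new_backing_index;
          elements[new_backing_index].copied = true;
        }
      }
      return new_backing_index;
    }

    int main() {
      // Slot 0 backs copies at slots 2 and 4.
      std::vector<Element> frame(5);
      frame[0].copied = true;
      frame[2] = {Element::kCopy, 0, false};
      frame[4] = {Element::kCopy, 0, false};
      int nb = InvalidateFrameSlotAt(frame, 0);
      std::printf("new backing index: %d\n", nb);              // 2
      std::printf("slot 4 now copies: %d\n", frame[4].index);  // 2
    }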
257
258
259 void VirtualFrame::StoreToFrameSlotAt(int index) {
260 // Store the value on top of the frame to the virtual frame slot at
261 // a given index. The value on top of the frame is left in place.
262 // This is a duplicating operation, so it can create copies.
263 ASSERT(index >= 0);
264 ASSERT(index < element_count());
265
266 int top_index = element_count() - 1;
267 FrameElement top = elements_[top_index];
268 FrameElement original = elements_[index];
269 if (top.is_copy() && top.index() == index) return;
270 ASSERT(top.is_valid());
271
272 InvalidateFrameSlotAt(index);
273
274 // InvalidateFrameSlotAt can potentially change any frame element, due
275 // to spilling registers to allocate temporaries in order to preserve
276 // the copy-on-write semantics of aliased elements. Reload top from
277 // the frame.
278 top = elements_[top_index];
279
280 if (top.is_copy()) {
281 // There are two cases based on the relative positions of the
282 // stored-to slot and the backing slot of the top element.
283 int backing_index = top.index();
284 ASSERT(backing_index != index);
285 if (backing_index < index) {
286 // 1. The top element is a copy of a slot below the stored-to
287 // slot. The stored-to slot becomes an unsynced copy of that
288 // same backing slot.
289 elements_[index] = CopyElementAt(backing_index);
290 } else {
291 // 2. The top element is a copy of a slot above the stored-to
292 // slot. The stored-to slot becomes the new (unsynced) backing
293 // slot and both the top element and the element at the former
294 // backing slot become copies of it. The sync state of the top
295 // and former backing elements is preserved.
296 FrameElement backing_element = elements_[backing_index];
297 ASSERT(backing_element.is_memory() || backing_element.is_register());
298 if (backing_element.is_memory()) {
299 // Because sets of copies are canonicalized to be backed by
300 // their lowest frame element, and because memory frame
301 // elements are backed by the corresponding stack address, we
302 // have to move the actual value down in the stack.
303 //
304 // TODO(209): consider allocating the stored-to slot to the
305 // temp register. Alternatively, allow copies to appear in
306 // any order in the frame and lazily move the value down to
307 // the slot.
308 __ movq(kScratchRegister, Operand(rbp, fp_relative(backing_index)));
309 __ movq(Operand(rbp, fp_relative(index)), kScratchRegister);
310 } else {
311 set_register_location(backing_element.reg(), index);
312 if (backing_element.is_synced()) {
313 // If the element is a register, we will not actually move
314 // anything on the stack but only update the virtual frame
315 // element.
316 backing_element.clear_sync();
317 }
318 }
319 elements_[index] = backing_element;
320
321 // The old backing element becomes a copy of the new backing
322 // element.
323 FrameElement new_element = CopyElementAt(index);
324 elements_[backing_index] = new_element;
325 if (backing_element.is_synced()) {
326 elements_[backing_index].set_sync();
327 }
328
329 // All the copies of the old backing element (including the top
330 // element) become copies of the new backing element.
331 for (int i = backing_index + 1; i < element_count(); i++) {
332 if (elements_[i].is_copy() && elements_[i].index() == backing_index) {
333 elements_[i].set_index(index);
334 }
335 }
336 }
337 return;
338 }
339
340 // Move the top element to the stored-to slot and replace it (the
341 // top element) with a copy.
342 elements_[index] = top;
343 if (top.is_memory()) {
344 // TODO(209): consider allocating the stored-to slot to the temp
345 // register. Alternatively, allow copies to appear in any order
346 // in the frame and lazily move the value down to the slot.
347 FrameElement new_top = CopyElementAt(index);
348 new_top.set_sync();
349 elements_[top_index] = new_top;
350
351 // The sync state of the former top element is correct (synced).
352 // Emit code to move the value down in the frame.
353 __ movq(kScratchRegister, Operand(rsp, 0));
354 __ movq(Operand(rbp, fp_relative(index)), kScratchRegister);
355 } else if (top.is_register()) {
356 set_register_location(top.reg(), index);
357 // The stored-to slot has the (unsynced) register reference and
358 // the top element becomes a copy. The sync state of the top is
359 // preserved.
360 FrameElement new_top = CopyElementAt(index);
361 if (top.is_synced()) {
362 new_top.set_sync();
363 elements_[index].clear_sync();
364 }
365 elements_[top_index] = new_top;
366 } else {
367 // The stored-to slot holds the same value as the top but
368 // unsynced. (We do not have copies of constants yet.)
369 ASSERT(top.is_constant());
370 elements_[index].clear_sync();
371 }
372 }
373
166 374
167 void VirtualFrame::MergeTo(VirtualFrame* a) { 375 void VirtualFrame::MergeTo(VirtualFrame* a) {
168 UNIMPLEMENTED(); 376 UNIMPLEMENTED();
169 } 377 }
170 378
379
171 Result VirtualFrame::Pop() { 380 Result VirtualFrame::Pop() {
172 UNIMPLEMENTED(); 381 FrameElement element = elements_.RemoveLast();
173 return Result(NULL); 382 int index = element_count();
174 } 383 ASSERT(element.is_valid());
384
385 bool pop_needed = (stack_pointer_ == index);
386 if (pop_needed) {
387 stack_pointer_--;
388 if (element.is_memory()) {
389 Result temp = cgen()->allocator()->Allocate();
390 ASSERT(temp.is_valid());
391 temp.set_static_type(element.static_type());
392 __ pop(temp.reg());
393 return temp;
394 }
395
396 __ addq(rsp, Immediate(kPointerSize));
397 }
398 ASSERT(!element.is_memory());
399
400 // The top element is a register, constant, or a copy. Unuse
401 // registers and follow copies to their backing store.
402 if (element.is_register()) {
403 Unuse(element.reg());
404 } else if (element.is_copy()) {
405 ASSERT(element.index() < index);
406 index = element.index();
407 element = elements_[index];
408 }
409 ASSERT(!element.is_copy());
410
411 // The element is memory, a register, or a constant.
412 if (element.is_memory()) {
413 // Memory elements could only be the backing store of a copy.
414 // Allocate the original to a register.
415 ASSERT(index <= stack_pointer_);
416 Result temp = cgen()->allocator()->Allocate();
417 ASSERT(temp.is_valid());
418 Use(temp.reg(), index);
419 FrameElement new_element =
420 FrameElement::RegisterElement(temp.reg(), FrameElement::SYNCED);
421 // Preserve the copy flag on the element.
422 if (element.is_copied()) new_element.set_copied();
423 new_element.set_static_type(element.static_type());
424 elements_[index] = new_element;
425 __ movq(temp.reg(), Operand(rbp, fp_relative(index)));
426 return Result(temp.reg(), element.static_type());
427 } else if (element.is_register()) {
428 return Result(element.reg(), element.static_type());
429 } else {
430 ASSERT(element.is_constant());
431 return Result(element.handle());
432 }
433 }
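
Pop's subtlety is copy resolution: a popped copy is followed down to its backing slot, and a memory backing is promoted to a register so the returned value is directly usable. A sketch of that control flow under assumed simplified types; the stack_pointer_ bookkeeping and the Result type are elided:

    // sketch_pop.cc -- toy model of VirtualFrame::Pop's element handling.
    // Hypothetical types and string results instead of the real Result.
    #include <cassert>
    #include <cstdio>
    #include <vector>

    struct Element {
      enum Kind { kMemory, kRegister, kConstant, kCopy } kind;
      int index = -1;  // for kCopy: index of the backing slot
    };

    const char* Pop(std::vector<Element>& elements) {
      Element element = elements.back();
      elements.pop_back();

      // The popped element itself lived on the real stack: pop it straight
      // into a register (the real code emits `pop temp.reg()`).
      if (element.kind == Element::kMemory) {
        return "register (popped from the stack)";
      }

      // Otherwise it is a register, constant, or copy. Follow a copy down
      // to its backing element; a copy is never the backing store itself.
      int index = -1;
      if (element.kind == Element::kCopy) {
        assert(element.index < static_cast<int>(elements.size()));
        index = element.index;
        element = elements[index];
      }

      if (element.kind == Element::kMemory) {
        // A memory element reached here must be the backing store of a
        // copy. The real code allocates a fresh register, loads the slot,
        // and rewrites the backing element as a SYNCED register element.
        elements[index].kind = Element::kRegister;
        return "register (loaded from the memory backing slot)";
      }
      return element.kind == Element::kRegister ? "register" : "constant";
    }

    int main() {
      // The top of the frame is a copy of slot 0, which lives in memory.
      std::vector<Element> frame = {{Element::kMemory}, {Element::kCopy, 0}};
      std::printf("popped: %s\n", Pop(frame));
    }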
434
175 435
176 Result VirtualFrame::RawCallStub(CodeStub* a) { 436 Result VirtualFrame::RawCallStub(CodeStub* a) {
177 UNIMPLEMENTED(); 437 UNIMPLEMENTED();
178 return Result(NULL); 438 return Result(NULL);
179 } 439 }
180 440
181 void VirtualFrame::SyncElementBelowStackPointer(int a) { 441 void VirtualFrame::SyncElementBelowStackPointer(int a) {
182 UNIMPLEMENTED(); 442 UNIMPLEMENTED();
183 } 443 }
184 444
185 void VirtualFrame::SyncElementByPushing(int a) {
186 UNIMPLEMENTED();
187 }
188
189 void VirtualFrame::SyncRange(int a, int b) {
190 UNIMPLEMENTED();
191 }
192
193 445
446 void VirtualFrame::SyncElementByPushing(int index) {
447 // Sync an element of the frame that is just above the stack pointer
448 // by pushing it.
449 ASSERT(index == stack_pointer_ + 1);
450 stack_pointer_++;
451 FrameElement element = elements_[index];
452
453 switch (element.type()) {
454 case FrameElement::INVALID:
455 __ push(Immediate(Smi::FromInt(0)));
456 break;
457
458 case FrameElement::MEMORY:
459 // No memory elements exist above the stack pointer.
460 UNREACHABLE();
461 break;
462
463 case FrameElement::REGISTER:
464 __ push(element.reg());
465 break;
466
467 case FrameElement::CONSTANT:
468 if (element.handle()->IsSmi()) {
469 if (CodeGeneratorScope::Current()->IsUnsafeSmi(element.handle())) {
470 CodeGeneratorScope::Current()->LoadUnsafeSmi(kScratchRegister,
471 element.handle());
472 } else {
473 CodeGeneratorScope::Current()->masm()->
474 movq(kScratchRegister, element.handle(), RelocInfo::NONE);
475 }
476 } else {
477 CodeGeneratorScope::Current()->masm()->
478 movq(kScratchRegister,
479 element.handle(),
480 RelocInfo::EMBEDDED_OBJECT);
481 }
482 __ push(kScratchRegister);
483 break;
484
485 case FrameElement::COPY: {
486 int backing_index = element.index();
487 FrameElement backing = elements_[backing_index];
488 ASSERT(backing.is_memory() || backing.is_register());
489 if (backing.is_memory()) {
490 __ push(Operand(rbp, fp_relative(backing_index)));
491 } else {
492 __ push(backing.reg());
493 }
494 break;
495 }
496 }
497 elements_[index].set_sync();
498 }
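
SyncElementByPushing materializes the element just above the stack pointer with a single push whose operand depends on the element's kind: invalid slots get an inert smi as filler, constants go through kScratchRegister, and copies push their backing slot's current location. A compact sketch, with a hypothetical enum and printed stand-ins for the emitted pushes:

    // sketch_sync_by_push.cc -- toy model of SyncElementByPushing.
    #include <cstdio>
    #include <initializer_list>

    enum Kind { kInvalid, kRegister, kConstant, kCopy };

    void SyncElementByPushing(Kind kind) {
      switch (kind) {
        case kInvalid:
          // Dead slot: push an inert smi as a placeholder.
          std::printf("push 0  ; smi filler\n");
          break;
        case kRegister:
          std::printf("push <reg>\n");
          break;
        case kConstant:
          // The real code first loads the handle into kScratchRegister,
          // taking a separate path for smis that are unsafe to embed.
          std::printf("movq kScratchRegister, <handle>\n");
          std::printf("push kScratchRegister\n");
          break;
        case kCopy:
          // Push from wherever the backing slot currently lives
          // (a memory operand or a register).
          std::printf("push <backing>\n");
          break;
      }
    }

    int main() {
      for (Kind k : {kInvalid, kRegister, kConstant, kCopy}) {
        SyncElementByPushing(k);
      }
    }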
499
500
501 // Clear the dirty bits for the range of elements in
502 // [min(stack_pointer_ + 1, begin), end].
503 void VirtualFrame::SyncRange(int begin, int end) {
504 ASSERT(begin >= 0);
505 ASSERT(end < element_count());
506 // Sync elements below the range if they have not been materialized
507 // on the stack.
508 int start = Min(begin, stack_pointer_ + 1);
509
510 // If positive we have to adjust the stack pointer.
511 int delta = end - stack_pointer_;
512 if (delta > 0) {
513 stack_pointer_ = end;
514 __ subq(rsp, Immediate(delta * kPointerSize));
515 }
516
517 for (int i = start; i <= end; i++) {
518 if (!elements_[i].is_synced()) SyncElementBelowStackPointer(i);
519 }
520 }
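
SyncRange deliberately adjusts rsp at most once for the whole range and only then stores each dirty element at its home address, instead of growing the stack element by element. A toy model of that bookkeeping, with assumed simplified types and printed stand-ins for the emitted instructions:

    // sketch_sync_range.cc -- toy model of SyncRange: one rsp adjustment
    // for the whole range, then a store per unsynced element.
    #include <cstdio>
    #include <vector>

    constexpr int kPointerSize = 8;

    struct Frame {
      std::vector<bool> synced;
      int stack_pointer;

      void SyncRange(int begin, int end) {
        // Elements below `begin` that were never materialized must be
        // synced too, so the stack beneath the new top is valid.
        int start = begin < stack_pointer + 1 ? begin : stack_pointer + 1;

        // Grow the hardware stack once for the whole range.
        int delta = end - stack_pointer;
        if (delta > 0) {
          stack_pointer = end;
          std::printf("subq rsp, %d\n", delta * kPointerSize);
        }

        for (int i = start; i <= end; i++) {
          if (!synced[i]) {
            std::printf("movq [home of slot %d], <value %d>\n", i, i);
            synced[i] = true;
          }
        }
      }
    };

    int main() {
      Frame f{{true, true, false, false, false}, 2};
      f.SyncRange(3, 4);  // one subq of 16, then two stores
    }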
521
522
194 #undef __ 523 #undef __
195 524
196 } } // namespace v8::internal 525 } } // namespace v8::internal