| OLD | NEW |
| 1 // Copyright 2008 the V8 project authors. All rights reserved. | 1 // Copyright 2008 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 148 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 159 cgen_->allocator()->Use(reg); | 159 cgen_->allocator()->Use(reg); |
| 160 } | 160 } |
| 161 | 161 |
| 162 | 162 |
| 163 void VirtualFrame::Unuse(Register reg) { | 163 void VirtualFrame::Unuse(Register reg) { |
| 164 frame_registers_.Unuse(reg); | 164 frame_registers_.Unuse(reg); |
| 165 cgen_->allocator()->Unuse(reg); | 165 cgen_->allocator()->Unuse(reg); |
| 166 } | 166 } |
| 167 | 167 |
| 168 | 168 |
| 169 // Clear the dirty bit for the element at a given index. This requires | |
| 170 // writing dirty elements to the actual frame. We can only allocate space | |
| 171 // in the actual frame for the virtual element immediately above the stack | |
| 172 // pointer. | |
| 173 void VirtualFrame::SyncElementAt(int index) { | |
| 174 FrameElement element = elements_[index]; | |
| 175 | |
| 176 if (!element.is_synced()) { | |
| 177 if (index <= stack_pointer_) { | |
| 178 // Write elements below the stack pointer to their (already allocated) | |
| 179 // actual frame location. | |
| 180 if (element.is_constant()) { | |
| 181 __ Set(Operand(ebp, fp_relative(index)), Immediate(element.handle())); | |
| 182 } else { | |
| 183 ASSERT(element.is_register()); | |
| 184 __ mov(Operand(ebp, fp_relative(index)), element.reg()); | |
| 185 } | |
| 186 } else { | |
| 187 // Push elements above the stack pointer to allocate space and sync | |
| 188 // them. Space should have already been allocated in the actual frame | |
| 189 // for all the elements below this one. | |
| 190 ASSERT(index == stack_pointer_ + 1); | |
| 191 stack_pointer_++; | |
| 192 if (element.is_constant()) { | |
| 193 __ push(Immediate(element.handle())); | |
| 194 } else { | |
| 195 ASSERT(element.is_register()); | |
| 196 __ push(element.reg()); | |
| 197 } | |
| 198 } | |
| 199 | |
| 200 elements_[index].set_sync(); | |
| 201 } | |
| 202 } | |
| 203 | |
| 204 | |
| 205 // Spill any register if possible, making its reference count zero. | 169 // Spill any register if possible, making its reference count zero. |
| 206 Register VirtualFrame::SpillAnyRegister() { | 170 Register VirtualFrame::SpillAnyRegister() { |
| 207 // Find the leftmost (ordered by register code), least | 171 // Find the leftmost (ordered by register code), least |
| 208 // internally-referenced register whose internal reference count matches | 172 // internally-referenced register whose internal reference count matches |
| 209 // its external reference count (so that spilling it from the frame frees | 173 // its external reference count (so that spilling it from the frame frees |
| 210 // it for use). | 174 // it for use). |
| 211 int min_count = kMaxInt; | 175 int min_count = kMaxInt; |
| 212 int best_register_code = no_reg.code_; | 176 int best_register_code = no_reg.code_; |
| 213 | 177 |
| 214 for (int i = 0; i < RegisterFile::kNumRegisters; i++) { | 178 for (int i = 0; i < RegisterFile::kNumRegisters; i++) { |
| (...skipping 21 matching lines...) Expand all Loading... |
| 236 i++; | 200 i++; |
| 237 } | 201 } |
| 238 } | 202 } |
| 239 | 203 |
| 240 ASSERT(cgen_->allocator()->count(best_register_code) == 0); | 204 ASSERT(cgen_->allocator()->count(best_register_code) == 0); |
| 241 Register result = { best_register_code }; | 205 Register result = { best_register_code }; |
| 242 return result; | 206 return result; |
| 243 } | 207 } |
| 244 | 208 |
| 245 | 209 |
| 246 // Spill an element, making its type be MEMORY. If it is a register the | 210 // Spill an element, making its type be MEMORY. |
| 247 // reference count is not decremented, so it must be done externally. | 211 // Does not decrement usage counts if the element is a register. |
| 248 void VirtualFrame::RawSpillElementAt(int index) { | 212 void VirtualFrame::RawSpillElementAt(int index) { |
| 249 if (index > stack_pointer_ + 1) { | |
| 250 SyncRange(stack_pointer_ + 1, index); | |
| 251 } | |
| 252 SyncElementAt(index); | 213 SyncElementAt(index); |
| 253 // The element is now in memory. | 214 // The element is now in memory. |
| 254 elements_[index] = FrameElement::MemoryElement(); | 215 elements_[index] = FrameElement::MemoryElement(); |
| 255 } | 216 } |
| 256 | 217 |
| 257 | 218 |
| 258 // Make the type of the element at a given index be MEMORY. We can only | 219 // Make the type of the element at a given index be MEMORY. |
| 259 // allocate space in the actual frame for the virtual element immediately | |
| 260 // above the stack pointer. | |
| 261 void VirtualFrame::SpillElementAt(int index) { | 220 void VirtualFrame::SpillElementAt(int index) { |
| 262 if (elements_[index].is_register()) { | 221 if (elements_[index].is_register()) { |
| 263 Unuse(elements_[index].reg()); | 222 Unuse(elements_[index].reg()); |
| 264 } | 223 } |
| 265 RawSpillElementAt(index); | 224 RawSpillElementAt(index); |
| 266 } | 225 } |
| 267 | 226 |
| 268 | 227 |
| 228 // Clear the dirty bit for the element at a given index. |
| 229 // The element must be on the physical stack, or the first |
| 230 // element above the stack pointer (created by a single push). |
| 231 void VirtualFrame::RawSyncElementAt(int index) { |
| 232 FrameElement element = elements_[index]; |
| 233 |
| 234 if (!element.is_synced()) { |
| 235 if (index <= stack_pointer_) { |
| 236 // Write elements below the stack pointer to their (already allocated) |
| 237 // actual frame location. |
| 238 if (element.is_constant()) { |
| 239 __ Set(Operand(ebp, fp_relative(index)), Immediate(element.handle())); |
| 240 } else { |
| 241 ASSERT(element.is_register()); |
| 242 __ mov(Operand(ebp, fp_relative(index)), element.reg()); |
| 243 } |
| 244 } else { |
| 245 // Push elements above the stack pointer to allocate space and sync |
| 246 // them. Space should have already been allocated in the actual frame |
| 247 // for all the elements below this one. |
| 248 ASSERT(index == stack_pointer_ + 1); |
| 249 stack_pointer_++; |
| 250 if (element.is_constant()) { |
| 251 __ push(Immediate(element.handle())); |
| 252 } else { |
| 253 ASSERT(element.is_register()); |
| 254 __ push(element.reg()); |
| 255 } |
| 256 } |
| 257 |
| 258 elements_[index].set_sync(); |
| 259 } |
| 260 } |
| 261 |
| 262 |
| 269 // Clear the dirty bits for the range of elements in [begin, end). | 263 // Clear the dirty bits for the range of elements in [begin, end). |
| 270 void VirtualFrame::SyncRange(int begin, int end) { | 264 void VirtualFrame::SyncRange(int begin, int end) { |
| 271 ASSERT(begin >= 0); | 265 ASSERT(begin >= 0); |
| 272 ASSERT(end <= elements_.length()); | 266 ASSERT(end <= elements_.length()); |
| 273 for (int i = begin; i < end; i++) { | 267 for (int i = begin; i < end; i++) { |
| 274 SyncElementAt(i); | 268 RawSyncElementAt(i); |
| 275 } | 269 } |
| 276 } | 270 } |
| 277 | 271 |
| 278 | 272 |
| 273 // Clear the dirty bit for the element at a given index. |
| 274 void VirtualFrame::SyncElementAt(int index) { |
| 275 if (index > stack_pointer_ + 1) { |
| 276 SyncRange(stack_pointer_ + 1, index); |
| 277 } |
| 278 RawSyncElementAt(index); |
| 279 } |
| 280 |
| 281 |
| 279 // Make the type of all elements be MEMORY. | 282 // Make the type of all elements be MEMORY. |
| 280 void VirtualFrame::SpillAll() { | 283 void VirtualFrame::SpillAll() { |
| 281 for (int i = 0; i < elements_.length(); i++) { | 284 for (int i = 0; i < elements_.length(); i++) { |
| 282 SpillElementAt(i); | 285 SpillElementAt(i); |
| 283 } | 286 } |
| 284 } | 287 } |
| 285 | 288 |
| 286 | 289 |
| 287 void VirtualFrame::PrepareForCall(int frame_arg_count) { | 290 void VirtualFrame::PrepareForCall(int frame_arg_count) { |
| 288 ASSERT(height() >= frame_arg_count); | 291 ASSERT(height() >= frame_arg_count); |
| (...skipping 157 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 446 ASSERT(elements_.length() == expected->elements_.length()); | 449 ASSERT(elements_.length() == expected->elements_.length()); |
| 447 ASSERT(parameter_count_ == expected->parameter_count_); | 450 ASSERT(parameter_count_ == expected->parameter_count_); |
| 448 ASSERT(local_count_ == expected->local_count_); | 451 ASSERT(local_count_ == expected->local_count_); |
| 449 ASSERT(frame_pointer_ == expected->frame_pointer_); | 452 ASSERT(frame_pointer_ == expected->frame_pointer_); |
| 450 | 453 |
| 451 // Mergable frames have all elements in locations, either memory or | 454 // Mergable frames have all elements in locations, either memory or |
| 452 // register. We thus have a series of to-memory and to-register moves. | 455 // register. We thus have a series of to-memory and to-register moves. |
| 453 // First perform all to-memory moves, register-to-memory moves because | 456 // First perform all to-memory moves, register-to-memory moves because |
| 454 // they can free registers and constant-to-memory moves because they do | 457 // they can free registers and constant-to-memory moves because they do |
| 455 // not use registers. | 458 // not use registers. |
| 456 for (int i = 0; i < elements_.length(); i++) { | 459 MergeMoveRegistersToMemory(expected); |
| 457 FrameElement source = elements_[i]; | |
| 458 FrameElement target = expected->elements_[i]; | |
| 459 if (target.is_memory() && !source.is_memory()) { | |
| 460 ASSERT(source.is_register() || source.is_constant()); | |
| 461 SpillElementAt(i); | |
| 462 } | |
| 463 } | |
| 464 | |
| 465 MergeMoveRegistersToRegisters(expected); | 460 MergeMoveRegistersToRegisters(expected); |
| 466 | 461 MergeMoveMemoryToRegisters(expected); |
| 467 // Finally, constant-to-register and memory-to-register. We do these from | |
| 468 // the top down so we can use pop for memory-to-register moves above the | |
| 469 // expected stack pointer. | |
| 470 for (int i = elements_.length() - 1; i >= 0; i--) { | |
| 471 FrameElement source = elements_[i]; | |
| 472 FrameElement target = expected->elements_[i]; | |
| 473 if (target.is_register() && !source.is_register()) { | |
| 474 ASSERT(source.is_constant() || source.is_memory()); | |
| 475 if (source.is_memory()) { | |
| 476 ASSERT(i <= stack_pointer_); | |
| 477 if (i <= expected->stack_pointer_) { | |
| 478 // Elements below both stack pointers can just be moved. | |
| 479 __ mov(target.reg(), Operand(ebp, fp_relative(i))); | |
| 480 } else { | |
| 481 // Elements below the current stack pointer but above the expected | |
| 482 // one can be popped, bet first we may have to adjust the stack | |
| 483 // pointer downward. | |
| 484 if (stack_pointer_ > i + 1) { | |
| 485 #ifdef DEBUG | |
| 486 // In debug builds check to ensure this is safe. | |
| 487 for (int j = stack_pointer_; j > i; j--) { | |
| 488 ASSERT(!elements_[j].is_memory()); | |
| 489 } | |
| 490 #endif | |
| 491 stack_pointer_ = i + 1; | |
| 492 __ add(Operand(esp), | |
| 493 Immediate((stack_pointer_ - i) * kPointerSize)); | |
| 494 } | |
| 495 stack_pointer_--; | |
| 496 __ pop(target.reg()); | |
| 497 } | |
| 498 } else { | |
| 499 // Source is constant. | |
| 500 __ Set(target.reg(), Immediate(source.handle())); | |
| 501 if (target.is_synced()) { | |
| 502 if (i > stack_pointer_) { | |
| 503 SyncRange(stack_pointer_ + 1, i); | |
| 504 } | |
| 505 SyncElementAt(i); | |
| 506 } | |
| 507 } | |
| 508 Use(target.reg()); | |
| 509 elements_[i] = target; | |
| 510 } | |
| 511 } | |
| 512 | 462 |
| 513 // At this point, the frames should be identical. | 463 // At this point, the frames should be identical. |
| 514 ASSERT(stack_pointer_ == expected->stack_pointer_); | 464 ASSERT(stack_pointer_ == expected->stack_pointer_); |
| 515 #ifdef DEBUG | 465 #ifdef DEBUG |
| 516 for (int i = 0; i < elements_.length(); i++) { | 466 for (int i = 0; i < elements_.length(); i++) { |
| 517 FrameElement expect = expected->elements_[i]; | 467 FrameElement expect = expected->elements_[i]; |
| 518 if (expect.is_memory()) { | 468 if (expect.is_memory()) { |
| 519 ASSERT(elements_[i].is_memory()); | 469 ASSERT(elements_[i].is_memory()); |
| 520 ASSERT(elements_[i].is_synced() && expect.is_synced()); | 470 ASSERT(elements_[i].is_synced() && expect.is_synced()); |
| 521 } else if (expect.is_register()) { | 471 } else if (expect.is_register()) { |
| 522 ASSERT(elements_[i].is_register()); | 472 ASSERT(elements_[i].is_register()); |
| 523 ASSERT(elements_[i].reg().is(expect.reg())); | 473 ASSERT(elements_[i].reg().is(expect.reg())); |
| 524 ASSERT(elements_[i].is_synced() == expect.is_synced()); | 474 ASSERT(elements_[i].is_synced() == expect.is_synced()); |
| 525 } else { | 475 } else { |
| 526 ASSERT(expect.is_constant()); | 476 ASSERT(expect.is_constant()); |
| 527 ASSERT(elements_[i].is_constant()); | 477 ASSERT(elements_[i].is_constant()); |
| 528 ASSERT(elements_[i].handle().location() == | 478 ASSERT(elements_[i].handle().location() == |
| 529 expect.handle().location()); | 479 expect.handle().location()); |
| 530 ASSERT(elements_[i].is_synced() == expect.is_synced()); | 480 ASSERT(elements_[i].is_synced() == expect.is_synced()); |
| 531 } | 481 } |
| 532 } | 482 } |
| 533 #endif | 483 #endif |
| 534 } | 484 } |
| 535 | 485 |
| 536 | 486 |
| 487 void VirtualFrame::MergeMoveRegistersToMemory(VirtualFrame *expected) { |
| 488 for (int i = 0; i < elements_.length(); i++) { |
| 489 FrameElement source = elements_[i]; |
| 490 FrameElement target = expected->elements_[i]; |
| 491 if (target.is_memory() && !source.is_memory()) { |
| 492 ASSERT(source.is_register() || source.is_constant()); |
| 493 SpillElementAt(i); |
| 494 } |
| 495 } |
| 496 } |
| 497 |
| 498 |
| 537 void VirtualFrame::MergeMoveRegistersToRegisters(VirtualFrame *expected) { | 499 void VirtualFrame::MergeMoveRegistersToRegisters(VirtualFrame *expected) { |
| 538 int start = 0; | 500 int start = 0; |
| 539 int end = elements_.length() - 1; | 501 int end = elements_.length() - 1; |
| 540 bool any_moves_blocked; // Did we fail to make some moves this iteration? | 502 bool any_moves_blocked; // Did we fail to make some moves this iteration? |
| 541 bool should_break_cycles = false; | 503 bool should_break_cycles = false; |
| 542 bool any_moves_made; // Did we make any progress this iteration? | 504 bool any_moves_made; // Did we make any progress this iteration? |
| 543 do { | 505 do { |
| 544 any_moves_blocked = false; | 506 any_moves_blocked = false; |
| 545 any_moves_made = false; | 507 any_moves_made = false; |
| 546 int first_move_blocked = kIllegalIndex; | 508 int first_move_blocked = kIllegalIndex; |
| 547 int last_move_blocked = kIllegalIndex; | 509 int last_move_blocked = kIllegalIndex; |
| 548 for (int i = start; i <= end; i++) { | 510 for (int i = start; i <= end; i++) { |
| 549 FrameElement source = elements_[i]; | 511 FrameElement source = elements_[i]; |
| 550 FrameElement target = expected->elements_[i]; | 512 FrameElement target = expected->elements_[i]; |
| 551 if (source.is_register() && target.is_register()) { | 513 if (source.is_register() && target.is_register()) { |
| 552 if (target.reg().is(source.reg())) { | 514 if (target.reg().is(source.reg())) { |
| (...skipping 35 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 588 // Update control flags for next iteration. | 550 // Update control flags for next iteration. |
| 589 should_break_cycles = (any_moves_blocked && !any_moves_made); | 551 should_break_cycles = (any_moves_blocked && !any_moves_made); |
| 590 if (any_moves_blocked) { | 552 if (any_moves_blocked) { |
| 591 start = first_move_blocked; | 553 start = first_move_blocked; |
| 592 end = last_move_blocked; | 554 end = last_move_blocked; |
| 593 } | 555 } |
| 594 } while (any_moves_blocked); | 556 } while (any_moves_blocked); |
| 595 } | 557 } |
| 596 | 558 |
| 597 | 559 |
| 560 void VirtualFrame::MergeMoveMemoryToRegisters(VirtualFrame *expected) { |
| 561 // Finally, constant-to-register and memory-to-register. We do these from |
| 562 // the top down so we can use pop for memory-to-register moves above the |
| 563 // expected stack pointer. |
| 564 for (int i = elements_.length() - 1; i >= 0; i--) { |
| 565 FrameElement source = elements_[i]; |
| 566 FrameElement target = expected->elements_[i]; |
| 567 if (target.is_register() && !source.is_register()) { |
| 568 ASSERT(source.is_constant() || source.is_memory()); |
| 569 if (source.is_memory()) { |
| 570 ASSERT(i <= stack_pointer_); |
| 571 if (i <= expected->stack_pointer_) { |
| 572 // Elements below both stack pointers can just be moved. |
| 573 __ mov(target.reg(), Operand(ebp, fp_relative(i))); |
| 574 } else { |
| 575 // Elements below the current stack pointer but above the expected |
| 576 // one can be popped, but first we may have to adjust the stack |
| 577 // pointer downward. |
| 578 if (stack_pointer_ > i) { |
| 579 // Drop the physical stack slots for the elements between i and |
| 580 // the stack pointer; the DEBUG loop below checks that none of |
| 580 // them is memory-backed, so discarding the slots loses no data. |
| 581 #ifdef DEBUG |
| 582 // In debug builds check to ensure this is safe. |
| 583 for (int j = stack_pointer_; j > i; j--) { |
| 584 ASSERT(!elements_[j].is_memory()); |
| 585 } |
| 586 #endif |
| 587 __ add(Operand(esp), |
| 588 Immediate((stack_pointer_ - i) * kPointerSize)); |
| 589 stack_pointer_ = i; |
| 590 } |
| 591 stack_pointer_--; |
| 592 __ pop(target.reg()); |
| 593 } |
| 594 } else { |
| 595 // Source is constant. |
| 596 __ Set(target.reg(), Immediate(source.handle())); |
| 597 if (target.is_synced()) { |
| 598 SyncElementAt(i); |
| 599 } |
| 600 } |
| 601 Use(target.reg()); |
| 602 elements_[i] = target; |
| 603 } |
| 604 } |
| 605 } |
| 606 |
| 607 |
| 598 void VirtualFrame::DetachFromCodeGenerator() { | 608 void VirtualFrame::DetachFromCodeGenerator() { |
| 599 // Tell the global register allocator that it is free to reallocate all | 609 // Tell the global register allocator that it is free to reallocate all |
| 600 // register references contained in this frame. The frame elements remain | 610 // register references contained in this frame. The frame elements remain |
| 601 // register references, so the frame-internal reference count is not | 611 // register references, so the frame-internal reference count is not |
| 602 // decremented. | 612 // decremented. |
| 603 for (int i = 0; i < elements_.length(); i++) { | 613 for (int i = 0; i < elements_.length(); i++) { |
| 604 if (elements_[i].is_register()) { | 614 if (elements_[i].is_register()) { |
| 605 cgen_->allocator()->Unuse(elements_[i].reg()); | 615 cgen_->allocator()->Unuse(elements_[i].reg()); |
| 606 } | 616 } |
| 607 } | 617 } |
| (...skipping 324 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 932 return false; | 942 return false; |
| 933 } | 943 } |
| 934 } | 944 } |
| 935 return true; | 945 return true; |
| 936 } | 946 } |
| 937 #endif | 947 #endif |
| 938 | 948 |
| 939 #undef __ | 949 #undef __ |
| 940 | 950 |
| 941 } } // namespace v8::internal | 951 } } // namespace v8::internal |
| OLD | NEW |