| OLD | NEW |
| 1 // Copyright 2009 the V8 project authors. All rights reserved. | 1 // Copyright 2009 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 29 matching lines...) Expand all Loading... |
| 40 // VirtualFrame implementation. | 40 // VirtualFrame implementation. |
| 41 | 41 |
| 42 // On entry to a function, the virtual frame already contains the receiver, | 42 // On entry to a function, the virtual frame already contains the receiver, |
| 43 // the parameters, and a return address. All frame elements are in memory. | 43 // the parameters, and a return address. All frame elements are in memory. |
| 44 VirtualFrame::VirtualFrame() | 44 VirtualFrame::VirtualFrame() |
| 45 : elements_(parameter_count() + local_count() + kPreallocatedElements), | 45 : elements_(parameter_count() + local_count() + kPreallocatedElements), |
| 46 stack_pointer_(parameter_count() + 1) { // 0-based index of TOS. | 46 stack_pointer_(parameter_count() + 1) { // 0-based index of TOS. |
| 47 for (int i = 0; i <= stack_pointer_; i++) { | 47 for (int i = 0; i <= stack_pointer_; i++) { |
| 48 elements_.Add(FrameElement::MemoryElement()); | 48 elements_.Add(FrameElement::MemoryElement()); |
| 49 } | 49 } |
| 50 for (int i = 0; i < kNumRegisters; i++) { | 50 for (int i = 0; i < RegisterAllocator::kNumRegisters; i++) { |
| 51 register_locations_[i] = kIllegalIndex; | 51 register_locations_[i] = kIllegalIndex; |
| 52 } | 52 } |
| 53 } | 53 } |
| 54 | 54 |
| 55 | 55 |
| 56 void VirtualFrame::SyncElementBelowStackPointer(int index) { | 56 void VirtualFrame::SyncElementBelowStackPointer(int index) { |
| 57 // Emit code to write elements below the stack pointer to their | 57 // Emit code to write elements below the stack pointer to their |
| 58 // (already allocated) stack address. | 58 // (already allocated) stack address. |
| 59 ASSERT(index <= stack_pointer_); | 59 ASSERT(index <= stack_pointer_); |
| 60 FrameElement element = elements_[index]; | 60 FrameElement element = elements_[index]; |
| (...skipping 88 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 149 } | 149 } |
| 150 } | 150 } |
| 151 elements_[index].set_sync(); | 151 elements_[index].set_sync(); |
| 152 } | 152 } |
| 153 | 153 |
| 154 | 154 |
| 155 // Clear the dirty bits for the range of elements in | 155 // Clear the dirty bits for the range of elements in |
| 156 // [min(stack_pointer_ + 1, begin), end]. | 156 // [min(stack_pointer_ + 1, begin), end]. |
| 157 void VirtualFrame::SyncRange(int begin, int end) { | 157 void VirtualFrame::SyncRange(int begin, int end) { |
| 158 ASSERT(begin >= 0); | 158 ASSERT(begin >= 0); |
| 159 ASSERT(end < elements_.length()); | 159 ASSERT(end < element_count()); |
| 160 // Sync elements below the range if they have not been materialized | 160 // Sync elements below the range if they have not been materialized |
| 161 // on the stack. | 161 // on the stack. |
| 162 int start = Min(begin, stack_pointer_ + 1); | 162 int start = Min(begin, stack_pointer_ + 1); |
| 163 | 163 |
| 164 // If positive we have to adjust the stack pointer. | 164 // If positive we have to adjust the stack pointer. |
| 165 int delta = end - stack_pointer_; | 165 int delta = end - stack_pointer_; |
| 166 if (delta > 0) { | 166 if (delta > 0) { |
| 167 stack_pointer_ = end; | 167 stack_pointer_ = end; |
| 168 __ sub(Operand(esp), Immediate(delta * kPointerSize)); | 168 __ sub(Operand(esp), Immediate(delta * kPointerSize)); |
| 169 } | 169 } |
| 170 | 170 |
| 171 for (int i = start; i <= end; i++) { | 171 for (int i = start; i <= end; i++) { |
| 172 if (!elements_[i].is_synced()) SyncElementBelowStackPointer(i); | 172 if (!elements_[i].is_synced()) SyncElementBelowStackPointer(i); |
| 173 } | 173 } |
| 174 } | 174 } |
| 175 | 175 |
| 176 | 176 |
| 177 void VirtualFrame::MakeMergable(int mergable_elements) { | 177 void VirtualFrame::MakeMergable(int mergable_elements) { |
| 178 if (mergable_elements == JumpTarget::kAllElements) { | 178 if (mergable_elements == JumpTarget::kAllElements) { |
| 179 mergable_elements = elements_.length(); | 179 mergable_elements = element_count(); |
| 180 } | 180 } |
| 181 ASSERT(mergable_elements <= elements_.length()); | 181 ASSERT(mergable_elements <= element_count()); |
| 182 | 182 |
| 183 int start_index = elements_.length() - mergable_elements; | 183 int start_index = element_count() - mergable_elements; |
| 184 for (int i = start_index; i < elements_.length(); i++) { | 184 for (int i = start_index; i < element_count(); i++) { |
| 185 FrameElement element = elements_[i]; | 185 FrameElement element = elements_[i]; |
| 186 | 186 |
| 187 if (element.is_constant() || element.is_copy()) { | 187 if (element.is_constant() || element.is_copy()) { |
| 188 if (element.is_synced()) { | 188 if (element.is_synced()) { |
| 189 // Just spill. | 189 // Just spill. |
| 190 elements_[i] = FrameElement::MemoryElement(); | 190 elements_[i] = FrameElement::MemoryElement(); |
| 191 } else { | 191 } else { |
| 192 // Allocate to a register. | 192 // Allocate to a register. |
| 193 FrameElement backing_element; // Invalid if not a copy. | 193 FrameElement backing_element; // Invalid if not a copy. |
| 194 if (element.is_copy()) { | 194 if (element.is_copy()) { |
| (...skipping 79 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 274 // Move registers, constants, and copies to memory. Perform moves | 274 // Move registers, constants, and copies to memory. Perform moves |
| 275 // from the top downward in the frame in order to leave the backing | 275 // from the top downward in the frame in order to leave the backing |
| 276 // stores of copies in registers. | 276 // stores of copies in registers. |
| 277 // | 277 // |
| 278 // Moving memory-backed copies to memory requires a spare register | 278 // Moving memory-backed copies to memory requires a spare register |
| 279 // for the memory-to-memory moves. Since we are performing a merge, | 279 // for the memory-to-memory moves. Since we are performing a merge, |
| 280 // we use esi (which is already saved in the frame). We keep track | 280 // we use esi (which is already saved in the frame). We keep track |
| 281 // of the index of the frame element esi is caching or kIllegalIndex | 281 // of the index of the frame element esi is caching or kIllegalIndex |
| 282 // if esi has not been disturbed. | 282 // if esi has not been disturbed. |
| 283 int esi_caches = kIllegalIndex; | 283 int esi_caches = kIllegalIndex; |
| 284 for (int i = elements_.length() - 1; i >= 0; i--) { | 284 for (int i = element_count() - 1; i >= 0; i--) { |
| 285 FrameElement target = expected->elements_[i]; | 285 FrameElement target = expected->elements_[i]; |
| 286 if (target.is_register()) continue; // Handle registers later. | 286 if (target.is_register()) continue; // Handle registers later. |
| 287 if (target.is_memory()) { | 287 if (target.is_memory()) { |
| 288 FrameElement source = elements_[i]; | 288 FrameElement source = elements_[i]; |
| 289 switch (source.type()) { | 289 switch (source.type()) { |
| 290 case FrameElement::INVALID: | 290 case FrameElement::INVALID: |
| 291 // Not a legal merge move. | 291 // Not a legal merge move. |
| 292 UNREACHABLE(); | 292 UNREACHABLE(); |
| 293 break; | 293 break; |
| 294 | 294 |
| (...skipping 45 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 340 if (esi_caches != kIllegalIndex) { | 340 if (esi_caches != kIllegalIndex) { |
| 341 __ mov(esi, Operand(ebp, fp_relative(context_index()))); | 341 __ mov(esi, Operand(ebp, fp_relative(context_index()))); |
| 342 } | 342 } |
| 343 } | 343 } |
| 344 | 344 |
| 345 | 345 |
| 346 void VirtualFrame::MergeMoveRegistersToRegisters(VirtualFrame* expected) { | 346 void VirtualFrame::MergeMoveRegistersToRegisters(VirtualFrame* expected) { |
| 347 // We have already done X-to-memory moves. | 347 // We have already done X-to-memory moves. |
| 348 ASSERT(stack_pointer_ >= expected->stack_pointer_); | 348 ASSERT(stack_pointer_ >= expected->stack_pointer_); |
| 349 | 349 |
| 350 for (int i = 0; i < kNumRegisters; i++) { | 350 for (int i = 0; i < RegisterAllocator::kNumRegisters; i++) { |
| 351 // Move the right value into register i if it is currently in a register. | 351 // Move the right value into register i if it is currently in a register. |
| 352 int index = expected->register_locations_[i]; | 352 int index = expected->register_location(i); |
| 353 int use_index = register_locations_[i]; | 353 int use_index = register_location(i); |
| 354 // Skip if register i is unused in the target or else if source is | 354 // Skip if register i is unused in the target or else if source is |
| 355 // not a register (this is not a register-to-register move). | 355 // not a register (this is not a register-to-register move). |
| 356 if (index == kIllegalIndex || !elements_[index].is_register()) continue; | 356 if (index == kIllegalIndex || !elements_[index].is_register()) continue; |
| 357 | 357 |
| 358 Register target = { i }; | 358 Register target = RegisterAllocator::ToRegister(i); |
| 359 Register source = elements_[index].reg(); | 359 Register source = elements_[index].reg(); |
| 360 | |
| 361 if (index != use_index) { | 360 if (index != use_index) { |
| 362 if (use_index == kIllegalIndex) { // Target is currently unused. | 361 if (use_index == kIllegalIndex) { // Target is currently unused. |
| 363 // Copy contents of source from source to target. | 362 // Copy contents of source from source to target. |
| 364 // Set frame element register to target. | 363 // Set frame element register to target. |
| 365 Use(target, index); | 364 Use(target, index); |
| 366 Unuse(source); | 365 Unuse(source); |
| 367 __ mov(target, source); | 366 __ mov(target, source); |
| 368 } else { | 367 } else { |
| 369 // Exchange contents of registers source and target. | 368 // Exchange contents of registers source and target. |
| 370 // Nothing except the register backing use_index has changed. | 369 // Nothing except the register backing use_index has changed. |
| 371 elements_[use_index].set_reg(source); | 370 elements_[use_index].set_reg(source); |
| 372 register_locations_[target.code()] = index; | 371 set_register_location(target, index); |
| 373 register_locations_[source.code()] = use_index; | 372 set_register_location(source, use_index); |
| 374 __ xchg(source, target); | 373 __ xchg(source, target); |
| 375 } | 374 } |
| 376 } | 375 } |
| 377 | 376 |
| 378 if (!elements_[index].is_synced() && | 377 if (!elements_[index].is_synced() && |
| 379 expected->elements_[index].is_synced()) { | 378 expected->elements_[index].is_synced()) { |
| 380 __ mov(Operand(ebp, fp_relative(index)), target); | 379 __ mov(Operand(ebp, fp_relative(index)), target); |
| 381 } | 380 } |
| 382 elements_[index] = expected->elements_[index]; | 381 elements_[index] = expected->elements_[index]; |
| 383 } | 382 } |
| 384 } | 383 } |
| 385 | 384 |
| 386 | 385 |
| 387 void VirtualFrame::MergeMoveMemoryToRegisters(VirtualFrame* expected) { | 386 void VirtualFrame::MergeMoveMemoryToRegisters(VirtualFrame* expected) { |
| 388 // Move memory, constants, and copies to registers. This is the | 387 // Move memory, constants, and copies to registers. This is the |
| 389 // final step and since it is not done from the bottom up, but in | 388 // final step and since it is not done from the bottom up, but in |
| 390 // register code order, we have special code to ensure that the backing | 389 // register code order, we have special code to ensure that the backing |
| 391 // elements of copies are in their correct locations when we | 390 // elements of copies are in their correct locations when we |
| 392 // encounter the copies. | 391 // encounter the copies. |
| 393 for (int i = 0; i < kNumRegisters; i++) { | 392 for (int i = 0; i < RegisterAllocator::kNumRegisters; i++) { |
| 394 int index = expected->register_locations_[i]; | 393 int index = expected->register_location(i); |
| 395 if (index != kIllegalIndex) { | 394 if (index != kIllegalIndex) { |
| 396 FrameElement source = elements_[index]; | 395 FrameElement source = elements_[index]; |
| 397 FrameElement target = expected->elements_[index]; | 396 FrameElement target = expected->elements_[index]; |
| 398 Register target_reg = { i }; | 397 Register target_reg = RegisterAllocator::ToRegister(i); |
| 399 ASSERT(target.reg().is(target_reg)); | 398 ASSERT(target.reg().is(target_reg)); |
| 400 switch (source.type()) { | 399 switch (source.type()) { |
| 401 case FrameElement::INVALID: // Fall through. | 400 case FrameElement::INVALID: // Fall through. |
| 402 UNREACHABLE(); | 401 UNREACHABLE(); |
| 403 break; | 402 break; |
| 404 case FrameElement::REGISTER: | 403 case FrameElement::REGISTER: |
| 405 ASSERT(source.reg().is(target_reg)); | 404 ASSERT(source.Equals(target)); |
| 406 continue; // Go to next iteration. Skips Use(target_reg) below. | 405 // Go to next iteration. Skips Use(target_reg) and syncing |
| 406 // below. It is safe to skip syncing because a target |
| 407 // register frame element would only be synced if all source |
| 408 // elements were. |
| 409 continue; |
| 407 break; | 410 break; |
| 408 case FrameElement::MEMORY: | 411 case FrameElement::MEMORY: |
| 409 ASSERT(index <= stack_pointer_); | 412 ASSERT(index <= stack_pointer_); |
| 410 __ mov(target_reg, Operand(ebp, fp_relative(index))); | 413 __ mov(target_reg, Operand(ebp, fp_relative(index))); |
| 411 break; | 414 break; |
| 412 | 415 |
| 413 case FrameElement::CONSTANT: | 416 case FrameElement::CONSTANT: |
| 414 if (cgen()->IsUnsafeSmi(source.handle())) { | 417 if (cgen()->IsUnsafeSmi(source.handle())) { |
| 415 cgen()->LoadUnsafeSmi(target_reg, source.handle()); | 418 cgen()->LoadUnsafeSmi(target_reg, source.handle()); |
| 416 } else { | 419 } else { |
| (...skipping 19 matching lines...) Expand all Loading... |
| 436 Operand(ebp, fp_relative(backing_index))); | 439 Operand(ebp, fp_relative(backing_index))); |
| 437 __ mov(target_reg, new_backing_reg); | 440 __ mov(target_reg, new_backing_reg); |
| 438 } else { | 441 } else { |
| 439 __ mov(target_reg, Operand(ebp, fp_relative(backing_index))); | 442 __ mov(target_reg, Operand(ebp, fp_relative(backing_index))); |
| 440 } | 443 } |
| 441 } else { | 444 } else { |
| 442 __ mov(target_reg, backing.reg()); | 445 __ mov(target_reg, backing.reg()); |
| 443 } | 446 } |
| 444 } | 447 } |
| 445 } | 448 } |
| 446 // Ensure the proper sync state. If the source was memory no | 449 // Ensure the proper sync state. |
| 447 // code needs to be emitted. | |
| 448 if (target.is_synced() && !source.is_synced()) { | 450 if (target.is_synced() && !source.is_synced()) { |
| 449 __ mov(Operand(ebp, fp_relative(index)), target_reg); | 451 __ mov(Operand(ebp, fp_relative(index)), target_reg); |
| 450 } | 452 } |
| 451 Use(target_reg, index); | 453 Use(target_reg, index); |
| 452 elements_[index] = target; | 454 elements_[index] = target; |
| 453 } | 455 } |
| 454 } | 456 } |
| 455 } | 457 } |
| 456 | 458 |
| 457 | 459 |
| (...skipping 17 matching lines...) Expand all Loading... |
| 475 __ mov(ebp, Operand(esp)); | 477 __ mov(ebp, Operand(esp)); |
| 476 | 478 |
| 477 // Store the context in the frame. The context is kept in esi and a | 479 // Store the context in the frame. The context is kept in esi and a |
| 478 // copy is stored in the frame. The external reference to esi | 480 // copy is stored in the frame. The external reference to esi |
| 479 // remains. | 481 // remains. |
| 480 EmitPush(esi); | 482 EmitPush(esi); |
| 481 | 483 |
| 482 // Store the function in the frame. The frame owns the register | 484 // Store the function in the frame. The frame owns the register |
| 483 // reference now (ie, it can keep it in edi or spill it later). | 485 // reference now (ie, it can keep it in edi or spill it later). |
| 484 Push(edi); | 486 Push(edi); |
| 485 SyncElementAt(elements_.length() - 1); | 487 SyncElementAt(element_count() - 1); |
| 486 cgen()->allocator()->Unuse(edi); | 488 cgen()->allocator()->Unuse(edi); |
| 487 } | 489 } |
| 488 | 490 |
| 489 | 491 |
| 490 void VirtualFrame::Exit() { | 492 void VirtualFrame::Exit() { |
| 491 Comment cmnt(masm(), "[ Exit JS frame"); | 493 Comment cmnt(masm(), "[ Exit JS frame"); |
| 492 // Record the location of the JS exit code for patching when setting | 494 // Record the location of the JS exit code for patching when setting |
| 493 // break point. | 495 // break point. |
| 494 __ RecordJSReturn(); | 496 __ RecordJSReturn(); |
| 495 | 497 |
| 496 // Avoid using the leave instruction here, because it is too | 498 // Avoid using the leave instruction here, because it is too |
| 497 // short. We need the return sequence to be at least the size of a | 499 // short. We need the return sequence to be at least the size of a |
| 498 // call instruction to support patching the exit code in the | 500 // call instruction to support patching the exit code in the |
| 499 // debugger. See VisitReturnStatement for the full return sequence. | 501 // debugger. See VisitReturnStatement for the full return sequence. |
| 500 __ mov(esp, Operand(ebp)); | 502 __ mov(esp, Operand(ebp)); |
| 501 stack_pointer_ = frame_pointer(); | 503 stack_pointer_ = frame_pointer(); |
| 502 for (int i = elements_.length() - 1; i > stack_pointer_; i--) { | 504 for (int i = element_count() - 1; i > stack_pointer_; i--) { |
| 503 FrameElement last = elements_.RemoveLast(); | 505 FrameElement last = elements_.RemoveLast(); |
| 504 if (last.is_register()) { | 506 if (last.is_register()) { |
| 505 Unuse(last.reg()); | 507 Unuse(last.reg()); |
| 506 } | 508 } |
| 507 } | 509 } |
| 508 | 510 |
| 509 EmitPop(ebp); | 511 EmitPop(ebp); |
| 510 } | 512 } |
| 511 | 513 |
| 512 | 514 |
| 513 void VirtualFrame::AllocateStackSlots() { | 515 void VirtualFrame::AllocateStackSlots() { |
| 514 int count = local_count(); | 516 int count = local_count(); |
| 515 if (count > 0) { | 517 if (count > 0) { |
| 516 Comment cmnt(masm(), "[ Allocate space for locals"); | 518 Comment cmnt(masm(), "[ Allocate space for locals"); |
| 517 // The locals are initialized to a constant (the undefined value), but | 519 // The locals are initialized to a constant (the undefined value), but |
| 518 // we sync them with the actual frame to allocate space for spilling | 520 // we sync them with the actual frame to allocate space for spilling |
| 519 // them later. First sync everything above the stack pointer so we can | 521 // them later. First sync everything above the stack pointer so we can |
| 520 // use pushes to allocate and initialize the locals. | 522 // use pushes to allocate and initialize the locals. |
| 521 SyncRange(stack_pointer_ + 1, elements_.length() - 1); | 523 SyncRange(stack_pointer_ + 1, element_count() - 1); |
| 522 Handle<Object> undefined = Factory::undefined_value(); | 524 Handle<Object> undefined = Factory::undefined_value(); |
| 523 FrameElement initial_value = | 525 FrameElement initial_value = |
| 524 FrameElement::ConstantElement(undefined, FrameElement::SYNCED); | 526 FrameElement::ConstantElement(undefined, FrameElement::SYNCED); |
| 525 Result temp = cgen()->allocator()->Allocate(); | 527 Result temp = cgen()->allocator()->Allocate(); |
| 526 ASSERT(temp.is_valid()); | 528 ASSERT(temp.is_valid()); |
| 527 __ Set(temp.reg(), Immediate(undefined)); | 529 __ Set(temp.reg(), Immediate(undefined)); |
| 528 for (int i = 0; i < count; i++) { | 530 for (int i = 0; i < count; i++) { |
| 529 elements_.Add(initial_value); | 531 elements_.Add(initial_value); |
| 530 stack_pointer_++; | 532 stack_pointer_++; |
| 531 __ push(temp.reg()); | 533 __ push(temp.reg()); |
| (...skipping 22 matching lines...) Expand all Loading... |
| 554 } | 556 } |
| 555 | 557 |
| 556 | 558 |
| 557 int VirtualFrame::InvalidateFrameSlotAt(int index) { | 559 int VirtualFrame::InvalidateFrameSlotAt(int index) { |
| 558 FrameElement original = elements_[index]; | 560 FrameElement original = elements_[index]; |
| 559 | 561 |
| 560 // Is this element the backing store of any copies? | 562 // Is this element the backing store of any copies? |
| 561 int new_backing_index = kIllegalIndex; | 563 int new_backing_index = kIllegalIndex; |
| 562 if (original.is_copied()) { | 564 if (original.is_copied()) { |
| 563 // Verify it is copied, and find first copy. | 565 // Verify it is copied, and find first copy. |
| 564 for (int i = index + 1; i < elements_.length(); i++) { | 566 for (int i = index + 1; i < element_count(); i++) { |
| 565 if (elements_[i].is_copy() && elements_[i].index() == index) { | 567 if (elements_[i].is_copy() && elements_[i].index() == index) { |
| 566 new_backing_index = i; | 568 new_backing_index = i; |
| 567 break; | 569 break; |
| 568 } | 570 } |
| 569 } | 571 } |
| 570 } | 572 } |
| 571 | 573 |
| 572 if (new_backing_index == kIllegalIndex) { | 574 if (new_backing_index == kIllegalIndex) { |
| 573 // No copies found, return kIllegalIndex. | 575 // No copies found, return kIllegalIndex. |
| 574 if (original.is_register()) { | 576 if (original.is_register()) { |
| 575 Unuse(original.reg()); | 577 Unuse(original.reg()); |
| 576 } | 578 } |
| 577 elements_[index] = FrameElement::InvalidElement(); | 579 elements_[index] = FrameElement::InvalidElement(); |
| 578 return kIllegalIndex; | 580 return kIllegalIndex; |
| 579 } | 581 } |
| 580 | 582 |
| 581 // This is the backing store of copies. | 583 // This is the backing store of copies. |
| 582 Register backing_reg; | 584 Register backing_reg; |
| 583 if (original.is_memory()) { | 585 if (original.is_memory()) { |
| 584 Result fresh = cgen()->allocator()->Allocate(); | 586 Result fresh = cgen()->allocator()->Allocate(); |
| 585 ASSERT(fresh.is_valid()); | 587 ASSERT(fresh.is_valid()); |
| 586 Use(fresh.reg(), new_backing_index); | 588 Use(fresh.reg(), new_backing_index); |
| 587 backing_reg = fresh.reg(); | 589 backing_reg = fresh.reg(); |
| 588 __ mov(backing_reg, Operand(ebp, fp_relative(index))); | 590 __ mov(backing_reg, Operand(ebp, fp_relative(index))); |
| 589 } else { | 591 } else { |
| 590 // The original was in a register. | 592 // The original was in a register. |
| 591 backing_reg = original.reg(); | 593 backing_reg = original.reg(); |
| 592 register_locations_[backing_reg.code()] = new_backing_index; | 594 set_register_location(backing_reg, new_backing_index); |
| 593 } | 595 } |
| 594 // Invalidate the element at index. | 596 // Invalidate the element at index. |
| 595 elements_[index] = FrameElement::InvalidElement(); | 597 elements_[index] = FrameElement::InvalidElement(); |
| 596 // Set the new backing element. | 598 // Set the new backing element. |
| 597 if (elements_[new_backing_index].is_synced()) { | 599 if (elements_[new_backing_index].is_synced()) { |
| 598 elements_[new_backing_index] = | 600 elements_[new_backing_index] = |
| 599 FrameElement::RegisterElement(backing_reg, FrameElement::SYNCED); | 601 FrameElement::RegisterElement(backing_reg, FrameElement::SYNCED); |
| 600 } else { | 602 } else { |
| 601 elements_[new_backing_index] = | 603 elements_[new_backing_index] = |
| 602 FrameElement::RegisterElement(backing_reg, FrameElement::NOT_SYNCED); | 604 FrameElement::RegisterElement(backing_reg, FrameElement::NOT_SYNCED); |
| 603 } | 605 } |
| 604 // Update the other copies. | 606 // Update the other copies. |
| 605 for (int i = new_backing_index + 1; i < elements_.length(); i++) { | 607 for (int i = new_backing_index + 1; i < element_count(); i++) { |
| 606 if (elements_[i].is_copy() && elements_[i].index() == index) { | 608 if (elements_[i].is_copy() && elements_[i].index() == index) { |
| 607 elements_[i].set_index(new_backing_index); | 609 elements_[i].set_index(new_backing_index); |
| 608 elements_[new_backing_index].set_copied(); | 610 elements_[new_backing_index].set_copied(); |
| 609 } | 611 } |
| 610 } | 612 } |
| 611 return new_backing_index; | 613 return new_backing_index; |
| 612 } | 614 } |
| 613 | 615 |
| 614 | 616 |
| 615 void VirtualFrame::TakeFrameSlotAt(int index) { | 617 void VirtualFrame::TakeFrameSlotAt(int index) { |
| 616 ASSERT(index >= 0); | 618 ASSERT(index >= 0); |
| 617 ASSERT(index <= elements_.length()); | 619 ASSERT(index <= element_count()); |
| 618 FrameElement original = elements_[index]; | 620 FrameElement original = elements_[index]; |
| 619 int new_backing_store_index = InvalidateFrameSlotAt(index); | 621 int new_backing_store_index = InvalidateFrameSlotAt(index); |
| 620 if (new_backing_store_index != kIllegalIndex) { | 622 if (new_backing_store_index != kIllegalIndex) { |
| 621 elements_.Add(CopyElementAt(new_backing_store_index)); | 623 elements_.Add(CopyElementAt(new_backing_store_index)); |
| 622 return; | 624 return; |
| 623 } | 625 } |
| 624 | 626 |
| 625 switch (original.type()) { | 627 switch (original.type()) { |
| 626 case FrameElement::MEMORY: { | 628 case FrameElement::MEMORY: { |
| 627 // Emit code to load the original element's data into a register. | 629 // Emit code to load the original element's data into a register. |
| 628 // Push that register as a FrameElement on top of the frame. | 630 // Push that register as a FrameElement on top of the frame. |
| 629 Result fresh = cgen()->allocator()->Allocate(); | 631 Result fresh = cgen()->allocator()->Allocate(); |
| 630 ASSERT(fresh.is_valid()); | 632 ASSERT(fresh.is_valid()); |
| 631 FrameElement new_element = | 633 FrameElement new_element = |
| 632 FrameElement::RegisterElement(fresh.reg(), | 634 FrameElement::RegisterElement(fresh.reg(), |
| 633 FrameElement::NOT_SYNCED); | 635 FrameElement::NOT_SYNCED); |
| 634 Use(fresh.reg(), elements_.length()); | 636 Use(fresh.reg(), element_count()); |
| 635 elements_.Add(new_element); | 637 elements_.Add(new_element); |
| 636 __ mov(fresh.reg(), Operand(ebp, fp_relative(index))); | 638 __ mov(fresh.reg(), Operand(ebp, fp_relative(index))); |
| 637 break; | 639 break; |
| 638 } | 640 } |
| 639 case FrameElement::REGISTER: | 641 case FrameElement::REGISTER: |
| 640 Use(original.reg(), elements_.length()); | 642 Use(original.reg(), element_count()); |
| 641 // Fall through. | 643 // Fall through. |
| 642 case FrameElement::CONSTANT: | 644 case FrameElement::CONSTANT: |
| 643 case FrameElement::COPY: | 645 case FrameElement::COPY: |
| 644 original.clear_sync(); | 646 original.clear_sync(); |
| 645 elements_.Add(original); | 647 elements_.Add(original); |
| 646 break; | 648 break; |
| 647 case FrameElement::INVALID: | 649 case FrameElement::INVALID: |
| 648 UNREACHABLE(); | 650 UNREACHABLE(); |
| 649 break; | 651 break; |
| 650 } | 652 } |
| 651 } | 653 } |
| 652 | 654 |
| 653 | 655 |
| 654 void VirtualFrame::StoreToFrameSlotAt(int index) { | 656 void VirtualFrame::StoreToFrameSlotAt(int index) { |
| 655 // Store the value on top of the frame to the virtual frame slot at | 657 // Store the value on top of the frame to the virtual frame slot at |
| 656 // a given index. The value on top of the frame is left in place. | 658 // a given index. The value on top of the frame is left in place. |
| 657 // This is a duplicating operation, so it can create copies. | 659 // This is a duplicating operation, so it can create copies. |
| 658 ASSERT(index >= 0); | 660 ASSERT(index >= 0); |
| 659 ASSERT(index < elements_.length()); | 661 ASSERT(index < element_count()); |
| 660 | 662 |
| 661 int top_index = elements_.length() - 1; | 663 int top_index = element_count() - 1; |
| 662 FrameElement top = elements_[top_index]; | 664 FrameElement top = elements_[top_index]; |
| 663 FrameElement original = elements_[index]; | 665 FrameElement original = elements_[index]; |
| 664 if (top.is_copy() && top.index() == index) return; | 666 if (top.is_copy() && top.index() == index) return; |
| 665 ASSERT(top.is_valid()); | 667 ASSERT(top.is_valid()); |
| 666 | 668 |
| 667 InvalidateFrameSlotAt(index); | 669 InvalidateFrameSlotAt(index); |
| 668 | 670 |
| 669 // InvalidateFrameSlotAt can potentially change any frame element, due | 671 // InvalidateFrameSlotAt can potentially change any frame element, due |
| 670 // to spilling registers to allocate temporaries in order to preserve | 672 // to spilling registers to allocate temporaries in order to preserve |
| 671 // the copy-on-write semantics of aliased elements. Reload top from | 673 // the copy-on-write semantics of aliased elements. Reload top from |
| (...skipping 26 matching lines...) Expand all Loading... |
| 698 // | 700 // |
| 699 // TODO(209): considering allocating the stored-to slot to the | 701 // TODO(209): considering allocating the stored-to slot to the |
| 700 // temp register. Alternatively, allow copies to appear in | 702 // temp register. Alternatively, allow copies to appear in |
| 701 // any order in the frame and lazily move the value down to | 703 // any order in the frame and lazily move the value down to |
| 702 // the slot. | 704 // the slot. |
| 703 Result temp = cgen()->allocator()->Allocate(); | 705 Result temp = cgen()->allocator()->Allocate(); |
| 704 ASSERT(temp.is_valid()); | 706 ASSERT(temp.is_valid()); |
| 705 __ mov(temp.reg(), Operand(ebp, fp_relative(backing_index))); | 707 __ mov(temp.reg(), Operand(ebp, fp_relative(backing_index))); |
| 706 __ mov(Operand(ebp, fp_relative(index)), temp.reg()); | 708 __ mov(Operand(ebp, fp_relative(index)), temp.reg()); |
| 707 } else { | 709 } else { |
| 708 register_locations_[backing_element.reg().code()] = index; | 710 set_register_location(backing_element.reg(), index); |
| 709 if (backing_element.is_synced()) { | 711 if (backing_element.is_synced()) { |
| 710 // If the element is a register, we will not actually move | 712 // If the element is a register, we will not actually move |
| 711 // anything on the stack but only update the virtual frame | 713 // anything on the stack but only update the virtual frame |
| 712 // element. | 714 // element. |
| 713 backing_element.clear_sync(); | 715 backing_element.clear_sync(); |
| 714 } | 716 } |
| 715 } | 717 } |
| 716 elements_[index] = backing_element; | 718 elements_[index] = backing_element; |
| 717 | 719 |
| 718 // The old backing element becomes a copy of the new backing | 720 // The old backing element becomes a copy of the new backing |
| 719 // element. | 721 // element. |
| 720 FrameElement new_element = CopyElementAt(index); | 722 FrameElement new_element = CopyElementAt(index); |
| 721 elements_[backing_index] = new_element; | 723 elements_[backing_index] = new_element; |
| 722 if (backing_element.is_synced()) { | 724 if (backing_element.is_synced()) { |
| 723 elements_[backing_index].set_sync(); | 725 elements_[backing_index].set_sync(); |
| 724 } | 726 } |
| 725 | 727 |
| 726 // All the copies of the old backing element (including the top | 728 // All the copies of the old backing element (including the top |
| 727 // element) become copies of the new backing element. | 729 // element) become copies of the new backing element. |
| 728 for (int i = backing_index + 1; i < elements_.length(); i++) { | 730 for (int i = backing_index + 1; i < element_count(); i++) { |
| 729 if (elements_[i].is_copy() && elements_[i].index() == backing_index) { | 731 if (elements_[i].is_copy() && elements_[i].index() == backing_index) { |
| 730 elements_[i].set_index(index); | 732 elements_[i].set_index(index); |
| 731 } | 733 } |
| 732 } | 734 } |
| 733 } | 735 } |
| 734 return; | 736 return; |
| 735 } | 737 } |
| 736 | 738 |
| 737 // Move the top element to the stored-to slot and replace it (the | 739 // Move the top element to the stored-to slot and replace it (the |
| 738 // top element) with a copy. | 740 // top element) with a copy. |
| 739 elements_[index] = top; | 741 elements_[index] = top; |
| 740 if (top.is_memory()) { | 742 if (top.is_memory()) { |
| 741 // TODO(209): consider allocating the stored-to slot to the temp | 743 // TODO(209): consider allocating the stored-to slot to the temp |
| 742 // register. Alternatively, allow copies to appear in any order | 744 // register. Alternatively, allow copies to appear in any order |
| 743 // in the frame and lazily move the value down to the slot. | 745 // in the frame and lazily move the value down to the slot. |
| 744 FrameElement new_top = CopyElementAt(index); | 746 FrameElement new_top = CopyElementAt(index); |
| 745 new_top.set_sync(); | 747 new_top.set_sync(); |
| 746 elements_[top_index] = new_top; | 748 elements_[top_index] = new_top; |
| 747 | 749 |
| 748 // The sync state of the former top element is correct (synced). | 750 // The sync state of the former top element is correct (synced). |
| 749 // Emit code to move the value down in the frame. | 751 // Emit code to move the value down in the frame. |
| 750 Result temp = cgen()->allocator()->Allocate(); | 752 Result temp = cgen()->allocator()->Allocate(); |
| 751 ASSERT(temp.is_valid()); | 753 ASSERT(temp.is_valid()); |
| 752 __ mov(temp.reg(), Operand(esp, 0)); | 754 __ mov(temp.reg(), Operand(esp, 0)); |
| 753 __ mov(Operand(ebp, fp_relative(index)), temp.reg()); | 755 __ mov(Operand(ebp, fp_relative(index)), temp.reg()); |
| 754 } else if (top.is_register()) { | 756 } else if (top.is_register()) { |
| 755 register_locations_[top.reg().code()] = index; | 757 set_register_location(top.reg(), index); |
| 756 // The stored-to slot has the (unsynced) register reference and | 758 // The stored-to slot has the (unsynced) register reference and |
| 757 // the top element becomes a copy. The sync state of the top is | 759 // the top element becomes a copy. The sync state of the top is |
| 758 // preserved. | 760 // preserved. |
| 759 FrameElement new_top = CopyElementAt(index); | 761 FrameElement new_top = CopyElementAt(index); |
| 760 if (top.is_synced()) { | 762 if (top.is_synced()) { |
| 761 new_top.set_sync(); | 763 new_top.set_sync(); |
| 762 elements_[index].clear_sync(); | 764 elements_[index].clear_sync(); |
| 763 } | 765 } |
| 764 elements_[top_index] = new_top; | 766 elements_[top_index] = new_top; |
| 765 } else { | 767 } else { |
| (...skipping 203 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 969 __ Set(num_args.reg(), Immediate(arg_count)); | 971 __ Set(num_args.reg(), Immediate(arg_count)); |
| 970 | 972 |
| 971 function.Unuse(); | 973 function.Unuse(); |
| 972 num_args.Unuse(); | 974 num_args.Unuse(); |
| 973 return RawCallCodeObject(ic, RelocInfo::CONSTRUCT_CALL); | 975 return RawCallCodeObject(ic, RelocInfo::CONSTRUCT_CALL); |
| 974 } | 976 } |
| 975 | 977 |
| 976 | 978 |
| 977 void VirtualFrame::Drop(int count) { | 979 void VirtualFrame::Drop(int count) { |
| 978 ASSERT(height() >= count); | 980 ASSERT(height() >= count); |
| 979 int num_virtual_elements = (elements_.length() - 1) - stack_pointer_; | 981 int num_virtual_elements = (element_count() - 1) - stack_pointer_; |
| 980 | 982 |
| 981 // Emit code to lower the stack pointer if necessary. | 983 // Emit code to lower the stack pointer if necessary. |
| 982 if (num_virtual_elements < count) { | 984 if (num_virtual_elements < count) { |
| 983 int num_dropped = count - num_virtual_elements; | 985 int num_dropped = count - num_virtual_elements; |
| 984 stack_pointer_ -= num_dropped; | 986 stack_pointer_ -= num_dropped; |
| 985 __ add(Operand(esp), Immediate(num_dropped * kPointerSize)); | 987 __ add(Operand(esp), Immediate(num_dropped * kPointerSize)); |
| 986 } | 988 } |
| 987 | 989 |
| 988 // Discard elements from the virtual frame and free any registers. | 990 // Discard elements from the virtual frame and free any registers. |
| 989 for (int i = 0; i < count; i++) { | 991 for (int i = 0; i < count; i++) { |
| 990 FrameElement dropped = elements_.RemoveLast(); | 992 FrameElement dropped = elements_.RemoveLast(); |
| 991 if (dropped.is_register()) { | 993 if (dropped.is_register()) { |
| 992 Unuse(dropped.reg()); | 994 Unuse(dropped.reg()); |
| 993 } | 995 } |
| 994 } | 996 } |
| 995 } | 997 } |
| 996 | 998 |
| 997 | 999 |
| 998 Result VirtualFrame::Pop() { | 1000 Result VirtualFrame::Pop() { |
| 999 FrameElement element = elements_.RemoveLast(); | 1001 FrameElement element = elements_.RemoveLast(); |
| 1000 int index = elements_.length(); | 1002 int index = element_count(); |
| 1001 ASSERT(element.is_valid()); | 1003 ASSERT(element.is_valid()); |
| 1002 | 1004 |
| 1003 bool pop_needed = (stack_pointer_ == index); | 1005 bool pop_needed = (stack_pointer_ == index); |
| 1004 if (pop_needed) { | 1006 if (pop_needed) { |
| 1005 stack_pointer_--; | 1007 stack_pointer_--; |
| 1006 if (element.is_memory()) { | 1008 if (element.is_memory()) { |
| 1007 Result temp = cgen()->allocator()->Allocate(); | 1009 Result temp = cgen()->allocator()->Allocate(); |
| 1008 ASSERT(temp.is_valid()); | 1010 ASSERT(temp.is_valid()); |
| 1009 temp.set_static_type(element.static_type()); | 1011 temp.set_static_type(element.static_type()); |
| 1010 __ pop(temp.reg()); | 1012 __ pop(temp.reg()); |
| (...skipping 34 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1045 } else if (element.is_register()) { | 1047 } else if (element.is_register()) { |
| 1046 return Result(element.reg(), element.static_type()); | 1048 return Result(element.reg(), element.static_type()); |
| 1047 } else { | 1049 } else { |
| 1048 ASSERT(element.is_constant()); | 1050 ASSERT(element.is_constant()); |
| 1049 return Result(element.handle()); | 1051 return Result(element.handle()); |
| 1050 } | 1052 } |
| 1051 } | 1053 } |
| 1052 | 1054 |
| 1053 | 1055 |
| 1054 void VirtualFrame::EmitPop(Register reg) { | 1056 void VirtualFrame::EmitPop(Register reg) { |
| 1055 ASSERT(stack_pointer_ == elements_.length() - 1); | 1057 ASSERT(stack_pointer_ == element_count() - 1); |
| 1056 stack_pointer_--; | 1058 stack_pointer_--; |
| 1057 elements_.RemoveLast(); | 1059 elements_.RemoveLast(); |
| 1058 __ pop(reg); | 1060 __ pop(reg); |
| 1059 } | 1061 } |
| 1060 | 1062 |
| 1061 | 1063 |
| 1062 void VirtualFrame::EmitPop(Operand operand) { | 1064 void VirtualFrame::EmitPop(Operand operand) { |
| 1063 ASSERT(stack_pointer_ == elements_.length() - 1); | 1065 ASSERT(stack_pointer_ == element_count() - 1); |
| 1064 stack_pointer_--; | 1066 stack_pointer_--; |
| 1065 elements_.RemoveLast(); | 1067 elements_.RemoveLast(); |
| 1066 __ pop(operand); | 1068 __ pop(operand); |
| 1067 } | 1069 } |
| 1068 | 1070 |
| 1069 | 1071 |
| 1070 void VirtualFrame::EmitPush(Register reg) { | 1072 void VirtualFrame::EmitPush(Register reg) { |
| 1071 ASSERT(stack_pointer_ == elements_.length() - 1); | 1073 ASSERT(stack_pointer_ == element_count() - 1); |
| 1072 elements_.Add(FrameElement::MemoryElement()); | 1074 elements_.Add(FrameElement::MemoryElement()); |
| 1073 stack_pointer_++; | 1075 stack_pointer_++; |
| 1074 __ push(reg); | 1076 __ push(reg); |
| 1075 } | 1077 } |
| 1076 | 1078 |
| 1077 | 1079 |
| 1078 void VirtualFrame::EmitPush(Operand operand) { | 1080 void VirtualFrame::EmitPush(Operand operand) { |
| 1079 ASSERT(stack_pointer_ == elements_.length() - 1); | 1081 ASSERT(stack_pointer_ == element_count() - 1); |
| 1080 elements_.Add(FrameElement::MemoryElement()); | 1082 elements_.Add(FrameElement::MemoryElement()); |
| 1081 stack_pointer_++; | 1083 stack_pointer_++; |
| 1082 __ push(operand); | 1084 __ push(operand); |
| 1083 } | 1085 } |
| 1084 | 1086 |
| 1085 | 1087 |
| 1086 void VirtualFrame::EmitPush(Immediate immediate) { | 1088 void VirtualFrame::EmitPush(Immediate immediate) { |
| 1087 ASSERT(stack_pointer_ == elements_.length() - 1); | 1089 ASSERT(stack_pointer_ == element_count() - 1); |
| 1088 elements_.Add(FrameElement::MemoryElement()); | 1090 elements_.Add(FrameElement::MemoryElement()); |
| 1089 stack_pointer_++; | 1091 stack_pointer_++; |
| 1090 __ push(immediate); | 1092 __ push(immediate); |
| 1091 } | 1093 } |
| 1092 | 1094 |
| 1093 | 1095 |
| 1094 #undef __ | 1096 #undef __ |
| 1095 | 1097 |
| 1096 } } // namespace v8::internal | 1098 } } // namespace v8::internal |
| OLD | NEW |