| OLD | NEW |
| 1 // Copyright 2009 the V8 project authors. All rights reserved. | 1 // Copyright 2009 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 169 matching lines...) |
| 180 } | 180 } |
| 181 | 181 |
| 182 | 182 |
| 183 void VirtualFrame::MakeMergable(int mergable_elements) { | 183 void VirtualFrame::MakeMergable(int mergable_elements) { |
| 184 if (mergable_elements == JumpTarget::kAllElements) { | 184 if (mergable_elements == JumpTarget::kAllElements) { |
| 185 mergable_elements = elements_.length(); | 185 mergable_elements = elements_.length(); |
| 186 } | 186 } |
| 187 ASSERT(mergable_elements <= elements_.length()); | 187 ASSERT(mergable_elements <= elements_.length()); |
| 188 | 188 |
| 189 int start_index = elements_.length() - mergable_elements; | 189 int start_index = elements_.length() - mergable_elements; |
| 190 | |
| 191 // The is_copied flags on entry frame elements are expected to be | |
| 192 // exact. Set them for the elements below the water mark. | |
| 193 for (int i = 0; i < start_index; i++) { | |
| 194 elements_[i].clear_copied(); | |
| 195 if (elements_[i].is_copy()) { | |
| 196 elements_[elements_[i].index()].set_copied(); | |
| 197 } | |
| 198 } | |
| 199 | |
| 200 for (int i = start_index; i < elements_.length(); i++) { | 190 for (int i = start_index; i < elements_.length(); i++) { |
| 201 FrameElement element = elements_[i]; | 191 FrameElement element = elements_[i]; |
| 202 | 192 |
| 203 if (element.is_constant() || element.is_copy()) { | 193 if (element.is_constant() || element.is_copy()) { |
| 204 if (element.is_synced()) { | 194 if (element.is_synced()) { |
| 205 // Just spill. | 195 // Just spill. |
| 206 elements_[i] = FrameElement::MemoryElement(); | 196 elements_[i] = FrameElement::MemoryElement(); |
| 207 } else { | 197 } else { |
| 208 // Allocate to a register. | 198 // Allocate to a register. |
| 209 FrameElement backing_element; // Invalid if not a copy. | 199 FrameElement backing_element; // Invalid if not a copy. |
| (...skipping 55 matching lines...) |
| 265 if (stack_pointer_ < expected->stack_pointer_) { | 255 if (stack_pointer_ < expected->stack_pointer_) { |
| 266 int difference = expected->stack_pointer_ - stack_pointer_; | 256 int difference = expected->stack_pointer_ - stack_pointer_; |
| 267 stack_pointer_ = expected->stack_pointer_; | 257 stack_pointer_ = expected->stack_pointer_; |
| 268 __ sub(Operand(esp), Immediate(difference * kPointerSize)); | 258 __ sub(Operand(esp), Immediate(difference * kPointerSize)); |
| 269 } | 259 } |
| 270 | 260 |
| 271 MergeMoveRegistersToMemory(expected); | 261 MergeMoveRegistersToMemory(expected); |
| 272 MergeMoveRegistersToRegisters(expected); | 262 MergeMoveRegistersToRegisters(expected); |
| 273 MergeMoveMemoryToRegisters(expected); | 263 MergeMoveMemoryToRegisters(expected); |
| 274 | 264 |
| 275 // Fix any sync flag problems from the bottom-up and make the copied | |
| 276 // flags exact. This assumes that the backing store of copies is | |
| 277 // always lower in the frame. | |
| 278 for (int i = 0; i < elements_.length(); i++) { | |
| 279 FrameElement source = elements_[i]; | |
| 280 FrameElement target = expected->elements_[i]; | |
| 281 if (source.is_synced() && !target.is_synced()) { | |
| 282 elements_[i].clear_sync(); | |
| 283 } else if (!source.is_synced() && target.is_synced()) { | |
| 284 SyncElementAt(i); | |
| 285 } | |
| 286 elements_[i].clear_copied(); | |
| 287 if (elements_[i].is_copy()) { | |
| 288 elements_[elements_[i].index()].set_copied(); | |
| 289 } | |
| 290 } | |
| 291 | |
| 292 // Adjust the stack pointer downward if necessary. | 265 // Adjust the stack pointer downward if necessary. |
| 293 if (stack_pointer_ > expected->stack_pointer_) { | 266 if (stack_pointer_ > expected->stack_pointer_) { |
| 294 int difference = stack_pointer_ - expected->stack_pointer_; | 267 int difference = stack_pointer_ - expected->stack_pointer_; |
| 295 stack_pointer_ = expected->stack_pointer_; | 268 stack_pointer_ = expected->stack_pointer_; |
| 296 __ add(Operand(esp), Immediate(difference * kPointerSize)); | 269 __ add(Operand(esp), Immediate(difference * kPointerSize)); |
| 297 } | 270 } |
| 298 | 271 |
| 299 // At this point, the frames should be identical. | 272 // At this point, the frames should be identical. |
| 300 ASSERT(Equals(expected)); | 273 ASSERT(Equals(expected)); |
| 301 } | 274 } |
| 302 | 275 |
| 303 | 276 |
| 304 void VirtualFrame::MergeMoveRegistersToMemory(VirtualFrame* expected) { | 277 void VirtualFrame::MergeMoveRegistersToMemory(VirtualFrame* expected) { |
| 305 ASSERT(stack_pointer_ >= expected->stack_pointer_); | 278 ASSERT(stack_pointer_ >= expected->stack_pointer_); |
| 306 | 279 |
| 307 // Move registers, constants, and copies to memory. Perform moves | 280 // Move registers, constants, and copies to memory. Perform moves |
| 308 // from the top downward in the frame in order to leave the backing | 281 // from the top downward in the frame in order to leave the backing |
| 309 // stores of copies in registers. | 282 // stores of copies in registers. |
| 310 // | 283 // |
| 311 // Moving memory-backed copies to memory requires a spare register | 284 // Moving memory-backed copies to memory requires a spare register |
| 312 // for the memory-to-memory moves. Since we are performing a merge, | 285 // for the memory-to-memory moves. Since we are performing a merge, |
| 313 // we use esi (which is already saved in the frame). We keep track | 286 // we use esi (which is already saved in the frame). We keep track |
| 314 // of the index of the frame element esi is caching or kIllegalIndex | 287 // of the index of the frame element esi is caching or kIllegalIndex |
| 315 // if esi has not been disturbed. | 288 // if esi has not been disturbed. |
| 316 int esi_caches = kIllegalIndex; | 289 int esi_caches = kIllegalIndex; |
| 317 // Loop downward from the stack pointer or the top of the frame if | 290 for (int i = elements_.length() - 1; i >= 0; i--) { |
| 318 // the stack pointer is floating above the frame. | |
| 319 int start = Min(static_cast<int>(stack_pointer_), elements_.length() - 1); | |
| 320 for (int i = start; i >= 0; i--) { | |
| 321 FrameElement target = expected->elements_[i]; | 291 FrameElement target = expected->elements_[i]; |
| 292 if (target.is_register()) continue; // Handle registers later. |
| 322 if (target.is_memory()) { | 293 if (target.is_memory()) { |
| 323 FrameElement source = elements_[i]; | 294 FrameElement source = elements_[i]; |
| 324 switch (source.type()) { | 295 switch (source.type()) { |
| 325 case FrameElement::INVALID: | 296 case FrameElement::INVALID: |
| 326 // Not a legal merge move. | 297 // Not a legal merge move. |
| 327 UNREACHABLE(); | 298 UNREACHABLE(); |
| 328 break; | 299 break; |
| 329 | 300 |
| 330 case FrameElement::MEMORY: | 301 case FrameElement::MEMORY: |
| 331 // Already in place. | 302 // Already in place. |
| (...skipping 29 matching lines...) |
| 361 __ mov(esi, Operand(ebp, fp_relative(backing_index))); | 332 __ mov(esi, Operand(ebp, fp_relative(backing_index))); |
| 362 } | 333 } |
| 363 __ mov(Operand(ebp, fp_relative(i)), esi); | 334 __ mov(Operand(ebp, fp_relative(i)), esi); |
| 364 } else { | 335 } else { |
| 365 ASSERT(backing_element.is_register()); | 336 ASSERT(backing_element.is_register()); |
| 366 __ mov(Operand(ebp, fp_relative(i)), backing_element.reg()); | 337 __ mov(Operand(ebp, fp_relative(i)), backing_element.reg()); |
| 367 } | 338 } |
| 368 } | 339 } |
| 369 break; | 340 break; |
| 370 } | 341 } |
| 371 elements_[i] = target; | |
| 372 } | 342 } |
| 343 elements_[i] = target; |
| 373 } | 344 } |
| 374 | 345 |
| 375 if (esi_caches != kIllegalIndex) { | 346 if (esi_caches != kIllegalIndex) { |
| 376 __ mov(esi, Operand(ebp, fp_relative(context_index()))); | 347 __ mov(esi, Operand(ebp, fp_relative(context_index()))); |
| 377 } | 348 } |
| 378 } | 349 } |
| 379 | 350 |
| 380 | 351 |
| 381 void VirtualFrame::MergeMoveRegistersToRegisters(VirtualFrame* expected) { | 352 void VirtualFrame::MergeMoveRegistersToRegisters(VirtualFrame* expected) { |
| 382 // We have already done X-to-memory moves. | 353 // We have already done X-to-memory moves. |
| (...skipping 31 matching lines...) |
| 414 expected->elements_[index].is_synced()) { | 385 expected->elements_[index].is_synced()) { |
| 415 __ mov(Operand(ebp, fp_relative(index)), target); | 386 __ mov(Operand(ebp, fp_relative(index)), target); |
| 416 } | 387 } |
| 417 elements_[index] = expected->elements_[index]; | 388 elements_[index] = expected->elements_[index]; |
| 418 } | 389 } |
| 419 } | 390 } |
| 420 | 391 |
| 421 | 392 |
| 422 void VirtualFrame::MergeMoveMemoryToRegisters(VirtualFrame* expected) { | 393 void VirtualFrame::MergeMoveMemoryToRegisters(VirtualFrame* expected) { |
| 423 // Move memory, constants, and copies to registers. This is the | 394 // Move memory, constants, and copies to registers. This is the |
| 424 // final step and is done from the bottom up so that the backing | 395 // final step and since it is not done from the bottom up, but in |
| 396 // register code order, we have special code to ensure that the backing |
| 425 // elements of copies are in their correct locations when we | 397 // elements of copies are in their correct locations when we |
| 426 // encounter the copies. | 398 // encounter the copies. |
| 427 for (int i = 0; i < kNumRegisters; i++) { | 399 for (int i = 0; i < kNumRegisters; i++) { |
| 428 int index = expected->register_locations_[i]; | 400 int index = expected->register_locations_[i]; |
| 429 if (index != kIllegalIndex) { | 401 if (index != kIllegalIndex) { |
| 430 FrameElement source = elements_[index]; | 402 FrameElement source = elements_[index]; |
| 431 FrameElement target = expected->elements_[index]; | 403 FrameElement target = expected->elements_[index]; |
| 432 Register target_reg = { i }; | 404 Register target_reg = { i }; |
| 433 ASSERT(expected->elements_[index].reg().is(target_reg)); | 405 ASSERT(target.reg().is(target_reg)); |
| 434 switch (source.type()) { | 406 switch (source.type()) { |
| 435 case FrameElement::INVALID: // Fall through. | 407 case FrameElement::INVALID: // Fall through. |
| 436 UNREACHABLE(); | 408 UNREACHABLE(); |
| 437 break; | 409 break; |
| 438 case FrameElement::REGISTER: | 410 case FrameElement::REGISTER: |
| 439 ASSERT(source.reg().is(target_reg)); | 411 ASSERT(source.reg().is(target_reg)); |
| 440 continue; // Go to next iteration. Skips Use(target_reg) below. | 412 continue; // Go to next iteration. Skips Use(target_reg) below. |
| 441 break; | 413 break; |
| 442 case FrameElement::MEMORY: | 414 case FrameElement::MEMORY: |
| 443 ASSERT(index <= stack_pointer_); | 415 ASSERT(index <= stack_pointer_); |
| (...skipping 682 matching lines...) |
| 1126 ASSERT(stack_pointer_ == elements_.length() - 1); | 1098 ASSERT(stack_pointer_ == elements_.length() - 1); |
| 1127 elements_.Add(FrameElement::MemoryElement()); | 1099 elements_.Add(FrameElement::MemoryElement()); |
| 1128 stack_pointer_++; | 1100 stack_pointer_++; |
| 1129 __ push(immediate); | 1101 __ push(immediate); |
| 1130 } | 1102 } |
| 1131 | 1103 |
| 1132 | 1104 |
| 1133 #undef __ | 1105 #undef __ |
| 1134 | 1106 |
| 1135 } } // namespace v8::internal | 1107 } } // namespace v8::internal |
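
The three merge helpers in this change run in a fixed order: registers, constants, and copies are spilled to memory first, then register-to-register moves are performed, and finally memory and constants are loaded into registers. The X-to-memory pass walks the frame from the top downward so that the backing stores of copies are still in registers when they are read, and it borrows esi as a scratch register for memory-to-memory moves, reloading it afterwards. The sketch below is a self-contained toy model of that three-phase order, not V8 code: `Slot`, `Kind`, and `Merge` are invented names, and the model ignores copies, sync flags, and register-move cycles that the real `VirtualFrame` code handles.

```cpp
// Toy model of the three-phase frame merge order (illustrative only).
#include <cstdio>
#include <string>
#include <vector>

enum class Kind { Memory, Register, Constant };

struct Slot {
  Kind kind;
  std::string value;  // register name or constant text; empty for memory
};

void Merge(std::vector<Slot>& current, const std::vector<Slot>& expected) {
  // Phase 1: spill everything that must end up in memory, walking from the
  // top of the frame downward so values that are still needed are read
  // before they are overwritten.  (On ia32 a memory-to-memory move needs a
  // scratch register; the real code borrows esi and reloads it afterwards.)
  for (int i = static_cast<int>(current.size()) - 1; i >= 0; i--) {
    if (expected[i].kind == Kind::Memory && current[i].kind != Kind::Memory) {
      std::printf("slot %d: spill %s to memory\n", i, current[i].value.c_str());
      current[i] = {Kind::Memory, ""};
    }
  }
  // Phase 2: register-to-register moves (the real code also resolves cycles).
  for (std::size_t i = 0; i < current.size(); i++) {
    if (expected[i].kind == Kind::Register &&
        current[i].kind == Kind::Register &&
        current[i].value != expected[i].value) {
      std::printf("slot %zu: move %s -> %s\n", i, current[i].value.c_str(),
                  expected[i].value.c_str());
      current[i] = expected[i];
    }
  }
  // Phase 3: load the remaining register targets from memory or constants.
  for (std::size_t i = 0; i < current.size(); i++) {
    if (expected[i].kind == Kind::Register &&
        current[i].kind != Kind::Register) {
      std::printf("slot %zu: load into %s\n", i, expected[i].value.c_str());
      current[i] = expected[i];
    }
  }
}

int main() {
  std::vector<Slot> current  = {{Kind::Register, "eax"}, {Kind::Constant, "1"},
                                {Kind::Memory, ""}};
  std::vector<Slot> expected = {{Kind::Memory, ""}, {Kind::Register, "ebx"},
                                {Kind::Register, "ecx"}};
  Merge(current, expected);
  return 0;
}
```

Running the toy prints one line per move; the point is only the ordering, which mirrors MergeMoveRegistersToMemory, MergeMoveRegistersToRegisters, and MergeMoveMemoryToRegisters being called in that sequence from MergeTo.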