OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 335 matching lines...)
346 arguments()->PrintTo(stream); | 346 arguments()->PrintTo(stream); |
347 | 347 |
348 stream->Add(" length "); | 348 stream->Add(" length "); |
349 length()->PrintTo(stream); | 349 length()->PrintTo(stream); |
350 | 350 |
351 stream->Add(" index "); | 351 stream->Add(" index "); |
352 index()->PrintTo(stream); | 352 index()->PrintTo(stream); |
353 } | 353 } |
354 | 354 |
355 | 355 |
356 int LPlatformChunk::GetNextSpillIndex(bool is_double) { | 356 int LPlatformChunk::GetNextSpillIndex(RegisterKind kind) { |
357 return spill_slot_count_++; | 357 return spill_slot_count_++; |
358 } | 358 } |
359 | 359 |
360 | 360 |
361 LOperand* LPlatformChunk::GetNextSpillSlot(bool is_double) { | 361 LOperand* LPlatformChunk::GetNextSpillSlot(RegisterKind kind) { |
362 // All stack slots are Double stack slots on x64. | 362 // All stack slots are Double stack slots on x64. |
363 // Alternatively, at some point, start using half-size | 363 // Alternatively, at some point, start using half-size |
364 // stack slots for int32 values. | 364 // stack slots for int32 values. |
365 int index = GetNextSpillIndex(is_double); | 365 int index = GetNextSpillIndex(kind); |
366 if (is_double) { | 366 if (kind == DOUBLE_REGISTERS) { |
367 return LDoubleStackSlot::Create(index, zone()); | 367 return LDoubleStackSlot::Create(index, zone()); |
368 } else { | 368 } else { |
| 369 ASSERT(kind == GENERAL_REGISTERS); |
369 return LStackSlot::Create(index, zone()); | 370 return LStackSlot::Create(index, zone()); |
370 } | 371 } |
371 } | 372 } |
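
The hunk above replaces the boolean is_double flag with the RegisterKind enum when allocating spill slots. Below is a minimal, self-contained sketch of that migration, not V8 code: SpillSlot, GetNextSpillIndex, and GetNextSpillSlot here are hypothetical stand-ins for LStackSlot/LDoubleStackSlot and the LPlatformChunk methods; only the enum dispatch and the assert on the general case mirror the actual change.

    #include <cassert>

    enum RegisterKind { GENERAL_REGISTERS, DOUBLE_REGISTERS };

    // Hypothetical stand-in for LStackSlot / LDoubleStackSlot.
    struct SpillSlot {
      int index;
      bool is_double;
    };

    static int spill_slot_count = 0;

    // Mirrors GetNextSpillIndex: on x64 every slot is the same size,
    // so the register kind does not affect the index computation.
    int GetNextSpillIndex(RegisterKind kind) {
      (void)kind;
      return spill_slot_count++;
    }

    // Mirrors the shape of GetNextSpillSlot after the change.
    SpillSlot GetNextSpillSlot(RegisterKind kind) {
      int index = GetNextSpillIndex(kind);
      if (kind == DOUBLE_REGISTERS) {
        return SpillSlot{index, true};
      }
      assert(kind == GENERAL_REGISTERS);  // reject any future kind explicitly
      return SpillSlot{index, false};
    }

The explicit check on GENERAL_REGISTERS makes an unexpected third register kind fail loudly instead of silently taking the non-double path, which is the practical benefit over the old boolean parameter.
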
372 | 373 |
373 | 374 |
374 void LStoreNamedField::PrintDataTo(StringStream* stream) { | 375 void LStoreNamedField::PrintDataTo(StringStream* stream) { |
375 object()->PrintTo(stream); | 376 object()->PrintTo(stream); |
376 hydrogen()->access().PrintTo(stream); | 377 hydrogen()->access().PrintTo(stream); |
377 stream->Add(" <- "); | 378 stream->Add(" <- "); |
378 value()->PrintTo(stream); | 379 value()->PrintTo(stream); |
(...skipping 59 matching lines...)
438 LPlatformChunk* LChunkBuilder::Build() { | 439 LPlatformChunk* LChunkBuilder::Build() { |
439 ASSERT(is_unused()); | 440 ASSERT(is_unused()); |
440 chunk_ = new(zone()) LPlatformChunk(info(), graph()); | 441 chunk_ = new(zone()) LPlatformChunk(info(), graph()); |
441 LPhase phase("L_Building chunk", chunk_); | 442 LPhase phase("L_Building chunk", chunk_); |
442 status_ = BUILDING; | 443 status_ = BUILDING; |
443 | 444 |
444 // If compiling for OSR, reserve space for the unoptimized frame, | 445 // If compiling for OSR, reserve space for the unoptimized frame, |
445 // which will be subsumed into this frame. | 446 // which will be subsumed into this frame. |
446 if (graph()->has_osr()) { | 447 if (graph()->has_osr()) { |
447 for (int i = graph()->osr()->UnoptimizedFrameSlots(); i > 0; i--) { | 448 for (int i = graph()->osr()->UnoptimizedFrameSlots(); i > 0; i--) { |
448 chunk_->GetNextSpillIndex(false); | 449 chunk_->GetNextSpillIndex(GENERAL_REGISTERS); |
449 } | 450 } |
450 } | 451 } |
451 | 452 |
452 const ZoneList<HBasicBlock*>* blocks = graph()->blocks(); | 453 const ZoneList<HBasicBlock*>* blocks = graph()->blocks(); |
453 for (int i = 0; i < blocks->length(); i++) { | 454 for (int i = 0; i < blocks->length(); i++) { |
454 HBasicBlock* next = NULL; | 455 HBasicBlock* next = NULL; |
455 if (i < blocks->length() - 1) next = blocks->at(i + 1); | 456 if (i < blocks->length() - 1) next = blocks->at(i + 1); |
456 DoBasicBlock(blocks->at(i), next); | 457 DoBasicBlock(blocks->at(i), next); |
457 if (is_aborted()) return NULL; | 458 if (is_aborted()) return NULL; |
458 } | 459 } |
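
The OSR branch of Build() above now passes GENERAL_REGISTERS where it previously passed false. A hedged caller-side sketch of that change, with a hypothetical Chunk type standing in for LPlatformChunk:

    enum RegisterKind { GENERAL_REGISTERS, DOUBLE_REGISTERS };

    // Hypothetical chunk that only counts handed-out spill slots.
    struct Chunk {
      int spill_slot_count = 0;
      int GetNextSpillIndex(RegisterKind /*kind*/) { return spill_slot_count++; }
    };

    // Reserve one general-register slot per unoptimized frame slot, as the
    // OSR loop in Build() does; each call previously passed the boolean false.
    void ReserveOsrFrame(Chunk* chunk, int unoptimized_frame_slots) {
      for (int i = unoptimized_frame_slots; i > 0; i--) {
        chunk->GetNextSpillIndex(GENERAL_REGISTERS);
      }
    }
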
(...skipping 2065 matching lines...)
2524 LInstruction* LChunkBuilder::DoLoadFieldByIndex(HLoadFieldByIndex* instr) { | 2525 LInstruction* LChunkBuilder::DoLoadFieldByIndex(HLoadFieldByIndex* instr) { |
2525 LOperand* object = UseRegister(instr->object()); | 2526 LOperand* object = UseRegister(instr->object()); |
2526 LOperand* index = UseTempRegister(instr->index()); | 2527 LOperand* index = UseTempRegister(instr->index()); |
2527 return DefineSameAsFirst(new(zone()) LLoadFieldByIndex(object, index)); | 2528 return DefineSameAsFirst(new(zone()) LLoadFieldByIndex(object, index)); |
2528 } | 2529 } |
2529 | 2530 |
2530 | 2531 |
2531 } } // namespace v8::internal | 2532 } } // namespace v8::internal |
2532 | 2533 |
2533 #endif // V8_TARGET_ARCH_X64 | 2534 #endif // V8_TARGET_ARCH_X64 |