OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 41 matching lines...) |
52 TranscendentalCache::Type type_; | 52 TranscendentalCache::Type type_; |
53 ArgumentType argument_type_; | 53 ArgumentType argument_type_; |
54 void GenerateCallCFunction(MacroAssembler* masm, Register scratch); | 54 void GenerateCallCFunction(MacroAssembler* masm, Register scratch); |
55 | 55 |
56 Major MajorKey() { return TranscendentalCache; } | 56 Major MajorKey() { return TranscendentalCache; } |
57 int MinorKey() { return type_ | argument_type_; } | 57 int MinorKey() { return type_ | argument_type_; } |
58 Runtime::FunctionId RuntimeFunction(); | 58 Runtime::FunctionId RuntimeFunction(); |
59 }; | 59 }; |
60 | 60 |
61 | 61 |
| 62 class StoreBufferOverflowStub: public CodeStub { |
| 63 public: |
| 64 explicit StoreBufferOverflowStub(SaveFPRegsMode save_fp) |
| 65 : save_doubles_(save_fp) { } |
| 66 |
| 67 void Generate(MacroAssembler* masm); |
| 68 |
| 69 virtual bool CompilingCallsToThisStubIsGCSafe() { return true; } |
| 70 static void GenerateFixedRegStubsAheadOfTime(); |
| 71 virtual bool SometimesSetsUpAFrame() { return false; } |
| 72 |
| 73 private: |
| 74 SaveFPRegsMode save_doubles_; |
| 75 |
| 76 Major MajorKey() { return StoreBufferOverflow; } |
| 77 int MinorKey() { return (save_doubles_ == kSaveFPRegs) ? 1 : 0; } |
| 78 }; |
| 79 |
| 80 |
62 class UnaryOpStub: public CodeStub { | 81 class UnaryOpStub: public CodeStub { |
63 public: | 82 public: |
64 UnaryOpStub(Token::Value op, | 83 UnaryOpStub(Token::Value op, |
65 UnaryOverwriteMode mode, | 84 UnaryOverwriteMode mode, |
66 UnaryOpIC::TypeInfo operand_type = UnaryOpIC::UNINITIALIZED) | 85 UnaryOpIC::TypeInfo operand_type = UnaryOpIC::UNINITIALIZED) |
67 : op_(op), | 86 : op_(op), |
68 mode_(mode), | 87 mode_(mode), |
69 operand_type_(operand_type) { | 88 operand_type_(operand_type) { |
70 } | 89 } |
71 | 90 |
(...skipping 245 matching lines...) |
317 // so you don't have to set up the frame. | 336 // so you don't have to set up the frame. |
318 class WriteInt32ToHeapNumberStub : public CodeStub { | 337 class WriteInt32ToHeapNumberStub : public CodeStub { |
319 public: | 338 public: |
320 WriteInt32ToHeapNumberStub(Register the_int, | 339 WriteInt32ToHeapNumberStub(Register the_int, |
321 Register the_heap_number, | 340 Register the_heap_number, |
322 Register scratch, | 341 Register scratch, |
323 Register scratch2) | 342 Register scratch2) |
324 : the_int_(the_int), | 343 : the_int_(the_int), |
325 the_heap_number_(the_heap_number), | 344 the_heap_number_(the_heap_number), |
326 scratch_(scratch), | 345 scratch_(scratch), |
327 sign_(scratch2) { } | 346 sign_(scratch2) { |
| 347 ASSERT(IntRegisterBits::is_valid(the_int_.code())); |
| 348 ASSERT(HeapNumberRegisterBits::is_valid(the_heap_number_.code())); |
| 349 ASSERT(ScratchRegisterBits::is_valid(scratch_.code())); |
| 350 ASSERT(SignRegisterBits::is_valid(sign_.code())); |
| 351 } |
| 352 |
| 353 bool CompilingCallsToThisStubIsGCSafe(); |
| 354 static void GenerateFixedRegStubsAheadOfTime(); |
328 | 355 |
329 private: | 356 private: |
330 Register the_int_; | 357 Register the_int_; |
331 Register the_heap_number_; | 358 Register the_heap_number_; |
332 Register scratch_; | 359 Register scratch_; |
333 Register sign_; | 360 Register sign_; |
334 | 361 |
335 // Minor key encoding in 16 bits. | 362 // Minor key encoding in 16 bits. |
336 class IntRegisterBits: public BitField<int, 0, 4> {}; | 363 class IntRegisterBits: public BitField<int, 0, 4> {}; |
337 class HeapNumberRegisterBits: public BitField<int, 4, 4> {}; | 364 class HeapNumberRegisterBits: public BitField<int, 4, 4> {}; |
(...skipping 30 matching lines...) |
368 Label* not_found); | 395 Label* not_found); |
369 | 396 |
370 private: | 397 private: |
371 Major MajorKey() { return NumberToString; } | 398 Major MajorKey() { return NumberToString; } |
372 int MinorKey() { return 0; } | 399 int MinorKey() { return 0; } |
373 | 400 |
374 void Generate(MacroAssembler* masm); | 401 void Generate(MacroAssembler* masm); |
375 }; | 402 }; |
376 | 403 |
377 | 404 |
| 405 class RecordWriteStub: public CodeStub { |
| 406 public: |
| 407 RecordWriteStub(Register object, |
| 408 Register value, |
| 409 Register address, |
| 410 RememberedSetAction remembered_set_action, |
| 411 SaveFPRegsMode fp_mode) |
| 412 : object_(object), |
| 413 value_(value), |
| 414 address_(address), |
| 415 remembered_set_action_(remembered_set_action), |
| 416 save_fp_regs_mode_(fp_mode), |
| 417 regs_(object, // An input reg. |
| 418 address, // An input reg. |
| 419 value) { // One scratch reg. |
| 420 } |
| 421 |
| 422 enum Mode { |
| 423 STORE_BUFFER_ONLY, |
| 424 INCREMENTAL, |
| 425 INCREMENTAL_COMPACTION |
| 426 }; |
| 427 |
| 428 virtual bool CompilingCallsToThisStubIsGCSafe(); |
| 429 static void GenerateFixedRegStubsAheadOfTime(); |
| 430 virtual bool SometimesSetsUpAFrame() { return false; } |
| 431 |
| 432 static void PatchBranchIntoNop(MacroAssembler* masm, int pos) { |
| 433 const unsigned offset = masm->instr_at(pos) & kImm16Mask; |
| 434 masm->instr_at_put(pos, BNE | (zero_reg.code() << kRsShift) | |
| 435 (zero_reg.code() << kRtShift) | (offset & kImm16Mask)); |
| 436 ASSERT(Assembler::IsBne(masm->instr_at(pos))); |
| 437 } |
| 438 |
| 439 static void PatchNopIntoBranch(MacroAssembler* masm, int pos) { |
| 440 const unsigned offset = masm->instr_at(pos) & kImm16Mask; |
| 441 masm->instr_at_put(pos, BEQ | (zero_reg.code() << kRsShift) | |
| 442 (zero_reg.code() << kRtShift) | (offset & kImm16Mask)); |
| 443 ASSERT(Assembler::IsBeq(masm->instr_at(pos))); |
| 444 } |
| 445 |
| 446 static Mode GetMode(Code* stub) { |
| 447 Instr first_instruction = Assembler::instr_at(stub->instruction_start()); |
| 448 Instr second_instruction = Assembler::instr_at(stub->instruction_start() + |
| 449 2 * Assembler::kInstrSize); |
| 450 |
| 451 if (Assembler::IsBeq(first_instruction)) { |
| 452 return INCREMENTAL; |
| 453 } |
| 454 |
| 455 ASSERT(Assembler::IsBne(first_instruction)); |
| 456 |
| 457 if (Assembler::IsBeq(second_instruction)) { |
| 458 return INCREMENTAL_COMPACTION; |
| 459 } |
| 460 |
| 461 ASSERT(Assembler::IsBne(second_instruction)); |
| 462 |
| 463 return STORE_BUFFER_ONLY; |
| 464 } |
| 465 |
| 466 static void Patch(Code* stub, Mode mode) { |
| 467 MacroAssembler masm(NULL, |
| 468 stub->instruction_start(), |
| 469 stub->instruction_size()); |
| 470 switch (mode) { |
| 471 case STORE_BUFFER_ONLY: |
| 472 ASSERT(GetMode(stub) == INCREMENTAL || |
| 473 GetMode(stub) == INCREMENTAL_COMPACTION); |
| 474 PatchBranchIntoNop(&masm, 0); |
| 475 PatchBranchIntoNop(&masm, 2 * Assembler::kInstrSize); |
| 476 break; |
| 477 case INCREMENTAL: |
| 478 ASSERT(GetMode(stub) == STORE_BUFFER_ONLY); |
| 479 PatchNopIntoBranch(&masm, 0); |
| 480 break; |
| 481 case INCREMENTAL_COMPACTION: |
| 482 ASSERT(GetMode(stub) == STORE_BUFFER_ONLY); |
| 483 PatchNopIntoBranch(&masm, 2 * Assembler::kInstrSize); |
| 484 break; |
| 485 } |
| 486 ASSERT(GetMode(stub) == mode); |
| 487 CPU::FlushICache(stub->instruction_start(), 4 * Assembler::kInstrSize); |
| 488 } |
| 489 |
| 490 private: |
| 491 // This is a helper class for freeing up 3 scratch registers. The input is |
| 492 // two registers that must be preserved and one scratch register provided by |
| 493 // the caller. |
| 494 class RegisterAllocation { |
| 495 public: |
| 496 RegisterAllocation(Register object, |
| 497 Register address, |
| 498 Register scratch0) |
| 499 : object_(object), |
| 500 address_(address), |
| 501 scratch0_(scratch0) { |
| 502 ASSERT(!AreAliased(scratch0, object, address, no_reg)); |
| 503 scratch1_ = GetRegThatIsNotOneOf(object_, address_, scratch0_); |
| 504 } |
| 505 |
| 506 void Save(MacroAssembler* masm) { |
| 507 ASSERT(!AreAliased(object_, address_, scratch1_, scratch0_)); |
| 508 // We don't have to save scratch0_ because it was given to us as |
| 509 // a scratch register. |
| 510 masm->push(scratch1_); |
| 511 } |
| 512 |
| 513 void Restore(MacroAssembler* masm) { |
| 514 masm->pop(scratch1_); |
| 515 } |
| 516 |
| 517 // If we have to call into C then we need to save and restore all caller- |
| 518 // saved registers that were not already preserved. The scratch registers |
| 519 // will be restored by other means so we don't bother pushing them here. |
| 520 void SaveCallerSaveRegisters(MacroAssembler* masm, SaveFPRegsMode mode) { |
| 521 masm->MultiPush((kJSCallerSaved | ra.bit()) & ~scratch1_.bit()); |
| 522 if (mode == kSaveFPRegs) { |
| 523 CpuFeatures::Scope scope(FPU); |
| 524 masm->MultiPushFPU(kCallerSavedFPU); |
| 525 } |
| 526 } |
| 527 |
| 528 inline void RestoreCallerSaveRegisters(MacroAssembler* masm, |
| 529 SaveFPRegsMode mode) { |
| 530 if (mode == kSaveFPRegs) { |
| 531 CpuFeatures::Scope scope(FPU); |
| 532 masm->MultiPopFPU(kCallerSavedFPU); |
| 533 } |
| 534 masm->MultiPop((kJSCallerSaved | ra.bit()) & ~scratch1_.bit()); |
| 535 } |
| 536 |
| 537 inline Register object() { return object_; } |
| 538 inline Register address() { return address_; } |
| 539 inline Register scratch0() { return scratch0_; } |
| 540 inline Register scratch1() { return scratch1_; } |
| 541 |
| 542 private: |
| 543 Register object_; |
| 544 Register address_; |
| 545 Register scratch0_; |
| 546 Register scratch1_; |
| 547 |
| 548 Register GetRegThatIsNotOneOf(Register r1, |
| 549 Register r2, |
| 550 Register r3) { |
| 551 for (int i = 0; i < Register::kNumAllocatableRegisters; i++) { |
| 552 Register candidate = Register::FromAllocationIndex(i); |
| 553 if (candidate.is(r1)) continue; |
| 554 if (candidate.is(r2)) continue; |
| 555 if (candidate.is(r3)) continue; |
| 556 return candidate; |
| 557 } |
| 558 UNREACHABLE(); |
| 559 return no_reg; |
| 560 } |
| 561 friend class RecordWriteStub; |
| 562 }; |
| 563 |
| 564 enum OnNoNeedToInformIncrementalMarker { |
| 565 kReturnOnNoNeedToInformIncrementalMarker, |
| 566 kUpdateRememberedSetOnNoNeedToInformIncrementalMarker |
| 567 }; |
| 568 |
| 569 void Generate(MacroAssembler* masm); |
| 570 void GenerateIncremental(MacroAssembler* masm, Mode mode); |
| 571 void CheckNeedsToInformIncrementalMarker( |
| 572 MacroAssembler* masm, |
| 573 OnNoNeedToInformIncrementalMarker on_no_need, |
| 574 Mode mode); |
| 575 void InformIncrementalMarker(MacroAssembler* masm, Mode mode); |
| 576 |
| 577 Major MajorKey() { return RecordWrite; } |
| 578 |
| 579 int MinorKey() { |
| 580 return ObjectBits::encode(object_.code()) | |
| 581 ValueBits::encode(value_.code()) | |
| 582 AddressBits::encode(address_.code()) | |
| 583 RememberedSetActionBits::encode(remembered_set_action_) | |
| 584 SaveFPRegsModeBits::encode(save_fp_regs_mode_); |
| 585 } |
| 586 |
| 587 bool MustBeInStubCache() { |
| 588 // All stubs must be registered in the stub cache, |
| 589 // otherwise IncrementalMarker would not be able to |
| 590 // find and patch them. |
| 591 return true; |
| 592 } |
| 593 |
| 594 void Activate(Code* code) { |
| 595 code->GetHeap()->incremental_marking()->ActivateGeneratedStub(code); |
| 596 } |
| 597 |
| 598 class ObjectBits: public BitField<int, 0, 5> {}; |
| 599 class ValueBits: public BitField<int, 5, 5> {}; |
| 600 class AddressBits: public BitField<int, 10, 5> {}; |
| 601 class RememberedSetActionBits: public BitField<RememberedSetAction, 15, 1> {}; |
| 602 class SaveFPRegsModeBits: public BitField<SaveFPRegsMode, 16, 1> {}; |
| 603 |
| 604 Register object_; |
| 605 Register value_; |
| 606 Register address_; |
| 607 RememberedSetAction remembered_set_action_; |
| 608 SaveFPRegsMode save_fp_regs_mode_; |
| 609 Label slow_; |
| 610 RegisterAllocation regs_; |
| 611 }; |
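[Editor's note] On the patching scheme above: PatchBranchIntoNop and PatchNopIntoBranch only swap the opcode field, since `bne zero_reg, zero_reg` never branches (an effective nop) while `beq zero_reg, zero_reg` always branches, and both keep the original 16-bit offset. A self-contained sketch of that transformation is below, using the architectural MIPS opcode values rather than V8's assembler constants; the helper name is hypothetical.

    #include <cstdint>

    constexpr uint32_t kOpBeq = 0x04u << 26;  // MIPS I-type opcode for BEQ
    constexpr uint32_t kOpBne = 0x05u << 26;  // MIPS I-type opcode for BNE
    constexpr uint32_t kImm16 = 0xFFFFu;      // 16-bit branch-offset field

    // rs and rt are both zero_reg (register 0) in the stub, so rebuilding
    // the instruction is just "new opcode + preserved offset".
    static uint32_t SwapBranchOpcode(uint32_t instr, uint32_t new_opcode) {
      return new_opcode | (instr & kImm16);
    }

GetMode above then recovers the current mode by reading those two patch sites back.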
| 612 |
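[Editor's note] For the MinorKey layout above, the three register codes occupy 5 bits each and the two flags one bit each. A standalone sketch of the same packing, with a hypothetical helper name and plain shifts standing in for the BitField templates:

    #include <cstdint>

    // Sketch only: reproduces the BitField layout declared in the stub.
    static uint32_t EncodeRecordWriteMinorKey(int object_code, int value_code,
                                              int address_code,
                                              int remembered_set_action,  // 0 or 1
                                              int save_fp_mode) {         // 0 or 1
      return (uint32_t(object_code) & 0x1F)                     // bits 0..4:   ObjectBits
           | ((uint32_t(value_code) & 0x1F) << 5)               // bits 5..9:   ValueBits
           | ((uint32_t(address_code) & 0x1F) << 10)            // bits 10..14: AddressBits
           | ((uint32_t(remembered_set_action) & 0x1) << 15)    // bit 15
           | ((uint32_t(save_fp_mode) & 0x1) << 16);            // bit 16
    }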
| 613 |
378 // Enter C code from generated RegExp code in a way that allows | 614 // Enter C code from generated RegExp code in a way that allows |
379 // the C code to fix the return address in case of a GC. | 615 // the C code to fix the return address in case of a GC. |
380 // Currently only needed on ARM and MIPS. | 616 // Currently only needed on ARM and MIPS. |
381 class RegExpCEntryStub: public CodeStub { | 617 class RegExpCEntryStub: public CodeStub { |
382 public: | 618 public: |
383 RegExpCEntryStub() {} | 619 RegExpCEntryStub() {} |
384 virtual ~RegExpCEntryStub() {} | 620 virtual ~RegExpCEntryStub() {} |
385 void Generate(MacroAssembler* masm); | 621 void Generate(MacroAssembler* masm); |
386 | 622 |
387 private: | 623 private: |
(...skipping 212 matching lines...) |
600 | 836 |
601 class LookupModeBits: public BitField<LookupMode, 0, 1> {}; | 837 class LookupModeBits: public BitField<LookupMode, 0, 1> {}; |
602 | 838 |
603 LookupMode mode_; | 839 LookupMode mode_; |
604 }; | 840 }; |
605 | 841 |
606 | 842 |
607 } } // namespace v8::internal | 843 } } // namespace v8::internal |
608 | 844 |
609 #endif // V8_MIPS_CODE_STUBS_ARM_H_ | 845 #endif // V8_MIPS_CODE_STUBS_ARM_H_ |