Chromium Code Reviews

Unified Diff: src/mips/code-stubs-mips.cc

Issue 8106002: MIPS: port Merge experimental/gc branch to the bleeding_edge. (Closed)
Patch Set: Rebased on r9598, greatly simplified. Created 9 years, 2 months ago
// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
(...skipping 868 matching lines...)
    __ sw(v1, FieldMemOperand(heap_number_result, HeapNumber::kExponentOffset));
    __ sw(v0, FieldMemOperand(heap_number_result, HeapNumber::kMantissaOffset));
  }
  // Place heap_number_result in v0 and return to the pushed return address.
  __ mov(v0, heap_number_result);
  __ pop(ra);
  __ Ret();
}


+bool WriteInt32ToHeapNumberStub::CompilingCallsToThisStubIsGCSafe() {
+  // These variants are compiled ahead of time. See next method.
+  if (the_int_.is(a1) &&
+      the_heap_number_.is(v0) &&
+      scratch_.is(a2) &&
+      sign_.is(a3)) {
+    return true;
+  }
+  if (the_int_.is(a2) &&
+      the_heap_number_.is(v0) &&
+      scratch_.is(a3) &&
+      sign_.is(a0)) {
+    return true;
+  }
+  // Other register combinations are generated as and when they are needed,
+  // so it is unsafe to call them from stubs (we can't generate a stub while
+  // we are generating a stub).
+  return false;
+}
+
+
+void WriteInt32ToHeapNumberStub::GenerateFixedRegStubsAheadOfTime() {
+  WriteInt32ToHeapNumberStub stub1(a1, v0, a2, a3);
+  WriteInt32ToHeapNumberStub stub2(a2, v0, a3, a0);
+  // GetCode() is called for its side effect of creating and caching the
+  // code objects; the returned handles are not needed here.
+  stub1.GetCode();
+  stub2.GetCode();
+}
+
+
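Note: the two methods above work as a pair. GenerateFixedRegStubsAheadOfTime materializes code objects for exactly the register combinations that CompilingCallsToThisStubIsGCSafe blesses, so code that is itself being generated can call these stubs without triggering nested stub generation. A minimal caller-side sketch, using the MacroAssembler::CallStub and ASSERT facilities this file already relies on (the call site itself is hypothetical, not part of the patch):

    // Hypothetical call site inside another stub's Generate() method.
    // Only the two register combinations pregenerated above are GC-safe.
    WriteInt32ToHeapNumberStub stub(a1, v0, a2, a3);  // Matches stub1 above.
    ASSERT(stub.CompilingCallsToThisStubIsGCSafe());  // Other combos: false.
    __ CallStub(&stub);  // Reuses the ahead-of-time code object.
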
// See comment for class, this does NOT work for int32's that are in Smi range.
void WriteInt32ToHeapNumberStub::Generate(MacroAssembler* masm) {
  Label max_negative_int;
  // the_int_ has the answer which is a signed int32 but not a Smi.
  // We test for the special value that has a different exponent.
  STATIC_ASSERT(HeapNumber::kSignMask == 0x80000000u);
  // Test sign, and save for later conditionals.
  __ And(sign_, the_int_, Operand(0x80000000u));
  __ Branch(&max_negative_int, eq, the_int_, Operand(0x80000000u));

(...skipping 895 matching lines...)
  __ Push(a3, a2, a1);
  // Patch the caller to an appropriate specialized stub and return the
  // operation result to the caller of the stub.
  __ TailCallExternalReference(
      ExternalReference(IC_Utility(IC::kToBoolean_Patch), masm->isolate()),
      3,
      1);
}


+void StoreBufferOverflowStub::Generate(MacroAssembler* masm) {
+  // We don't allow a GC during a store buffer overflow so there is no need to
+  // store the registers in any particular way, but we do have to store and
+  // restore them.
+  __ MultiPush(kJSCallerSaved | ra.bit());
+  if (save_doubles_ == kSaveFPRegs) {
+    CpuFeatures::Scope scope(FPU);
+    __ MultiPushFPU(kCallerSavedFPU);
+  }
+  const int argument_count = 1;
+  const int fp_argument_count = 0;
+  const Register scratch = a1;
+
+  AllowExternalCallThatCantCauseGC scope(masm);
+  __ PrepareCallCFunction(argument_count, fp_argument_count, scratch);
+  __ li(a0, Operand(ExternalReference::isolate_address()));
+  __ CallCFunction(
+      ExternalReference::store_buffer_overflow_function(masm->isolate()),
+      argument_count);
+  if (save_doubles_ == kSaveFPRegs) {
+    CpuFeatures::Scope scope(FPU);
+    __ MultiPopFPU(kCallerSavedFPU);
+  }
+
+  __ MultiPop(kJSCallerSaved | ra.bit());
+  __ Ret();
+}
+
+
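The save/restore bracketing above relies on V8's RegList convention: a register set is a plain bitmask, so the JS caller-saved registers plus the return address can be pushed and popped as one unit. A sketch of that convention using only the calls visible above (the local variable is illustrative):

    // RegList is a bitmask: each register contributes one bit via bit().
    const RegList to_save = kJSCallerSaved | ra.bit();
    __ MultiPush(to_save);  // Pushes the members in a fixed register order...
    // ...clobbering work happens here...
    __ MultiPop(to_save);   // ...and MultiPop restores them in reverse.
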
void UnaryOpStub::PrintName(StringStream* stream) {
  const char* op_name = Token::Name(op_);
  const char* overwrite_name = NULL;  // Make g++ happy.
  switch (mode_) {
    case UNARY_NO_OVERWRITE: overwrite_name = "Alloc"; break;
    case UNARY_OVERWRITE: overwrite_name = "Overwrite"; break;
  }
  stream->Add("UnaryOpStub_%s_%s_%s",
              op_name,
              overwrite_name,
(...skipping 1663 matching lines...)
}


bool CEntryStub::IsPregenerated() {
  return (!save_doubles_ || ISOLATE->fp_stubs_generated()) &&
          result_size_ == 1;
}


void CodeStub::GenerateStubsAheadOfTime() {
+  WriteInt32ToHeapNumberStub::GenerateFixedRegStubsAheadOfTime();
}


void CodeStub::GenerateFPStubs() {
-  CEntryStub save_doubles(1);
-  save_doubles.SaveDoubles();
+  CEntryStub save_doubles(1, kSaveFPRegs);
  Handle<Code> code = save_doubles.GetCode();
  code->GetIsolate()->set_fp_stubs_generated(true);
}


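Note the API change in GenerateFPStubs: the old one-argument CEntryStub followed by a SaveDoubles() setter becomes a two-argument constructor taking a SaveFPRegsMode. Together with set_fp_stubs_generated(true), this is what lets IsPregenerated above answer true for the save-doubles variant. A sketch of the intended interplay, assuming the constructor form introduced by this patch (the call site is illustrative):

    CEntryStub stub(1, kSaveFPRegs);  // result_size == 1, FP regs saved.
    // After CodeStub::GenerateFPStubs() has run once for this isolate,
    // fp_stubs_generated() is true and the stub counts as pregenerated:
    ASSERT(stub.IsPregenerated());
    __ CallStub(&stub);
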
void CEntryStub::GenerateThrowTOS(MacroAssembler* masm) {
  __ Throw(v0);
}


(...skipping 1302 matching lines...)
  STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
  __ Addu(a1, a1, Operand(2));  // a1 was a smi.

  // a1: number of capture registers
  // subject: subject string
  // Store the capture count.
  __ sll(a2, a1, kSmiTagSize + kSmiShiftSize);  // To smi.
  __ sw(a2, FieldMemOperand(last_match_info_elements,
                            RegExpImpl::kLastCaptureCountOffset));
  // Store last subject and last input.
-  __ mov(a3, last_match_info_elements);  // Moved up to reduce latency.
  __ sw(subject,
        FieldMemOperand(last_match_info_elements,
                        RegExpImpl::kLastSubjectOffset));
-  __ RecordWrite(a3, Operand(RegExpImpl::kLastSubjectOffset), a2, t0);
+  __ mov(a2, subject);
+  __ RecordWriteField(last_match_info_elements,
+                      RegExpImpl::kLastSubjectOffset,
+                      a2,
+                      t3,
+                      kRAHasNotBeenSaved,
+                      kDontSaveFPRegs);
  __ sw(subject,
        FieldMemOperand(last_match_info_elements,
                        RegExpImpl::kLastInputOffset));
-  __ mov(a3, last_match_info_elements);
-  __ RecordWrite(a3, Operand(RegExpImpl::kLastInputOffset), a2, t0);
+  __ RecordWriteField(last_match_info_elements,
+                      RegExpImpl::kLastInputOffset,
+                      subject,
+                      t3,
+                      kRAHasNotBeenSaved,
+                      kDontSaveFPRegs);

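This hunk is the core write-barrier migration in this file: the old RecordWrite(object, Operand(offset), scratch, scratch) form is replaced by RecordWriteField, which additionally tells the macro assembler whether ra has been saved and whether FP registers need preserving, so the incremental-marking barrier can spill state correctly. The general shape, with placeholder names (object, value, scratch, and offset are illustrative; the flags are the ones used above):

    __ sw(value, FieldMemOperand(object, offset));  // Do the store first...
    __ RecordWriteField(object,              // ...then inform the GC barrier:
                        offset,              // byte offset of the field
                        value,               // the value written (clobbered)
                        scratch,             // scratch register
                        kRAHasNotBeenSaved,  // ra is still live here
                        kDontSaveFPRegs);    // no FP state to preserve
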
  // Get the static offsets vector filled by the native regexp code.
  ExternalReference address_of_static_offsets_vector =
      ExternalReference::address_of_static_offsets_vector(masm->isolate());
  __ li(a2, Operand(address_of_static_offsets_vector));

  // a1: number of capture registers
  // a2: offsets vector
  Label next_capture, done;
  // Capture register counter starts from number of capture registers and
(...skipping 2179 matching lines...)
  __ bind(&in_dictionary);
  __ li(result, 1);
  __ Ret();

  __ bind(&not_in_dictionary);
  __ mov(result, zero_reg);
  __ Ret();
}


+struct AheadOfTimeWriteBarrierStubList {
+  Register object, value, address;
+  RememberedSetAction action;
+};
+
+
+struct AheadOfTimeWriteBarrierStubList kAheadOfTime[] = {
+  // TODO(1696): Fill this in for MIPS.
+  // Null termination.
+  { no_reg, no_reg, no_reg, EMIT_REMEMBERED_SET}
+};
+
+
+bool RecordWriteStub::CompilingCallsToThisStubIsGCSafe() {
+  for (AheadOfTimeWriteBarrierStubList* entry = kAheadOfTime;
+       !entry->object.is(no_reg);
+       entry++) {
+    if (object_.is(entry->object) &&
+        value_.is(entry->value) &&
+        address_.is(entry->address) &&
+        remembered_set_action_ == entry->action &&
+        save_fp_regs_mode_ == kDontSaveFPRegs) {
+      return true;
+    }
+  }
+  return true;  // TODO(1696): Should be false.
+}
+
+
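kAheadOfTime is a no_reg-terminated table: the loop above (and GenerateFixedRegStubsAheadOfTime below) walks entries until the object register is no_reg. With only the terminator present, the early-return path never fires, and the trailing `return true` flagged by TODO(1696) makes the check vacuous for now. Once the MIPS register combinations are collected, a populated table would look roughly like this (the registers here are purely illustrative):

    struct AheadOfTimeWriteBarrierStubList kAheadOfTime[] = {
      { a3, t0, t1, EMIT_REMEMBERED_SET },  // object, value, address, action
      // Null termination.
      { no_reg, no_reg, no_reg, EMIT_REMEMBERED_SET }
    };
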
+void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime() {
+  StoreBufferOverflowStub stub1(kDontSaveFPRegs);
+  stub1.GetCode();
+  StoreBufferOverflowStub stub2(kSaveFPRegs);
+  stub2.GetCode();
+}
+
+
+void RecordWriteStub::GenerateFixedRegStubsAheadOfTime() {
+  for (AheadOfTimeWriteBarrierStubList* entry = kAheadOfTime;
+       !entry->object.is(no_reg);
+       entry++) {
+    RecordWriteStub stub(entry->object,
+                         entry->value,
+                         entry->address,
+                         entry->action,
+                         kDontSaveFPRegs);
+    stub.GetCode();
+  }
+}
+
+
+// Takes the input in 3 registers: address_ value_ and object_.  A pointer to
+// the value has just been written into the object, now this stub makes sure
+// we keep the GC informed.  The word in the object where the value has been
+// written is in the address register.
+void RecordWriteStub::Generate(MacroAssembler* masm) {
+  Label skip_to_incremental_noncompacting;
+  Label skip_to_incremental_compacting;
+
+  // The first two branch+nop instructions are generated with labels so as to
+  // get the offset fixed up correctly by the bind(Label*) call.  We patch it
+  // back and forth between a "bne zero_reg, zero_reg, ..." (a nop in this
+  // position) and the "beq zero_reg, zero_reg, ..." when we start and stop
+  // incremental heap marking.
+  // See RecordWriteStub::Patch for details.
+  __ beq(zero_reg, zero_reg, &skip_to_incremental_noncompacting);
+  __ nop();
+  __ beq(zero_reg, zero_reg, &skip_to_incremental_compacting);
+  __ nop();
+
+  if (remembered_set_action_ == EMIT_REMEMBERED_SET) {
+    __ RememberedSetHelper(
+        address_, value_, save_fp_regs_mode_, MacroAssembler::kReturnAtEnd);
+  }
+  __ Ret();
+
+  __ bind(&skip_to_incremental_noncompacting);
+  GenerateIncremental(masm, INCREMENTAL);
+
+  __ bind(&skip_to_incremental_compacting);
+  GenerateIncremental(masm, INCREMENTAL_COMPACTION);
+
+  // Initial mode of the stub is expected to be STORE_BUFFER_ONLY.
+  // Will be checked in IncrementalMarking::ActivateGeneratedStub.
+
+  PatchBranchIntoNop(masm, 0);
+  PatchBranchIntoNop(masm, 2 * Assembler::kInstrSize);
+}
+
+
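The two beq/nop pairs at the top of the stub form a patchable dispatch header: while incremental marking is off, both branches are rewritten into `bne zero_reg, zero_reg` (never taken, so effectively a nop), and activating marking or compaction patches the corresponding instruction back into the always-taken `beq zero_reg, zero_reg`. A sketch of the nop-patching direction, assuming the Assembler's instr_at/instr_at_put accessors and the MIPS instruction-field constants of this era; the real helper is declared in src/mips/code-stubs-mips.h, which this page does not show:

    // Sketch: turn "beq zero_reg, zero_reg, offset" into the never-taken
    // "bne zero_reg, zero_reg, offset", keeping the 16-bit branch offset.
    static void PatchBranchIntoNop(MacroAssembler* masm, int pos) {
      const uint32_t offset = masm->instr_at(pos) & kImm16Mask;
      masm->instr_at_put(pos, BNE | (zero_reg.code() << kRsShift) |
                                    (zero_reg.code() << kRtShift) | offset);
    }
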
+void RecordWriteStub::GenerateIncremental(MacroAssembler* masm, Mode mode) {
+  regs_.Save(masm);
+
+  if (remembered_set_action_ == EMIT_REMEMBERED_SET) {
+    Label dont_need_remembered_set;
+
+    __ lw(regs_.scratch0(), MemOperand(regs_.address(), 0));
+    __ JumpIfNotInNewSpace(regs_.scratch0(),
+                           regs_.scratch0(),
+                           &dont_need_remembered_set);
+
+    __ CheckPageFlag(regs_.object(),
+                     regs_.scratch0(),
+                     1 << MemoryChunk::SCAN_ON_SCAVENGE,
+                     ne,
+                     &dont_need_remembered_set);
+
+    // First notify the incremental marker if necessary, then update the
+    // remembered set.
+    CheckNeedsToInformIncrementalMarker(
+        masm, kUpdateRememberedSetOnNoNeedToInformIncrementalMarker, mode);
+    InformIncrementalMarker(masm, mode);
+    regs_.Restore(masm);
+    __ RememberedSetHelper(
+        address_, value_, save_fp_regs_mode_, MacroAssembler::kReturnAtEnd);
+
+    __ bind(&dont_need_remembered_set);
+  }
+
+  CheckNeedsToInformIncrementalMarker(
+      masm, kReturnOnNoNeedToInformIncrementalMarker, mode);
+  InformIncrementalMarker(masm, mode);
+  regs_.Restore(masm);
+  __ Ret();
+}
+
+
+void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm, Mode mode) {
+  regs_.SaveCallerSaveRegisters(masm, save_fp_regs_mode_);
+  int argument_count = 3;
+  __ PrepareCallCFunction(argument_count, regs_.scratch0());
+  Register address =
+      a0.is(regs_.address()) ? regs_.scratch0() : regs_.address();
+  ASSERT(!address.is(regs_.object()));
+  ASSERT(!address.is(a0));
+  __ Move(address, regs_.address());
+  __ Move(a0, regs_.object());
+  if (mode == INCREMENTAL_COMPACTION) {
+    __ Move(a1, address);
+  } else {
+    ASSERT(mode == INCREMENTAL);
+    __ lw(a1, MemOperand(address, 0));
+  }
+  __ li(a2, Operand(ExternalReference::isolate_address()));
+
+  AllowExternalCallThatCantCauseGC scope(masm);
+  if (mode == INCREMENTAL_COMPACTION) {
+    __ CallCFunction(
+        ExternalReference::incremental_evacuation_record_write_function(
+            masm->isolate()),
+        argument_count);
+  } else {
+    ASSERT(mode == INCREMENTAL);
+    __ CallCFunction(
+        ExternalReference::incremental_marking_record_write_function(
+            masm->isolate()),
+        argument_count);
+  }
+  regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode_);
+}
+
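The argument setup above fixes the C calling convention for both record-write entry points: a0 carries the object, a1 carries the slot address under INCREMENTAL_COMPACTION but the value loaded from the slot under INCREMENTAL, and a2 carries the isolate. Hypothesized C++ signatures implied by that setup (the names are illustrative; the real declarations live in the GC sources, not in this file):

    // INCREMENTAL: a1 holds the value that was stored into the slot.
    void RecordWriteForMarking(HeapObject* object, Object* value,
                               Isolate* isolate);
    // INCREMENTAL_COMPACTION: a1 holds the address of the slot itself.
    void RecordWriteForEvacuation(HeapObject* object, Object** slot,
                                  Isolate* isolate);
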
+
+void RecordWriteStub::CheckNeedsToInformIncrementalMarker(
+    MacroAssembler* masm,
+    OnNoNeedToInformIncrementalMarker on_no_need,
+    Mode mode) {
+  Label on_black;
+  Label need_incremental;
+  Label need_incremental_pop_scratch;
+
+  // Let's look at the color of the object:  If it is not black we don't have
+  // to inform the incremental marker.
+  __ JumpIfBlack(regs_.object(), regs_.scratch0(), regs_.scratch1(), &on_black);
+
+  regs_.Restore(masm);
+  if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
+    __ RememberedSetHelper(
+        address_, value_, save_fp_regs_mode_, MacroAssembler::kReturnAtEnd);
+  } else {
+    __ Ret();
+  }
+
+  __ bind(&on_black);
+
+  // Get the value from the slot.
+  __ lw(regs_.scratch0(), MemOperand(regs_.address(), 0));
+
+  if (mode == INCREMENTAL_COMPACTION) {
+    Label ensure_not_white;
+
+    __ CheckPageFlag(regs_.scratch0(),  // Contains value.
+                     regs_.scratch1(),  // Scratch.
+                     MemoryChunk::kEvacuationCandidateMask,
+                     eq,
+                     &ensure_not_white);
+
+    __ CheckPageFlag(regs_.object(),
+                     regs_.scratch1(),  // Scratch.
+                     MemoryChunk::kSkipEvacuationSlotsRecordingMask,
+                     eq,
+                     &need_incremental);
+
+    __ bind(&ensure_not_white);
+  }
+
+  // We need extra registers for this, so we push the object and the address
+  // register temporarily.
+  __ Push(regs_.object(), regs_.address());
+  __ EnsureNotWhite(regs_.scratch0(),  // The value.
+                    regs_.scratch1(),  // Scratch.
+                    regs_.object(),    // Scratch.
+                    regs_.address(),   // Scratch.
+                    &need_incremental_pop_scratch);
+  __ Pop(regs_.object(), regs_.address());
+
+  regs_.Restore(masm);
+  if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
+    __ RememberedSetHelper(
+        address_, value_, save_fp_regs_mode_, MacroAssembler::kReturnAtEnd);
+  } else {
+    __ Ret();
+  }
+
+  __ bind(&need_incremental_pop_scratch);
+  __ Pop(regs_.object(), regs_.address());
+
+  __ bind(&need_incremental);
+
+  // Fall through when we need to inform the incremental marker.
+}
+
+
#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_MIPS