Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (... 280 unchanged lines skipped ...) | (... 280 unchanged lines skipped ...) |
| 291 __ bind(&true_result); | 291 __ bind(&true_result); |
| 292 __ Set(rax, 1); | 292 __ Set(rax, 1); |
| 293 __ ret(1 * kPointerSize); | 293 __ ret(1 * kPointerSize); |
| 294 __ bind(&false_result); | 294 __ bind(&false_result); |
| 295 __ Set(rax, 0); | 295 __ Set(rax, 0); |
| 296 __ ret(1 * kPointerSize); | 296 __ ret(1 * kPointerSize); |
| 297 } | 297 } |
| 298 | 298 |
| 299 | 299 |
| 300 void StoreBufferOverflowStub::Generate(MacroAssembler* masm) { | 300 void StoreBufferOverflowStub::Generate(MacroAssembler* masm) { |
| 301 // We don't allow a GC during a store buffer overflow so there is no need to | 301 __ PushCallerSaved(save_doubles_); |
| 302 // store the registers in any particular way, but we do have to store and | |
| 303 // restore them. | |
| 304 Register saved_regs[] = | |
| 305 { rax, rcx, rdx, rbx, rbp, rsi, rdi, r8, r9, r10, r11 }; | |
| 306 const int kNumberOfSavedRegs = sizeof(saved_regs) / sizeof(Register); | |
| 307 for (int i = 0; i < kNumberOfSavedRegs; i++) { | |
| 308 __ push(saved_regs[i]); | |
| 309 } | |
| 310 // R12 to r15 are callee save on all platforms. | |
| 311 if (save_doubles_ == kSaveFPRegs) { | |
| 312 CpuFeatures::Scope scope(SSE2); | |
| 313 __ subq(rsp, Immediate(kDoubleSize * XMMRegister::kNumRegisters)); | |
| 314 for (int i = 0; i < XMMRegister::kNumRegisters; i++) { | |
| 315 XMMRegister reg = XMMRegister::from_code(i); | |
| 316 __ movsd(Operand(rsp, i * kDoubleSize), reg); | |
| 317 } | |
| 318 } | |
| 319 const int argument_count = 1; | 302 const int argument_count = 1; |
| 320 __ PrepareCallCFunction(argument_count); | 303 __ PrepareCallCFunction(argument_count); |
| 321 #ifdef _WIN64 | 304 #ifdef _WIN64 |
| 322 __ LoadAddress(rcx, ExternalReference::isolate_address()); | 305 __ LoadAddress(rcx, ExternalReference::isolate_address()); |
| 323 #else | 306 #else |
| 324 __ LoadAddress(rdi, ExternalReference::isolate_address()); | 307 __ LoadAddress(rdi, ExternalReference::isolate_address()); |
| 325 #endif | 308 #endif |
| 326 __ CallCFunction( | 309 __ CallCFunction( |
| 327 ExternalReference::store_buffer_overflow_function(masm->isolate()), | 310 ExternalReference::store_buffer_overflow_function(masm->isolate()), |
| 328 argument_count); | 311 argument_count); |
| 329 if (save_doubles_ == kSaveFPRegs) { | 312 __ PopCallerSaved(save_doubles_); |
| 330 CpuFeatures::Scope scope(SSE2); | |
| 331 for (int i = 0; i < XMMRegister::kNumRegisters; i++) { | |
| 332 XMMRegister reg = XMMRegister::from_code(i); | |
| 333 __ movsd(reg, Operand(rsp, i * kDoubleSize)); | |
| 334 } | |
| 335 __ addq(rsp, Immediate(kDoubleSize * XMMRegister::kNumRegisters)); | |
| 336 } | |
| 337 for (int i = kNumberOfSavedRegs - 1; i >= 0; i--) { | |
| 338 __ pop(saved_regs[i]); | |
| 339 } | |
| 340 __ ret(0); | 313 __ ret(0); |
| 341 } | 314 } |
| 342 | 315 |
| 343 | 316 |
| 344 class FloatingPointHelper : public AllStatic { | 317 class FloatingPointHelper : public AllStatic { |
| 345 public: | 318 public: |
| 346 // Load the operands from rdx and rax into xmm0 and xmm1, as doubles. | 319 // Load the operands from rdx and rax into xmm0 and xmm1, as doubles. |
| 347 // If the operands are not both numbers, jump to not_numbers. | 320 // If the operands are not both numbers, jump to not_numbers. |
| 348 // Leaves rdx and rax unchanged. SmiOperands assumes both are smis. | 321 // Leaves rdx and rax unchanged. SmiOperands assumes both are smis. |
| 349 // NumberOperands assumes both are smis or heap numbers. | 322 // NumberOperands assumes both are smis or heap numbers. |
| (... 4849 unchanged lines skipped ...) | (... 4849 unchanged lines skipped ...) |
| 5199 } | 5172 } |
| 5200 | 5173 |
| 5201 if (remembered_set_action_ == EMIT_REMEMBERED_SET) { | 5174 if (remembered_set_action_ == EMIT_REMEMBERED_SET) { |
| 5202 __ RememberedSetHelper( | 5175 __ RememberedSetHelper( |
| 5203 address_, value_, save_fp_regs_mode_, MacroAssembler::kReturnAtEnd); | 5176 address_, value_, save_fp_regs_mode_, MacroAssembler::kReturnAtEnd); |
| 5204 } else { | 5177 } else { |
| 5205 __ ret(0); | 5178 __ ret(0); |
| 5206 } | 5179 } |
| 5207 | 5180 |
| 5208 __ bind(&skip_non_incremental_part); | 5181 __ bind(&skip_non_incremental_part); |
| 5209 __ int3(); | 5182 GenerateIncremental(masm); |
| 5183 } | |
| 5184 | |
| 5185 | |
| 5186 void RecordWriteStub::GenerateIncremental(MacroAssembler* masm) { | |
| 5187 regs_.Save(masm); | |
| 5188 | |
| 5189 if (remembered_set_action_ == EMIT_REMEMBERED_SET) { | |
| 5190 Label dont_need_remembered_set; | |
| 5191 | |
| 5192 __ movq(regs_.scratch0(), Operand(regs_.address(), 0)); | |
| 5193 __ JumpIfNotInNewSpace(regs_.scratch0(), | |
| 5194 regs_.scratch0(), | |
| 5195 &dont_need_remembered_set); | |
| 5196 | |
| 5197 __ CheckPageFlag(regs_.object(), | |
| 5198 regs_.scratch0(), | |
| 5199 MemoryChunk::SCAN_ON_SCAVENGE, | |
| 5200 not_zero, | |
| 5201 &dont_need_remembered_set); | |
| 5202 | |
| 5203 // First notify the incremental marker if necessary, then update the | |
| 5204 // remembered set. | |
| 5205 CheckNeedsToInformIncrementalMarker( | |
| 5206 masm, kRememberedSetOnNoNeedToInformIncrementalMarker); | |
| 5207 InformIncrementalMarker(masm); | |
| 5208 regs_.Restore(masm); | |
| 5209 __ RememberedSetHelper( | |
| 5210 address_, value_, save_fp_regs_mode_, MacroAssembler::kReturnAtEnd); | |
| 5211 | |
| 5212 __ bind(&dont_need_remembered_set); | |
| 5213 } | |
| 5214 | |
| 5215 CheckNeedsToInformIncrementalMarker( | |
| 5216 masm, kReturnOnNoNeedToInformIncrementalMarker); | |
| 5217 InformIncrementalMarker(masm); | |
| 5218 regs_.Restore(masm); | |
| 5219 __ ret(0); | |
| 5220 } | |
| 5221 | |
| 5222 | |
| 5223 void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm) { | |
| 5224 regs_.SaveCallerSaveRegisters(masm, save_fp_regs_mode_); | |
| 5225 #ifdef _WIN64 | |
| 5226 Register arg3 = r8; | |
| 5227 Register arg2 = rdx; | |
| 5228 Register arg1 = rcx; | |
| 5229 #else | |
| 5230 Register arg3 = rdx; | |
| 5231 Register arg2 = rsi; | |
| 5232 Register arg1 = rdi; | |
| 5233 #endif | |
| 5234 bool save_address = arg1.is(regs_.address()); | |
| 5235 if (save_address) __ push(regs_.address()); | |
|
Lasse Reichstein
2011/06/10 13:55:44
Put braces around then-block.
Don't trust that __
Erik Corry
2011/06/10 21:57:29
Done.
| |
| 5236 __ Move(arg1, regs_.object()); | |
| 5237 if (save_address) { | |
| 5238 __ pop(arg2); | |
| 5239 __ movq(arg2, Operand(arg2, 0)); | |
| 5240 } else { | |
| 5241 __ movq(arg2, Operand(regs_.address(), 0)); | |
| 5242 } | |
| 5243 __ LoadAddress(arg3, ExternalReference::isolate_address()); | |
| 5244 // TODO(gc): Create a fast version of this C function that does not duplicate | |
| 5245 // the checks done in the stub. | |
| 5246 int argument_count = 3; | |
| 5247 __ PrepareCallCFunction(argument_count); | |
| 5248 __ CallCFunction( | |
| 5249 ExternalReference::incremental_marking_record_write_function( | |
| 5250 masm->isolate()), | |
| 5251 argument_count); | |
| 5252 regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode_); | |
| 5253 } | |
| 5254 | |
| 5255 | |
| 5256 void RecordWriteStub::CheckNeedsToInformIncrementalMarker( | |
| 5257 MacroAssembler* masm, | |
| 5258 RecordWriteStub::OnNoNeedToInformIncrementalMarker on_no_need) { | |
| 5259 Label object_is_black; | |
| 5260 | |
| 5261 // Let's look at the color of the object: If it is not black we don't have | |
| 5262 // to inform the incremental marker. | |
| 5263 __ IsBlack(regs_.object(), | |
| 5264 regs_.scratch0(), | |
| 5265 regs_.scratch1(), | |
| 5266 &object_is_black, | |
| 5267 Label::kNear); | |
| 5268 | |
| 5269 regs_.Restore(masm); | |
| 5270 if (on_no_need == kRememberedSetOnNoNeedToInformIncrementalMarker) { | |
| 5271 __ RememberedSetHelper( | |
| 5272 address_, value_, save_fp_regs_mode_, MacroAssembler::kReturnAtEnd); | |
| 5273 } else { | |
| 5274 __ ret(0); | |
| 5275 } | |
| 5276 | |
| 5277 __ bind(&object_is_black); | |
| 5278 | |
| 5279 // TODO(gc): Add call to EnsureNotWhite here. | |
| 5280 // Fall through when we need to inform the incremental marker. | |
| 5210 } | 5281 } |
| 5211 | 5282 |
| 5212 | 5283 |
| 5213 #undef __ | 5284 #undef __ |
| 5214 | 5285 |
| 5215 } } // namespace v8::internal | 5286 } } // namespace v8::internal |
| 5216 | 5287 |
| 5217 #endif // V8_TARGET_ARCH_X64 | 5288 #endif // V8_TARGET_ARCH_X64 |
| OLD | NEW |