OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 389 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
400 } | 400 } |
401 | 401 |
402 | 402 |
403 void MacroAssembler::StoreRoot(Register source, | 403 void MacroAssembler::StoreRoot(Register source, |
404 Heap::RootListIndex index, | 404 Heap::RootListIndex index, |
405 Condition cond) { | 405 Condition cond) { |
406 str(source, MemOperand(roots, index << kPointerSizeLog2), cond); | 406 str(source, MemOperand(roots, index << kPointerSizeLog2), cond); |
407 } | 407 } |
408 | 408 |
409 | 409 |
410 void MacroAssembler::RecordWriteHelper(Register object, | |
411 Register address, | |
412 Register scratch) { | |
413 if (emit_debug_code()) { | |
414 // Check that the object is not in new space. | |
415 Label not_in_new_space; | |
416 InNewSpace(object, scratch, ne, &not_in_new_space); | |
417 Abort("new-space object passed to RecordWriteHelper"); | |
418 bind(&not_in_new_space); | |
419 } | |
420 | |
421 // Calculate page address. | |
422 Bfc(object, 0, kPageSizeBits); | |
423 | |
424 // Calculate region number. | |
425 Ubfx(address, address, Page::kRegionSizeLog2, | |
426 kPageSizeBits - Page::kRegionSizeLog2); | |
427 | |
428 // Mark region dirty. | |
429 ldr(scratch, MemOperand(object, Page::kDirtyFlagOffset)); | |
430 mov(ip, Operand(1)); | |
431 orr(scratch, scratch, Operand(ip, LSL, address)); | |
432 str(scratch, MemOperand(object, Page::kDirtyFlagOffset)); | |
433 } | |
434 | |
435 | |
436 void MacroAssembler::InNewSpace(Register object, | 410 void MacroAssembler::InNewSpace(Register object, |
437 Register scratch, | 411 Register scratch, |
438 Condition cond, | 412 Condition cond, |
439 Label* branch) { | 413 Label* branch) { |
440 ASSERT(cond == eq || cond == ne); | 414 ASSERT(cond == eq || cond == ne); |
441 and_(scratch, object, Operand(ExternalReference::new_space_mask(isolate()))); | 415 and_(scratch, object, Operand(ExternalReference::new_space_mask(isolate()))); |
442 cmp(scratch, Operand(ExternalReference::new_space_start(isolate()))); | 416 cmp(scratch, Operand(ExternalReference::new_space_start(isolate()))); |
443 b(cond, branch); | 417 b(cond, branch); |
444 } | 418 } |
445 | 419 |
446 | 420 |
447 // Will clobber 4 registers: object, offset, scratch, ip. The | 421 void MacroAssembler::RecordWriteField( |
448 // register 'object' contains a heap object pointer. The heap object | 422 Register object, |
449 // tag is shifted away. | 423 int offset, |
450 void MacroAssembler::RecordWrite(Register object, | 424 Register value, |
451 Operand offset, | 425 Register dst, |
452 Register scratch0, | 426 LinkRegisterStatus lr_status, |
453 Register scratch1) { | 427 SaveFPRegsMode save_fp, |
454 // The compiled code assumes that record write doesn't change the | 428 RememberedSetAction remembered_set_action, |
455 // context register, so we check that none of the clobbered | 429 SmiCheck smi_check) { |
456 // registers are cp. | 430 // First, check if a write barrier is even needed. The tests below |
457 ASSERT(!object.is(cp) && !scratch0.is(cp) && !scratch1.is(cp)); | 431 // catch stores of Smis. |
458 | |
459 Label done; | 432 Label done; |
460 | 433 |
461 // First, test that the object is not in the new space. We cannot set | 434 // Skip barrier if writing a smi. |
462 // region marks for new space pages. | 435 if (smi_check == INLINE_SMI_CHECK) { |
463 InNewSpace(object, scratch0, eq, &done); | 436 JumpIfSmi(value, &done); |
| 437 } |
464 | 438 |
465 // Add offset into the object. | 439 // Although the object register is tagged, the offset is relative to the start |
466 add(scratch0, object, offset); | 440 // of the object, so the offset must be a multiple of kPointerSize. |
| 441 ASSERT(IsAligned(offset, kPointerSize)); |
467 | 442 |
468 // Record the actual write. | 443 add(dst, object, Operand(offset - kHeapObjectTag)); |
469 RecordWriteHelper(object, scratch0, scratch1); | 444 if (emit_debug_code()) { |
| 445 Label ok; |
| 446 tst(dst, Operand((1 << kPointerSizeLog2) - 1)); |
| 447 b(eq, &ok); |
| 448 stop("Unaligned cell in write barrier"); |
| 449 bind(&ok); |
| 450 } |
| 451 |
| 452 RecordWrite(object, |
| 453 dst, |
| 454 value, |
| 455 lr_status, |
| 456 save_fp, |
| 457 remembered_set_action, |
| 458 OMIT_SMI_CHECK); |
470 | 459 |
471 bind(&done); | 460 bind(&done); |
472 | 461 |
473 // Clobber all input registers when running with the debug-code flag | 462 // Clobber clobbered input registers when running with the debug-code flag |
474 // turned on to provoke errors. | 463 // turned on to provoke errors. |
475 if (emit_debug_code()) { | 464 if (emit_debug_code()) { |
476 mov(object, Operand(BitCast<int32_t>(kZapValue))); | 465 mov(value, Operand(BitCast<int32_t>(kZapValue + 4))); |
477 mov(scratch0, Operand(BitCast<int32_t>(kZapValue))); | 466 mov(dst, Operand(BitCast<int32_t>(kZapValue + 8))); |
478 mov(scratch1, Operand(BitCast<int32_t>(kZapValue))); | |
479 } | 467 } |
480 } | 468 } |
481 | 469 |
482 | 470 |
483 // Will clobber 4 registers: object, address, scratch, ip. The | 471 // Will clobber 4 registers: object, address, scratch, ip. The |
484 // register 'object' contains a heap object pointer. The heap object | 472 // register 'object' contains a heap object pointer. The heap object |
485 // tag is shifted away. | 473 // tag is shifted away. |
486 void MacroAssembler::RecordWrite(Register object, | 474 void MacroAssembler::RecordWrite(Register object, |
487 Register address, | 475 Register address, |
488 Register scratch) { | 476 Register value, |
| 477 LinkRegisterStatus lr_status, |
| 478 SaveFPRegsMode fp_mode, |
| 479 RememberedSetAction remembered_set_action, |
| 480 SmiCheck smi_check) { |
489 // The compiled code assumes that record write doesn't change the | 481 // The compiled code assumes that record write doesn't change the |
490 // context register, so we check that none of the clobbered | 482 // context register, so we check that none of the clobbered |
491 // registers are cp. | 483 // registers are cp. |
492 ASSERT(!object.is(cp) && !address.is(cp) && !scratch.is(cp)); | 484 ASSERT(!address.is(cp) && !value.is(cp)); |
493 | 485 |
494 Label done; | 486 Label done; |
495 | 487 |
496 // First, test that the object is not in the new space. We cannot set | 488 if (smi_check == INLINE_SMI_CHECK) { |
497 // region marks for new space pages. | 489 ASSERT_EQ(0, kSmiTag); |
498 InNewSpace(object, scratch, eq, &done); | 490 tst(value, Operand(kSmiTagMask)); |
| 491 b(eq, &done); |
| 492 } |
| 493 |
| 494 CheckPageFlag(value, |
| 495 value, // Used as scratch. |
| 496 MemoryChunk::kPointersToHereAreInterestingMask, |
| 497 eq, |
| 498 &done); |
| 499 CheckPageFlag(object, |
| 500 value, // Used as scratch. |
| 501 MemoryChunk::kPointersFromHereAreInterestingMask, |
| 502 eq, |
| 503 &done); |
499 | 504 |
500 // Record the actual write. | 505 // Record the actual write. |
501 RecordWriteHelper(object, address, scratch); | 506 if (lr_status == kLRHasNotBeenSaved) { |
| 507 push(lr); |
| 508 } |
| 509 RecordWriteStub stub(object, value, address, remembered_set_action, fp_mode); |
| 510 CallStub(&stub); |
| 511 if (lr_status == kLRHasNotBeenSaved) { |
| 512 pop(lr); |
| 513 } |
502 | 514 |
503 bind(&done); | 515 bind(&done); |
504 | 516 |
505 // Clobber all input registers when running with the debug-code flag | 517 // Clobber clobbered registers when running with the debug-code flag |
506 // turned on to provoke errors. | 518 // turned on to provoke errors. |
507 if (emit_debug_code()) { | 519 if (emit_debug_code()) { |
508 mov(object, Operand(BitCast<int32_t>(kZapValue))); | 520 mov(address, Operand(BitCast<int32_t>(kZapValue + 12))); |
509 mov(address, Operand(BitCast<int32_t>(kZapValue))); | 521 mov(value, Operand(BitCast<int32_t>(kZapValue + 16))); |
510 mov(scratch, Operand(BitCast<int32_t>(kZapValue))); | 522 } |
| 523 } |
| 524 |
| 525 |
| 526 void MacroAssembler::RememberedSetHelper(Register address, |
| 527 Register scratch, |
| 528 SaveFPRegsMode fp_mode, |
| 529 RememberedSetFinalAction and_then) { |
| 530 Label done; |
| 531 // Load store buffer top. |
| 532 ExternalReference store_buffer = |
| 533 ExternalReference::store_buffer_top(isolate()); |
| 534 mov(ip, Operand(store_buffer)); |
| 535 ldr(scratch, MemOperand(ip)); |
| 536 // Store pointer to buffer and increment buffer top. |
| 537 str(address, MemOperand(scratch, kPointerSize, PostIndex)); |
| 538 // Write back new top of buffer. |
| 539 str(scratch, MemOperand(ip)); |
| 540 // Call stub on end of buffer. |
| 541 // Check for end of buffer. |
| 542 tst(scratch, Operand(StoreBuffer::kStoreBufferOverflowBit)); |
| 543 if (and_then == kFallThroughAtEnd) { |
| 544 b(eq, &done); |
| 545 } else { |
| 546 ASSERT(and_then == kReturnAtEnd); |
| 547 Ret(ne); |
| 548 } |
| 549 push(lr); |
| 550 StoreBufferOverflowStub store_buffer_overflow = |
| 551 StoreBufferOverflowStub(fp_mode); |
| 552 CallStub(&store_buffer_overflow); |
| 553 pop(lr); |
| 554 bind(&done); |
| 555 if (and_then == kReturnAtEnd) { |
| 556 Ret(); |
511 } | 557 } |
512 } | 558 } |
513 | 559 |
514 | 560 |
515 // Push and pop all registers that can hold pointers. | 561 // Push and pop all registers that can hold pointers. |
516 void MacroAssembler::PushSafepointRegisters() { | 562 void MacroAssembler::PushSafepointRegisters() { |
517 // Safepoints expect a block of contiguous register values starting with r0: | 563 // Safepoints expect a block of contiguous register values starting with r0: |
518 ASSERT(((1 << kNumSafepointSavedRegisters) - 1) == kSafepointSavedRegisters); | 564 ASSERT(((1 << kNumSafepointSavedRegisters) - 1) == kSafepointSavedRegisters); |
519 // Safepoints expect a block of kNumSafepointRegisters values on the | 565 // Safepoints expect a block of kNumSafepointRegisters values on the |
520 // stack, so adjust the stack for unsaved registers. | 566 // stack, so adjust the stack for unsaved registers. |
(...skipping 1907 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2428 | 2474 |
2429 void MacroAssembler::CallRuntime(Runtime::FunctionId fid, int num_arguments) { | 2475 void MacroAssembler::CallRuntime(Runtime::FunctionId fid, int num_arguments) { |
2430 CallRuntime(Runtime::FunctionForId(fid), num_arguments); | 2476 CallRuntime(Runtime::FunctionForId(fid), num_arguments); |
2431 } | 2477 } |
2432 | 2478 |
2433 | 2479 |
2434 void MacroAssembler::CallRuntimeSaveDoubles(Runtime::FunctionId id) { | 2480 void MacroAssembler::CallRuntimeSaveDoubles(Runtime::FunctionId id) { |
2435 const Runtime::Function* function = Runtime::FunctionForId(id); | 2481 const Runtime::Function* function = Runtime::FunctionForId(id); |
2436 mov(r0, Operand(function->nargs)); | 2482 mov(r0, Operand(function->nargs)); |
2437 mov(r1, Operand(ExternalReference(function, isolate()))); | 2483 mov(r1, Operand(ExternalReference(function, isolate()))); |
2438 CEntryStub stub(1); | 2484 CEntryStub stub(1, kSaveFPRegs); |
2439 stub.SaveDoubles(); | |
2440 CallStub(&stub); | 2485 CallStub(&stub); |
2441 } | 2486 } |
2442 | 2487 |
2443 | 2488 |
2444 void MacroAssembler::CallExternalReference(const ExternalReference& ext, | 2489 void MacroAssembler::CallExternalReference(const ExternalReference& ext, |
2445 int num_arguments) { | 2490 int num_arguments) { |
2446 mov(r0, Operand(num_arguments)); | 2491 mov(r0, Operand(num_arguments)); |
2447 mov(r1, Operand(ext)); | 2492 mov(r1, Operand(ext)); |
2448 | 2493 |
2449 CEntryStub stub(1); | 2494 CEntryStub stub(1); |
(...skipping 756 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
3206 // Result was clobbered. Restore it. | 3251 // Result was clobbered. Restore it. |
3207 ldr(result, MemOperand(ldr_location)); | 3252 ldr(result, MemOperand(ldr_location)); |
3208 } | 3253 } |
3209 // Get the address of the constant. | 3254 // Get the address of the constant. |
3210 and_(result, result, Operand(kLdrOffsetMask)); | 3255 and_(result, result, Operand(kLdrOffsetMask)); |
3211 add(result, ldr_location, Operand(result)); | 3256 add(result, ldr_location, Operand(result)); |
3212 add(result, result, Operand(kPCRegOffset)); | 3257 add(result, result, Operand(kPCRegOffset)); |
3213 } | 3258 } |
3214 | 3259 |
3215 | 3260 |
| 3261 void MacroAssembler::CheckPageFlag( |
| 3262 Register object, |
| 3263 Register scratch, |
| 3264 int mask, |
| 3265 Condition cc, |
| 3266 Label* condition_met) { |
| 3267 and_(scratch, object, Operand(~Page::kPageAlignmentMask)); |
| 3268 ldr(scratch, MemOperand(scratch, MemoryChunk::kFlagsOffset)); |
| 3269 tst(scratch, Operand(mask)); |
| 3270 b(cc, condition_met); |
| 3271 } |
| 3272 |
| 3273 |
| 3274 void MacroAssembler::JumpIfBlack(Register object, |
| 3275 Register scratch0, |
| 3276 Register scratch1, |
| 3277 Label* on_black) { |
| 3278 HasColor(object, scratch0, scratch1, on_black, 1, 0); // kBlackBitPattern. |
| 3279 ASSERT(strcmp(Marking::kBlackBitPattern, "10") == 0); |
| 3280 } |
| 3281 |
| 3282 |
| 3283 void MacroAssembler::HasColor(Register object, |
| 3284 Register bitmap_scratch, |
| 3285 Register mask_scratch, |
| 3286 Label* has_color, |
| 3287 int first_bit, |
| 3288 int second_bit) { |
| 3289 ASSERT(!AreAliased(object, bitmap_scratch, mask_scratch, no_reg)); |
| 3290 |
| 3291 GetMarkBits(object, bitmap_scratch, mask_scratch); |
| 3292 |
| 3293 Label other_color, word_boundary; |
| 3294 ldr(ip, MemOperand(bitmap_scratch, MemoryChunk::kHeaderSize)); |
| 3295 tst(ip, Operand(mask_scratch)); |
| 3296 b(first_bit == 1 ? eq : ne, &other_color); |
| 3297 // Shift left 1 by adding. |
| 3298 add(mask_scratch, mask_scratch, Operand(mask_scratch), SetCC); |
| 3299 b(eq, &word_boundary); |
| 3300 tst(ip, Operand(mask_scratch)); |
| 3301 b(second_bit == 1 ? ne : eq, has_color); |
| 3302 jmp(&other_color); |
| 3303 |
| 3304 bind(&word_boundary); |
| 3305 ldr(ip, MemOperand(bitmap_scratch, MemoryChunk::kHeaderSize + kPointerSize)); |
| 3306 tst(ip, Operand(1)); |
| 3307 b(second_bit == 1 ? ne : eq, has_color); |
| 3308 bind(&other_color); |
| 3309 } |
| 3310 |
| 3311 |
| 3312 // Detect some, but not all, common pointer-free objects. This is used by the |
| 3313 // incremental write barrier which doesn't care about oddballs (they are always |
| 3314 // marked black immediately so this code is not hit). |
| 3315 void MacroAssembler::JumpIfDataObject(Register value, |
| 3316 Register scratch, |
| 3317 Label* not_data_object) { |
| 3318 Label is_data_object; |
| 3319 ldr(scratch, FieldMemOperand(value, HeapObject::kMapOffset)); |
| 3320 CompareRoot(scratch, Heap::kHeapNumberMapRootIndex); |
| 3321 b(eq, &is_data_object); |
| 3322 ASSERT(kIsIndirectStringTag == 1 && kIsIndirectStringMask == 1); |
| 3323 ASSERT(kNotStringTag == 0x80 && kIsNotStringMask == 0x80); |
| 3324 // If it's a string and it's not a cons string then it's an object containing |
| 3325 // no GC pointers. |
| 3326 ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset)); |
| 3327 tst(scratch, Operand(kIsIndirectStringMask | kIsNotStringMask)); |
| 3328 b(ne, not_data_object); |
| 3329 bind(&is_data_object); |
| 3330 } |
| 3331 |
| 3332 |
| 3333 void MacroAssembler::GetMarkBits(Register addr_reg, |
| 3334 Register bitmap_reg, |
| 3335 Register mask_reg) { |
| 3336 ASSERT(!AreAliased(addr_reg, bitmap_reg, mask_reg, no_reg)); |
| 3337 and_(bitmap_reg, addr_reg, Operand(~Page::kPageAlignmentMask)); |
| 3338 Ubfx(mask_reg, addr_reg, kPointerSizeLog2, Bitmap::kBitsPerCellLog2); |
| 3339 const int kLowBits = kPointerSizeLog2 + Bitmap::kBitsPerCellLog2; |
| 3340 Ubfx(ip, addr_reg, kLowBits, kPageSizeBits - kLowBits); |
| 3341 add(bitmap_reg, bitmap_reg, Operand(ip, LSL, kPointerSizeLog2)); |
| 3342 mov(ip, Operand(1)); |
| 3343 mov(mask_reg, Operand(ip, LSL, mask_reg)); |
| 3344 } |
| 3345 |
| 3346 |
| 3347 void MacroAssembler::EnsureNotWhite( |
| 3348 Register value, |
| 3349 Register bitmap_scratch, |
| 3350 Register mask_scratch, |
| 3351 Register load_scratch, |
| 3352 Label* value_is_white_and_not_data) { |
| 3353 ASSERT(!AreAliased(value, bitmap_scratch, mask_scratch, ip)); |
| 3354 GetMarkBits(value, bitmap_scratch, mask_scratch); |
| 3355 |
| 3356 // If the value is black or grey we don't need to do anything. |
| 3357 ASSERT(strcmp(Marking::kWhiteBitPattern, "00") == 0); |
| 3358 ASSERT(strcmp(Marking::kBlackBitPattern, "10") == 0); |
| 3359 ASSERT(strcmp(Marking::kGreyBitPattern, "11") == 0); |
| 3360 ASSERT(strcmp(Marking::kImpossibleBitPattern, "01") == 0); |
| 3361 |
| 3362 Label done; |
| 3363 |
| 3364 // Since both black and grey have a 1 in the first position and white does |
| 3365 // not have a 1 there we only need to check one bit. |
| 3366 ldr(load_scratch, MemOperand(bitmap_scratch, MemoryChunk::kHeaderSize)); |
| 3367 tst(mask_scratch, load_scratch); |
| 3368 b(ne, &done); |
| 3369 |
| 3370 if (FLAG_debug_code) { |
| 3371 // Check for impossible bit pattern. |
| 3372 Label ok; |
| 3373 // LSL may overflow, making the check conservative. |
| 3374 tst(load_scratch, Operand(mask_scratch, LSL, 1)); |
| 3375 b(eq, &ok); |
| 3376 stop("Impossible marking bit pattern"); |
| 3377 bind(&ok); |
| 3378 } |
| 3379 |
| 3380 // Value is white. We check whether it is data that doesn't need scanning. |
| 3381 // Currently only checks for HeapNumber and non-cons strings. |
| 3382 Register map = load_scratch; // Holds map while checking type. |
| 3383 Register length = load_scratch; // Holds length of object after testing type. |
| 3384 Label is_data_object; |
| 3385 |
| 3386 // Check for heap-number |
| 3387 ldr(map, FieldMemOperand(value, HeapObject::kMapOffset)); |
| 3388 CompareRoot(map, Heap::kHeapNumberMapRootIndex); |
| 3389 mov(length, Operand(HeapNumber::kSize), LeaveCC, eq); |
| 3390 b(eq, &is_data_object); |
| 3391 |
| 3392 // Check for strings. |
| 3393 ASSERT(kIsIndirectStringTag == 1 && kIsIndirectStringMask == 1); |
| 3394 ASSERT(kNotStringTag == 0x80 && kIsNotStringMask == 0x80); |
| 3395 // If it's a string and it's not a cons string then it's an object containing |
| 3396 // no GC pointers. |
| 3397 Register instance_type = load_scratch; |
| 3398 ldrb(instance_type, FieldMemOperand(map, Map::kInstanceTypeOffset)); |
| 3399 tst(instance_type, Operand(kIsIndirectStringMask | kIsNotStringMask)); |
| 3400 b(ne, value_is_white_and_not_data); |
| 3401 // It's a non-indirect (non-cons and non-slice) string. |
| 3402 // If it's external, the length is just ExternalString::kSize. |
| 3403 // Otherwise it's String::kHeaderSize + string->length() * (1 or 2). |
| 3404 // External strings are the only ones with the kExternalStringTag bit |
| 3405 // set. |
| 3406 ASSERT_EQ(0, kSeqStringTag & kExternalStringTag); |
| 3407 ASSERT_EQ(0, kConsStringTag & kExternalStringTag); |
| 3408 tst(instance_type, Operand(kExternalStringTag)); |
| 3409 mov(length, Operand(ExternalString::kSize), LeaveCC, ne); |
| 3410 b(ne, &is_data_object); |
| 3411 |
| 3412 // Sequential string, either ASCII or UC16. |
| 3413 // For ASCII (char-size of 1) we shift the smi tag away to get the length. |
| 3414 // For UC16 (char-size of 2) we just leave the smi tag in place, thereby |
| 3415 // getting the length multiplied by 2. |
| 3416 ASSERT(kAsciiStringTag == 4 && kStringEncodingMask == 4); |
| 3417 ASSERT(kSmiTag == 0 && kSmiTagSize == 1); |
| 3418 ldr(ip, FieldMemOperand(value, String::kLengthOffset)); |
| 3419 tst(instance_type, Operand(kStringEncodingMask)); |
| 3420 mov(ip, Operand(ip, LSR, 1), LeaveCC, ne); |
| 3421 add(length, ip, Operand(SeqString::kHeaderSize + kObjectAlignmentMask)); |
| 3422 and_(length, length, Operand(~kObjectAlignmentMask)); |
| 3423 |
| 3424 bind(&is_data_object); |
| 3425 // Value is a data object, and it is white. Mark it black. Since we know |
| 3426 // that the object is white we can make it black by flipping one bit. |
| 3427 ldr(ip, MemOperand(bitmap_scratch, MemoryChunk::kHeaderSize)); |
| 3428 orr(ip, ip, Operand(mask_scratch)); |
| 3429 str(ip, MemOperand(bitmap_scratch, MemoryChunk::kHeaderSize)); |
| 3430 |
| 3431 and_(bitmap_scratch, bitmap_scratch, Operand(~Page::kPageAlignmentMask)); |
| 3432 ldr(ip, MemOperand(bitmap_scratch, MemoryChunk::kLiveBytesOffset)); |
| 3433 add(ip, ip, Operand(length)); |
| 3434 str(ip, MemOperand(bitmap_scratch, MemoryChunk::kLiveBytesOffset)); |
| 3435 |
| 3436 bind(&done); |
| 3437 } |
| 3438 |
| 3439 |
3216 void MacroAssembler::ClampUint8(Register output_reg, Register input_reg) { | 3440 void MacroAssembler::ClampUint8(Register output_reg, Register input_reg) { |
3217 Usat(output_reg, 8, Operand(input_reg)); | 3441 Usat(output_reg, 8, Operand(input_reg)); |
3218 } | 3442 } |
3219 | 3443 |
3220 | 3444 |
3221 void MacroAssembler::ClampDoubleToUint8(Register result_reg, | 3445 void MacroAssembler::ClampDoubleToUint8(Register result_reg, |
3222 DoubleRegister input_reg, | 3446 DoubleRegister input_reg, |
3223 DoubleRegister temp_double_reg) { | 3447 DoubleRegister temp_double_reg) { |
3224 Label above_zero; | 3448 Label above_zero; |
3225 Label done; | 3449 Label done; |
(...skipping 29 matching lines...) Expand all Loading... |
3255 Register descriptors) { | 3479 Register descriptors) { |
3256 ldr(descriptors, | 3480 ldr(descriptors, |
3257 FieldMemOperand(map, Map::kInstanceDescriptorsOrBitField3Offset)); | 3481 FieldMemOperand(map, Map::kInstanceDescriptorsOrBitField3Offset)); |
3258 Label not_smi; | 3482 Label not_smi; |
3259 JumpIfNotSmi(descriptors, &not_smi); | 3483 JumpIfNotSmi(descriptors, &not_smi); |
3260 mov(descriptors, Operand(FACTORY->empty_descriptor_array())); | 3484 mov(descriptors, Operand(FACTORY->empty_descriptor_array())); |
3261 bind(&not_smi); | 3485 bind(&not_smi); |
3262 } | 3486 } |
3263 | 3487 |
3264 | 3488 |
| 3489 bool AreAliased(Register r1, Register r2, Register r3, Register r4) { |
| 3490 if (r1.is(r2)) return true; |
| 3491 if (r1.is(r3)) return true; |
| 3492 if (r1.is(r4)) return true; |
| 3493 if (r2.is(r3)) return true; |
| 3494 if (r2.is(r4)) return true; |
| 3495 if (r3.is(r4)) return true; |
| 3496 return false; |
| 3497 } |
| 3498 |
| 3499 |
3265 CodePatcher::CodePatcher(byte* address, int instructions) | 3500 CodePatcher::CodePatcher(byte* address, int instructions) |
3266 : address_(address), | 3501 : address_(address), |
3267 instructions_(instructions), | 3502 instructions_(instructions), |
3268 size_(instructions * Assembler::kInstrSize), | 3503 size_(instructions * Assembler::kInstrSize), |
3269 masm_(Isolate::Current(), address, size_ + Assembler::kGap) { | 3504 masm_(Isolate::Current(), address, size_ + Assembler::kGap) { |
3270 // Create a new macro assembler pointing to the address of the code to patch. | 3505 // Create a new macro assembler pointing to the address of the code to patch. |
3271 // The size is adjusted with kGap in order for the assembler to generate size | 3506 // The size is adjusted with kGap in order for the assembler to generate size |
3272 // bytes of instructions without failing with buffer size constraints. | 3507 // bytes of instructions without failing with buffer size constraints. |
3273 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap); | 3508 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap); |
3274 } | 3509 } |
(...skipping 22 matching lines...) Expand all Loading... |
3297 void CodePatcher::EmitCondition(Condition cond) { | 3532 void CodePatcher::EmitCondition(Condition cond) { |
3298 Instr instr = Assembler::instr_at(masm_.pc_); | 3533 Instr instr = Assembler::instr_at(masm_.pc_); |
3299 instr = (instr & ~kCondMask) | cond; | 3534 instr = (instr & ~kCondMask) | cond; |
3300 masm_.emit(instr); | 3535 masm_.emit(instr); |
3301 } | 3536 } |
3302 | 3537 |
3303 | 3538 |
3304 } } // namespace v8::internal | 3539 } } // namespace v8::internal |
3305 | 3540 |
3306 #endif // V8_TARGET_ARCH_ARM | 3541 #endif // V8_TARGET_ARCH_ARM |
OLD | NEW |