Chromium Code Reviews

Diff (shown unified below): src/arm/macro-assembler-arm.cc

Issue 8139027: Version 3.6.5 (Closed) Base URL: http://v8.googlecode.com/svn/trunk/
Patch Set: '' Created 9 years, 2 months ago
 // Copyright 2011 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 24 matching lines...)
 #include "codegen.h"
 #include "debug.h"
 #include "runtime.h"

 namespace v8 {
 namespace internal {

 MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size)
     : Assembler(arg_isolate, buffer, size),
       generating_stub_(false),
-      allow_stub_calls_(true) {
+      allow_stub_calls_(true),
+      has_frame_(false) {
   if (isolate() != NULL) {
     code_object_ = Handle<Object>(isolate()->heap()->undefined_value(),
                                   isolate());
   }
 }


 // We always generate ARM code, never Thumb code, even if V8 is compiled to
 // Thumb, so we require interworking support.
 #if defined(__thumb__) && !defined(USE_THUMB_INTERWORK)
(...skipping 343 matching lines...)
 }


 void MacroAssembler::StoreRoot(Register source,
                                Heap::RootListIndex index,
                                Condition cond) {
   str(source, MemOperand(roots, index << kPointerSizeLog2), cond);
 }


-void MacroAssembler::RecordWriteHelper(Register object,
-                                       Register address,
-                                       Register scratch) {
-  if (emit_debug_code()) {
-    // Check that the object is not in new space.
-    Label not_in_new_space;
-    InNewSpace(object, scratch, ne, &not_in_new_space);
-    Abort("new-space object passed to RecordWriteHelper");
-    bind(&not_in_new_space);
-  }
-
-  // Calculate page address.
-  Bfc(object, 0, kPageSizeBits);
-
-  // Calculate region number.
-  Ubfx(address, address, Page::kRegionSizeLog2,
-       kPageSizeBits - Page::kRegionSizeLog2);
-
-  // Mark region dirty.
-  ldr(scratch, MemOperand(object, Page::kDirtyFlagOffset));
-  mov(ip, Operand(1));
-  orr(scratch, scratch, Operand(ip, LSL, address));
-  str(scratch, MemOperand(object, Page::kDirtyFlagOffset));
-}
-
-
 void MacroAssembler::InNewSpace(Register object,
                                 Register scratch,
                                 Condition cond,
                                 Label* branch) {
   ASSERT(cond == eq || cond == ne);
   and_(scratch, object, Operand(ExternalReference::new_space_mask(isolate())));
   cmp(scratch, Operand(ExternalReference::new_space_start(isolate())));
   b(cond, branch);
 }

-// Will clobber 4 registers: object, offset, scratch, ip. The
-// register 'object' contains a heap object pointer. The heap object
-// tag is shifted away.
-void MacroAssembler::RecordWrite(Register object,
-                                 Operand offset,
-                                 Register scratch0,
-                                 Register scratch1) {
-  // The compiled code assumes that record write doesn't change the
-  // context register, so we check that none of the clobbered
-  // registers are cp.
-  ASSERT(!object.is(cp) && !scratch0.is(cp) && !scratch1.is(cp));
-
+void MacroAssembler::RecordWriteField(
+    Register object,
+    int offset,
+    Register value,
+    Register dst,
+    LinkRegisterStatus lr_status,
+    SaveFPRegsMode save_fp,
+    RememberedSetAction remembered_set_action,
+    SmiCheck smi_check) {
+  // First, check if a write barrier is even needed. The tests below
+  // catch stores of Smis.
   Label done;

-  // First, test that the object is not in the new space. We cannot set
-  // region marks for new space pages.
-  InNewSpace(object, scratch0, eq, &done);
+  // Skip barrier if writing a smi.
+  if (smi_check == INLINE_SMI_CHECK) {
+    JumpIfSmi(value, &done);
+  }
+
+  // Although the object register is tagged, the offset is relative to the
+  // start of the object, so the offset must be a multiple of kPointerSize.
+  ASSERT(IsAligned(offset, kPointerSize));

-  // Add offset into the object.
-  add(scratch0, object, offset);
+  add(dst, object, Operand(offset - kHeapObjectTag));
+  if (emit_debug_code()) {
+    Label ok;
+    tst(dst, Operand((1 << kPointerSizeLog2) - 1));
+    b(eq, &ok);
+    stop("Unaligned cell in write barrier");
+    bind(&ok);
+  }

-  // Record the actual write.
-  RecordWriteHelper(object, scratch0, scratch1);
+  RecordWrite(object,
+              dst,
+              value,
+              lr_status,
+              save_fp,
+              remembered_set_action,
+              OMIT_SMI_CHECK);

   bind(&done);

-  // Clobber all input registers when running with the debug-code flag
+  // Clobber clobbered input registers when running with the debug-code flag
   // turned on to provoke errors.
   if (emit_debug_code()) {
-    mov(object, Operand(BitCast<int32_t>(kZapValue)));
-    mov(scratch0, Operand(BitCast<int32_t>(kZapValue)));
-    mov(scratch1, Operand(BitCast<int32_t>(kZapValue)));
+    mov(value, Operand(BitCast<int32_t>(kZapValue + 4)));
+    mov(dst, Operand(BitCast<int32_t>(kZapValue + 8)));
   }
 }

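The slot-address arithmetic above is easy to miss: the object register holds a tagged pointer, so the add() subtracts the tag. A standalone sketch of the computation, assuming only V8's kHeapObjectTag of 1 (the other names and values are illustrative):

    #include <cassert>
    #include <cstdint>

    int main() {
      const intptr_t kHeapObjectTag = 1;   // low bit set on heap pointers
      const intptr_t kPointerSize = 4;     // 32-bit ARM word size
      intptr_t object = 0x20000 + kHeapObjectTag;  // a tagged heap pointer
      intptr_t offset = 3 * kPointerSize;  // field offset from object start
      // add(dst, object, Operand(offset - kHeapObjectTag)) computes the
      // untagged slot address:
      intptr_t dst = object + offset - kHeapObjectTag;
      // The emit_debug_code() block then verifies pointer alignment.
      assert(dst % kPointerSize == 0);
      return 0;
    }
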
 // Will clobber 4 registers: object, address, scratch, ip. The
 // register 'object' contains a heap object pointer. The heap object
 // tag is shifted away.
 void MacroAssembler::RecordWrite(Register object,
                                  Register address,
-                                 Register scratch) {
+                                 Register value,
+                                 LinkRegisterStatus lr_status,
+                                 SaveFPRegsMode fp_mode,
+                                 RememberedSetAction remembered_set_action,
+                                 SmiCheck smi_check) {
   // The compiled code assumes that record write doesn't change the
   // context register, so we check that none of the clobbered
   // registers are cp.
-  ASSERT(!object.is(cp) && !address.is(cp) && !scratch.is(cp));
+  ASSERT(!address.is(cp) && !value.is(cp));

   Label done;

-  // First, test that the object is not in the new space. We cannot set
-  // region marks for new space pages.
-  InNewSpace(object, scratch, eq, &done);
+  if (smi_check == INLINE_SMI_CHECK) {
+    ASSERT_EQ(0, kSmiTag);
+    tst(value, Operand(kSmiTagMask));
+    b(eq, &done);
+  }
+
+  CheckPageFlag(value,
+                value,  // Used as scratch.
+                MemoryChunk::kPointersToHereAreInterestingMask,
+                eq,
+                &done);
+  CheckPageFlag(object,
+                value,  // Used as scratch.
+                MemoryChunk::kPointersFromHereAreInterestingMask,
+                eq,
+                &done);

   // Record the actual write.
-  RecordWriteHelper(object, address, scratch);
+  if (lr_status == kLRHasNotBeenSaved) {
+    push(lr);
+  }
+  RecordWriteStub stub(object, value, address, remembered_set_action, fp_mode);
+  CallStub(&stub);
+  if (lr_status == kLRHasNotBeenSaved) {
+    pop(lr);
+  }

   bind(&done);

-  // Clobber all input registers when running with the debug-code flag
+  // Clobber clobbered registers when running with the debug-code flag
   // turned on to provoke errors.
   if (emit_debug_code()) {
-    mov(object, Operand(BitCast<int32_t>(kZapValue)));
-    mov(address, Operand(BitCast<int32_t>(kZapValue)));
-    mov(scratch, Operand(BitCast<int32_t>(kZapValue)));
+    mov(address, Operand(BitCast<int32_t>(kZapValue + 12)));
+    mov(value, Operand(BitCast<int32_t>(kZapValue + 16)));
   }
 }

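The shape of the new barrier is: bail out early whenever the store cannot matter to the collector. A rough C++ model of the filtering, with assumed meanings for the two page flags (the real MemoryChunk bit layout differs):

    #include <cstdio>

    struct Page {
      bool pointers_to_here_interesting;    // set e.g. for new-space pages
      bool pointers_from_here_interesting;  // set for pages being tracked
    };

    // Mirrors the INLINE_SMI_CHECK test and the two CheckPageFlag calls.
    bool NeedsBarrier(bool value_is_smi, const Page& value_page,
                      const Page& object_page) {
      if (value_is_smi) return false;  // smis are not heap pointers
      if (!value_page.pointers_to_here_interesting) return false;
      if (!object_page.pointers_from_here_interesting) return false;
      return true;  // only now is RecordWriteStub called
    }

    int main() {
      Page new_space{true, true}, old_space{false, true};
      printf("%d\n", NeedsBarrier(false, new_space, old_space));  // 1
      printf("%d\n", NeedsBarrier(true, new_space, old_space));   // 0
      return 0;
    }
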
+void MacroAssembler::RememberedSetHelper(Register object,  // For debug tests.
+                                         Register address,
+                                         Register scratch,
+                                         SaveFPRegsMode fp_mode,
+                                         RememberedSetFinalAction and_then) {
+  Label done;
+  if (FLAG_debug_code) {
+    Label ok;
+    JumpIfNotInNewSpace(object, scratch, &ok);
+    stop("Remembered set pointer is in new space");
+    bind(&ok);
+  }
+  // Load store buffer top.
+  ExternalReference store_buffer =
+      ExternalReference::store_buffer_top(isolate());
+  mov(ip, Operand(store_buffer));
+  ldr(scratch, MemOperand(ip));
+  // Store pointer to buffer and increment buffer top.
+  str(address, MemOperand(scratch, kPointerSize, PostIndex));
+  // Write back new top of buffer.
+  str(scratch, MemOperand(ip));
+  // Check for end of buffer and call the stub on overflow.
+  tst(scratch, Operand(StoreBuffer::kStoreBufferOverflowBit));
+  if (and_then == kFallThroughAtEnd) {
+    b(eq, &done);
+  } else {
+    ASSERT(and_then == kReturnAtEnd);
+    Ret(ne);
+  }
+  push(lr);
+  StoreBufferOverflowStub store_buffer_overflow =
+      StoreBufferOverflowStub(fp_mode);
+  CallStub(&store_buffer_overflow);
+  pop(lr);
+  bind(&done);
+  if (and_then == kReturnAtEnd) {
+    Ret();
+  }
+}

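RememberedSetHelper is a bump-pointer append: store the slot address through the buffer top with a post-indexed str, write the top back, and call the overflow stub when a limit bit appears in the top pointer. A minimal model using an explicit index instead of the real address-bit trick (kBufferSlots here is a stand-in; the real StoreBuffer detects overflow via kStoreBufferOverflowBit in the top pointer's address):

    #include <cstdint>
    #include <cstdio>

    const int kBufferSlots = 4;  // stand-in; the real buffer is much larger
    uintptr_t buffer[kBufferSlots];
    int top = 0;

    // Models: str address, [scratch], #kPointerSize (PostIndex); then
    // tst scratch, #kStoreBufferOverflowBit.
    bool RememberSlot(uintptr_t slot_address) {
      buffer[top++] = slot_address;
      return top == kBufferSlots;  // true => call StoreBufferOverflowStub
    }

    int main() {
      for (uintptr_t s = 0x1000; s <= 0x4000; s += 0x1000) {
        if (RememberSlot(s)) printf("overflow at slot %#lx\n", (unsigned long)s);
      }
      return 0;
    }
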
 // Push and pop all registers that can hold pointers.
 void MacroAssembler::PushSafepointRegisters() {
   // Safepoints expect a block of contiguous register values starting with r0:
   ASSERT(((1 << kNumSafepointSavedRegisters) - 1) == kSafepointSavedRegisters);
   // Safepoints expect a block of kNumSafepointRegisters values on the
   // stack, so adjust the stack for unsaved registers.
(...skipping 434 matching lines...)
   }
 }


 void MacroAssembler::InvokeCode(Register code,
                                 const ParameterCount& expected,
                                 const ParameterCount& actual,
                                 InvokeFlag flag,
                                 const CallWrapper& call_wrapper,
                                 CallKind call_kind) {
+  // You can't call a function without a valid frame.
+  ASSERT(flag == JUMP_FUNCTION || has_frame());
+
   Label done;

   InvokePrologue(expected, actual, Handle<Code>::null(), code, &done, flag,
                  call_wrapper, call_kind);
   if (flag == CALL_FUNCTION) {
     call_wrapper.BeforeCall(CallSize(code));
     SetCallKind(r5, call_kind);
     Call(code);
     call_wrapper.AfterCall();
   } else {
     ASSERT(flag == JUMP_FUNCTION);
     SetCallKind(r5, call_kind);
     Jump(code);
   }

   // Continue here if InvokePrologue does handle the invocation due to
   // mismatched parameter counts.
   bind(&done);
 }


 void MacroAssembler::InvokeCode(Handle<Code> code,
                                 const ParameterCount& expected,
                                 const ParameterCount& actual,
                                 RelocInfo::Mode rmode,
                                 InvokeFlag flag,
                                 CallKind call_kind) {
+  // You can't call a function without a valid frame.
+  ASSERT(flag == JUMP_FUNCTION || has_frame());
+
   Label done;

   InvokePrologue(expected, actual, code, no_reg, &done, flag,
                  NullCallWrapper(), call_kind);
   if (flag == CALL_FUNCTION) {
     SetCallKind(r5, call_kind);
     Call(code, rmode);
   } else {
     SetCallKind(r5, call_kind);
     Jump(code, rmode);
   }

   // Continue here if InvokePrologue does handle the invocation due to
   // mismatched parameter counts.
   bind(&done);
 }


 void MacroAssembler::InvokeFunction(Register fun,
                                     const ParameterCount& actual,
                                     InvokeFlag flag,
                                     const CallWrapper& call_wrapper,
                                     CallKind call_kind) {
+  // You can't call a function without a valid frame.
+  ASSERT(flag == JUMP_FUNCTION || has_frame());
+
   // Contract with called JS functions requires that function is passed in r1.
   ASSERT(fun.is(r1));

   Register expected_reg = r2;
   Register code_reg = r3;

   ldr(code_reg, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
   ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
   ldr(expected_reg,
       FieldMemOperand(code_reg,
                       SharedFunctionInfo::kFormalParameterCountOffset));
   mov(expected_reg, Operand(expected_reg, ASR, kSmiTagSize));
   ldr(code_reg,
       FieldMemOperand(r1, JSFunction::kCodeEntryOffset));

   ParameterCount expected(expected_reg);
   InvokeCode(code_reg, expected, actual, flag, call_wrapper, call_kind);
 }


 void MacroAssembler::InvokeFunction(JSFunction* function,
                                     const ParameterCount& actual,
                                     InvokeFlag flag,
                                     CallKind call_kind) {
+  // You can't call a function without a valid frame.
+  ASSERT(flag == JUMP_FUNCTION || has_frame());
+
   ASSERT(function->is_compiled());

   // Get the function and setup the context.
   mov(r1, Operand(Handle<JSFunction>(function)));
   ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));

   // Invoke the cached code.
   Handle<Code> code(function->code());
   ParameterCount expected(function->shared()->formal_parameter_count());
   if (V8::UseCrankshaft()) {
(...skipping 35 matching lines...)

   ldr(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
   ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
   tst(scratch, Operand(kIsNotStringMask));
   b(ne, fail);
 }


 #ifdef ENABLE_DEBUGGER_SUPPORT
 void MacroAssembler::DebugBreak() {
-  ASSERT(allow_stub_calls());
   mov(r0, Operand(0, RelocInfo::NONE));
   mov(r1, Operand(ExternalReference(Runtime::kDebugBreak, isolate())));
   CEntryStub ces(1);
+  ASSERT(AllowThisStubCall(&ces));
   Call(ces.GetCode(), RelocInfo::DEBUG_BREAK);
 }
 #endif


 void MacroAssembler::PushTryHandler(CodeLocation try_location,
                                     HandlerType type) {
   // Adjust this code if not the case.
   STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
   STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0 * kPointerSize);
(...skipping 679 matching lines...)
                               Heap::RootListIndex index) {
   ASSERT(!obj.is(ip));
   LoadRoot(ip, index);
   cmp(obj, ip);
 }


 void MacroAssembler::CheckFastElements(Register map,
                                        Register scratch,
                                        Label* fail) {
-  STATIC_ASSERT(FAST_ELEMENTS == 0);
+  STATIC_ASSERT(FAST_SMI_ONLY_ELEMENTS == 0);
+  STATIC_ASSERT(FAST_ELEMENTS == 1);
   ldrb(scratch, FieldMemOperand(map, Map::kBitField2Offset));
   cmp(scratch, Operand(Map::kMaximumBitField2FastElementValue));
   b(hi, fail);
 }


+void MacroAssembler::CheckFastObjectElements(Register map,
+                                             Register scratch,
+                                             Label* fail) {
+  STATIC_ASSERT(FAST_SMI_ONLY_ELEMENTS == 0);
+  STATIC_ASSERT(FAST_ELEMENTS == 1);
+  ldrb(scratch, FieldMemOperand(map, Map::kBitField2Offset));
+  cmp(scratch, Operand(Map::kMaximumBitField2FastSmiOnlyElementValue));
+  b(ls, fail);
+  cmp(scratch, Operand(Map::kMaximumBitField2FastElementValue));
+  b(hi, fail);
+}
+
+
+void MacroAssembler::CheckFastSmiOnlyElements(Register map,
+                                              Register scratch,
+                                              Label* fail) {
+  STATIC_ASSERT(FAST_SMI_ONLY_ELEMENTS == 0);
+  ldrb(scratch, FieldMemOperand(map, Map::kBitField2Offset));
+  cmp(scratch, Operand(Map::kMaximumBitField2FastSmiOnlyElementValue));
+  b(hi, fail);
+}

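These checks classify a map's elements kind with one or two unsigned comparisons against per-kind maxima of the bit_field2 byte, relying on the ordering asserted above. A simplified predicate model that ignores the other bits packed into bit_field2:

    #include <cassert>

    enum ElementsKind { FAST_SMI_ONLY_ELEMENTS = 0, FAST_ELEMENTS = 1, OTHER = 2 };

    bool IsFastSmiOnly(ElementsKind k) { return k <= FAST_SMI_ONLY_ELEMENTS; }
    bool IsFastObject(ElementsKind k) {  // the b(ls)/b(hi) pair above
      return k > FAST_SMI_ONLY_ELEMENTS && k <= FAST_ELEMENTS;
    }
    bool IsFast(ElementsKind k) { return k <= FAST_ELEMENTS; }

    int main() {
      assert(IsFast(FAST_SMI_ONLY_ELEMENTS) && IsFast(FAST_ELEMENTS));
      assert(!IsFast(OTHER));
      assert(IsFastObject(FAST_ELEMENTS) && !IsFastObject(FAST_SMI_ONLY_ELEMENTS));
      return 0;
    }
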
 void MacroAssembler::CheckMap(Register obj,
                               Register scratch,
                               Handle<Map> map,
                               Label* fail,
                               SmiCheckType smi_check_type) {
   if (smi_check_type == DO_SMI_CHECK) {
     JumpIfSmi(obj, fail);
   }
   ldr(scratch, FieldMemOperand(obj, HeapObject::kMapOffset));
   mov(ip, Operand(map));
(...skipping 75 matching lines...)
   // in initial map.
   bind(&non_instance);
   ldr(result, FieldMemOperand(result, Map::kConstructorOffset));

   // All done.
   bind(&done);
 }


 void MacroAssembler::CallStub(CodeStub* stub, Condition cond) {
-  ASSERT(allow_stub_calls());  // Stub calls are not allowed in some stubs.
+  ASSERT(AllowThisStubCall(stub));  // Stub calls are not allowed in some stubs.
   Call(stub->GetCode(), RelocInfo::CODE_TARGET, kNoASTId, cond);
 }


 MaybeObject* MacroAssembler::TryCallStub(CodeStub* stub, Condition cond) {
-  ASSERT(allow_stub_calls());  // Stub calls are not allowed in some stubs.
+  ASSERT(AllowThisStubCall(stub));  // Stub calls are not allowed in some stubs.
   Object* result;
   { MaybeObject* maybe_result = stub->TryGetCode();
     if (!maybe_result->ToObject(&result)) return maybe_result;
   }
   Handle<Code> code(Code::cast(result));
   Call(code, RelocInfo::CODE_TARGET, kNoASTId, cond);
   return result;
 }


 void MacroAssembler::TailCallStub(CodeStub* stub, Condition cond) {
-  ASSERT(allow_stub_calls());  // Stub calls are not allowed in some stubs.
+  ASSERT(allow_stub_calls_ || stub->CompilingCallsToThisStubIsGCSafe());
   Jump(stub->GetCode(), RelocInfo::CODE_TARGET, cond);
 }


 MaybeObject* MacroAssembler::TryTailCallStub(CodeStub* stub, Condition cond) {
-  ASSERT(allow_stub_calls());  // Stub calls are not allowed in some stubs.
   Object* result;
   { MaybeObject* maybe_result = stub->TryGetCode();
     if (!maybe_result->ToObject(&result)) return maybe_result;
   }
   Jump(Handle<Code>(Code::cast(result)), RelocInfo::CODE_TARGET, cond);
   return result;
 }


 static int AddressOffset(ExternalReference ref0, ExternalReference ref1) {
(...skipping 82 matching lines...)
   mov(r0, Operand(ExternalReference::isolate_address()));
   CallCFunction(
       ExternalReference::delete_handle_scope_extensions(isolate()), 1);
   mov(r0, r4);
   jmp(&leave_exit_frame);

   return result;
 }


+bool MacroAssembler::AllowThisStubCall(CodeStub* stub) {
+  if (!has_frame_ && stub->SometimesSetsUpAFrame()) return false;
+  return allow_stub_calls_ || stub->CompilingCallsToThisStubIsGCSafe();
+}

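AllowThisStubCall is the new gating predicate in ordinary terms: a stub call is refused when the stub might set up a frame while the assembler has none, and is otherwise allowed if stub calls are generally permitted or this particular stub is GC-safe. The same logic as plain C++ (names shortened for illustration):

    // Direct transcription of the predicate above.
    bool AllowStubCall(bool has_frame, bool stub_sets_up_frame,
                       bool allow_stub_calls, bool stub_is_gc_safe) {
      if (!has_frame && stub_sets_up_frame) return false;
      return allow_stub_calls || stub_is_gc_safe;
    }
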
 void MacroAssembler::IllegalOperation(int num_arguments) {
   if (num_arguments > 0) {
     add(sp, sp, Operand(num_arguments * kPointerSize));
   }
   LoadRoot(r0, Heap::kUndefinedValueRootIndex);
 }


 void MacroAssembler::IndexFromHash(Register hash, Register index) {
   // If the hash field contains an array index pick it out. The assert checks
(...skipping 375 matching lines...)

 void MacroAssembler::CallRuntime(Runtime::FunctionId fid, int num_arguments) {
   CallRuntime(Runtime::FunctionForId(fid), num_arguments);
 }


 void MacroAssembler::CallRuntimeSaveDoubles(Runtime::FunctionId id) {
   const Runtime::Function* function = Runtime::FunctionForId(id);
   mov(r0, Operand(function->nargs));
   mov(r1, Operand(ExternalReference(function, isolate())));
-  CEntryStub stub(1);
-  stub.SaveDoubles();
+  CEntryStub stub(1, kSaveFPRegs);
   CallStub(&stub);
 }


 void MacroAssembler::CallExternalReference(const ExternalReference& ext,
                                            int num_arguments) {
   mov(r0, Operand(num_arguments));
   mov(r1, Operand(ext));

   CEntryStub stub(1);
(...skipping 52 matching lines...)
 #endif
   mov(r1, Operand(builtin));
   CEntryStub stub(1);
   return TryTailCallStub(&stub);
 }


 void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper) {
+  // You can't call a builtin without a valid frame.
+  ASSERT(flag == JUMP_FUNCTION || has_frame());
+
   GetBuiltinEntry(r2, id);
   if (flag == CALL_FUNCTION) {
     call_wrapper.BeforeCall(CallSize(r2));
     SetCallKind(r5, CALL_AS_METHOD);
     Call(r2);
     call_wrapper.AfterCall();
   } else {
     ASSERT(flag == JUMP_FUNCTION);
     SetCallKind(r5, CALL_AS_METHOD);
     Jump(r2);
(...skipping 111 matching lines...)
   // from the real pointer as a smi.
   intptr_t p1 = reinterpret_cast<intptr_t>(msg);
   intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
   ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
 #ifdef DEBUG
   if (msg != NULL) {
     RecordComment("Abort message: ");
     RecordComment(msg);
   }
 #endif
-  // Disable stub call restrictions to always allow calls to abort.
-  AllowStubCallsScope allow_scope(this, true);

   mov(r0, Operand(p0));
   push(r0);
   mov(r0, Operand(Smi::FromInt(p1 - p0)));
   push(r0);
-  CallRuntime(Runtime::kAbort, 2);
+  // Disable stub call restrictions to always allow calls to abort.
+  if (!has_frame_) {
+    // We don't actually want to generate a pile of code for this, so just
+    // claim there is a stack frame, without generating one.
+    FrameScope scope(this, StackFrame::NONE);
+    CallRuntime(Runtime::kAbort, 2);
+  } else {
+    CallRuntime(Runtime::kAbort, 2);
+  }
   // will not return here
   if (is_const_pool_blocked()) {
     // If the calling code cares about the exact number of
     // instructions generated, we insert padding here to keep the size
     // of the Abort macro constant.
     static const int kExpectedAbortInstructions = 10;
     int abort_instructions = InstructionsGeneratedSince(&abort_start);
     ASSERT(abort_instructions <= kExpectedAbortInstructions);
     while (abort_instructions++ < kExpectedAbortInstructions) {
       nop();
(...skipping 280 matching lines...)
   b(eq, &done);
   bind(&byte_loop_1);
   ldrb(scratch, MemOperand(src, 1, PostIndex));
   strb(scratch, MemOperand(dst, 1, PostIndex));
   sub(length, length, Operand(1), SetCC);
   b(ne, &byte_loop_1);
   bind(&done);
 }


+void MacroAssembler::InitializeFieldsWithFiller(Register start_offset,
+                                                Register end_offset,
+                                                Register filler) {
+  Label loop, entry;
+  b(&entry);
+  bind(&loop);
+  str(filler, MemOperand(start_offset, kPointerSize, PostIndex));
+  bind(&entry);
+  cmp(start_offset, end_offset);
+  b(lt, &loop);
+}

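The filler loop is the canonical bottom-tested loop: branch to the comparison first, so an empty range stores nothing, then store with post-increment until start reaches end. Its direct C++ equivalent:

    // start/end are untagged field addresses; filler is the fill word.
    void FillFields(void** start, void** end, void* filler) {
      while (start < end) {
        *start++ = filler;  // str filler, [start_offset], #4 (PostIndex)
      }
    }
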
 void MacroAssembler::CountLeadingZeros(Register zeros,   // Answer.
                                        Register source,  // Input.
                                        Register scratch) {
   ASSERT(!zeros.is(source) || !source.is(scratch));
   ASSERT(!zeros.is(scratch));
   ASSERT(!scratch.is(ip));
   ASSERT(!source.is(ip));
   ASSERT(!zeros.is(ip));
 #ifdef CAN_USE_ARMV5_INSTRUCTIONS
   clz(zeros, source);  // This instruction is only supported after ARM5.
(...skipping 139 matching lines...)
   } else {
     Move(r2, reg);
     vmov(r0, r1, dreg);
   }
 }

 void MacroAssembler::CallCFunction(ExternalReference function,
                                    int num_reg_arguments,
                                    int num_double_arguments) {
-  CallCFunctionHelper(no_reg,
-                      function,
-                      ip,
-                      num_reg_arguments,
-                      num_double_arguments);
+  mov(ip, Operand(function));
+  CallCFunctionHelper(ip, num_reg_arguments, num_double_arguments);
 }


 void MacroAssembler::CallCFunction(Register function,
-                                   Register scratch,
                                    int num_reg_arguments,
                                    int num_double_arguments) {
-  CallCFunctionHelper(function,
-                      ExternalReference::the_hole_value_location(isolate()),
-                      scratch,
-                      num_reg_arguments,
-                      num_double_arguments);
+  CallCFunctionHelper(function, num_reg_arguments, num_double_arguments);
 }


 void MacroAssembler::CallCFunction(ExternalReference function,
                                    int num_arguments) {
   CallCFunction(function, num_arguments, 0);
 }


 void MacroAssembler::CallCFunction(Register function,
-                                   Register scratch,
                                    int num_arguments) {
-  CallCFunction(function, scratch, num_arguments, 0);
+  CallCFunction(function, num_arguments, 0);
 }


 void MacroAssembler::CallCFunctionHelper(Register function,
-                                         ExternalReference function_reference,
-                                         Register scratch,
                                          int num_reg_arguments,
                                          int num_double_arguments) {
+  ASSERT(has_frame());
   // Make sure that the stack is aligned before calling a C function unless
   // running in the simulator. The simulator has its own alignment check which
   // provides more information.
 #if defined(V8_HOST_ARCH_ARM)
   if (emit_debug_code()) {
     int frame_alignment = OS::ActivationFrameAlignment();
     int frame_alignment_mask = frame_alignment - 1;
     if (frame_alignment > kPointerSize) {
       ASSERT(IsPowerOf2(frame_alignment));
       Label alignment_as_expected;
       tst(sp, Operand(frame_alignment_mask));
       b(eq, &alignment_as_expected);
       // Don't use Check here, as it will call Runtime_Abort possibly
       // re-entering here.
       stop("Unexpected alignment");
       bind(&alignment_as_expected);
     }
   }
 #endif

   // Just call directly. The function called cannot cause a GC, or
   // allow preemption, so the return address in the link register
   // stays correct.
-  if (function.is(no_reg)) {
-    mov(scratch, Operand(function_reference));
-    function = scratch;
-  }
   Call(function);
   int stack_passed_arguments = CalculateStackPassedWords(
       num_reg_arguments, num_double_arguments);
   if (ActivationFrameAlignment() > kPointerSize) {
     ldr(sp, MemOperand(sp, stack_passed_arguments * kPointerSize));
   } else {
     add(sp, sp, Operand(stack_passed_arguments * sizeof(kPointerSize)));
   }
 }

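The debug-mode alignment check in CallCFunctionHelper is a single tst of the stack pointer against frame_alignment - 1. The same test in C++ (8-byte alignment is typical for the ARM EABI, but the real value comes from OS::ActivationFrameAlignment()):

    #include <cassert>
    #include <cstdint>

    bool StackAligned(uintptr_t sp, uintptr_t frame_alignment) {
      // ASSERT(IsPowerOf2(frame_alignment)) in the original.
      assert((frame_alignment & (frame_alignment - 1)) == 0);
      return (sp & (frame_alignment - 1)) == 0;  // tst sp, #mask
    }

    int main() {
      assert(StackAligned(0x7ffff000, 8));
      assert(!StackAligned(0x7ffff004, 8));
      return 0;
    }
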
(...skipping 11 matching lines...)
     // Result was clobbered. Restore it.
     ldr(result, MemOperand(ldr_location));
   }
   // Get the address of the constant.
   and_(result, result, Operand(kLdrOffsetMask));
   add(result, ldr_location, Operand(result));
   add(result, result, Operand(kPCRegOffset));
 }

+void MacroAssembler::CheckPageFlag(
+    Register object,
+    Register scratch,
+    int mask,
+    Condition cc,
+    Label* condition_met) {
+  and_(scratch, object, Operand(~Page::kPageAlignmentMask));
+  ldr(scratch, MemOperand(scratch, MemoryChunk::kFlagsOffset));
+  tst(scratch, Operand(mask));
+  b(cc, condition_met);
+}

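CheckPageFlag relies on pages being power-of-two aligned: masking off the low bits of any interior address yields the MemoryChunk header, whose flags word sits at a fixed offset. A sketch with an assumed 1 MB page size (the real kPageSizeBits may differ):

    #include <cassert>
    #include <cstdint>

    const uintptr_t kPageSizeBits = 20;  // assumed
    const uintptr_t kPageAlignmentMask = (uintptr_t(1) << kPageSizeBits) - 1;

    // and_(scratch, object, Operand(~Page::kPageAlignmentMask));
    uintptr_t ChunkHeader(uintptr_t addr) { return addr & ~kPageAlignmentMask; }

    int main() {
      // Any two addresses on the same page resolve to the same header, from
      // which the real code loads MemoryChunk::kFlagsOffset and tests the mask.
      assert(ChunkHeader(0x12345678) == ChunkHeader(0x12300000));
      assert(ChunkHeader(0x12345678) != ChunkHeader(0x12400000));
      return 0;
    }
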
+void MacroAssembler::JumpIfBlack(Register object,
+                                 Register scratch0,
+                                 Register scratch1,
+                                 Label* on_black) {
+  HasColor(object, scratch0, scratch1, on_black, 1, 0);  // kBlackBitPattern.
+  ASSERT(strcmp(Marking::kBlackBitPattern, "10") == 0);
+}
+
+
+void MacroAssembler::HasColor(Register object,
+                              Register bitmap_scratch,
+                              Register mask_scratch,
+                              Label* has_color,
+                              int first_bit,
+                              int second_bit) {
+  ASSERT(!AreAliased(object, bitmap_scratch, mask_scratch, no_reg));
+
+  GetMarkBits(object, bitmap_scratch, mask_scratch);
+
+  Label other_color, word_boundary;
+  ldr(ip, MemOperand(bitmap_scratch, MemoryChunk::kHeaderSize));
+  tst(ip, Operand(mask_scratch));
+  b(first_bit == 1 ? eq : ne, &other_color);
+  // Shift left 1 by adding.
+  add(mask_scratch, mask_scratch, Operand(mask_scratch), SetCC);
+  b(eq, &word_boundary);
+  tst(ip, Operand(mask_scratch));
+  b(second_bit == 1 ? ne : eq, has_color);
+  jmp(&other_color);
+
+  bind(&word_boundary);
+  ldr(ip, MemOperand(bitmap_scratch, MemoryChunk::kHeaderSize + kPointerSize));
+  tst(ip, Operand(1));
+  b(second_bit == 1 ? ne : eq, has_color);
+  bind(&other_color);
+}
+
+
+// Detect some, but not all, common pointer-free objects. This is used by the
+// incremental write barrier which doesn't care about oddballs (they are always
+// marked black immediately so this code is not hit).
+void MacroAssembler::JumpIfDataObject(Register value,
+                                      Register scratch,
+                                      Label* not_data_object) {
+  Label is_data_object;
+  ldr(scratch, FieldMemOperand(value, HeapObject::kMapOffset));
+  CompareRoot(scratch, Heap::kHeapNumberMapRootIndex);
+  b(eq, &is_data_object);
+  ASSERT(kIsIndirectStringTag == 1 && kIsIndirectStringMask == 1);
+  ASSERT(kNotStringTag == 0x80 && kIsNotStringMask == 0x80);
+  // If it's a string and it's not a cons string then it's an object containing
+  // no GC pointers.
+  ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
+  tst(scratch, Operand(kIsIndirectStringMask | kIsNotStringMask));
+  b(ne, not_data_object);
+  bind(&is_data_object);
+}
+
+
+void MacroAssembler::GetMarkBits(Register addr_reg,
+                                 Register bitmap_reg,
+                                 Register mask_reg) {
+  ASSERT(!AreAliased(addr_reg, bitmap_reg, mask_reg, no_reg));
+  and_(bitmap_reg, addr_reg, Operand(~Page::kPageAlignmentMask));
+  Ubfx(mask_reg, addr_reg, kPointerSizeLog2, Bitmap::kBitsPerCellLog2);
+  const int kLowBits = kPointerSizeLog2 + Bitmap::kBitsPerCellLog2;
+  Ubfx(ip, addr_reg, kLowBits, kPageSizeBits - kLowBits);
+  add(bitmap_reg, bitmap_reg, Operand(ip, LSL, kPointerSizeLog2));
+  mov(ip, Operand(1));
+  mov(mask_reg, Operand(ip, LSL, mask_reg));
+}

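GetMarkBits splits an address into a bit position and a cell index within the page's mark bitmap: one mark bit per pointer-sized word, 32 bits per cell. A worked model of the two Ubfx extractions (constants assumed for 32-bit ARM; the real kPageSizeBits may differ):

    #include <cstdint>
    #include <cstdio>

    const int kPointerSizeLog2 = 2;   // 4-byte words
    const int kBitsPerCellLog2 = 5;   // 32 mark bits per cell
    const int kPageSizeBits = 20;     // assumed page size

    int main() {
      uintptr_t addr = 0x12345678;
      // Ubfx(mask_reg, addr, kPointerSizeLog2, kBitsPerCellLog2)
      uint32_t bit = (addr >> kPointerSizeLog2) & ((1u << kBitsPerCellLog2) - 1);
      // Ubfx(ip, addr, kLowBits, kPageSizeBits - kLowBits)
      const int kLowBits = kPointerSizeLog2 + kBitsPerCellLog2;
      uint32_t cell = (addr >> kLowBits) & ((1u << (kPageSizeBits - kLowBits)) - 1);
      uint32_t mask = 1u << bit;  // mov(mask_reg, Operand(ip, LSL, mask_reg))
      printf("cell %u, mask 0x%08x\n", cell, mask);
      return 0;
    }
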
+void MacroAssembler::EnsureNotWhite(
+    Register value,
+    Register bitmap_scratch,
+    Register mask_scratch,
+    Register load_scratch,
+    Label* value_is_white_and_not_data) {
+  ASSERT(!AreAliased(value, bitmap_scratch, mask_scratch, ip));
+  GetMarkBits(value, bitmap_scratch, mask_scratch);
+
+  // If the value is black or grey we don't need to do anything.
+  ASSERT(strcmp(Marking::kWhiteBitPattern, "00") == 0);
+  ASSERT(strcmp(Marking::kBlackBitPattern, "10") == 0);
+  ASSERT(strcmp(Marking::kGreyBitPattern, "11") == 0);
+  ASSERT(strcmp(Marking::kImpossibleBitPattern, "01") == 0);
+
+  Label done;
+
+  // Since both black and grey have a 1 in the first position and white does
+  // not have a 1 there we only need to check one bit.
+  ldr(load_scratch, MemOperand(bitmap_scratch, MemoryChunk::kHeaderSize));
+  tst(mask_scratch, load_scratch);
+  b(ne, &done);
+
+  if (FLAG_debug_code) {
+    // Check for impossible bit pattern.
+    Label ok;
+    // LSL may overflow, making the check conservative.
+    tst(load_scratch, Operand(mask_scratch, LSL, 1));
+    b(eq, &ok);
+    stop("Impossible marking bit pattern");
+    bind(&ok);
+  }
+
+  // Value is white. We check whether it is data that doesn't need scanning.
+  // Currently only checks for HeapNumber and non-cons strings.
+  Register map = load_scratch;  // Holds map while checking type.
+  Register length = load_scratch;  // Holds length of object after testing type.
+  Label is_data_object;
+
+  // Check for heap-number.
+  ldr(map, FieldMemOperand(value, HeapObject::kMapOffset));
+  CompareRoot(map, Heap::kHeapNumberMapRootIndex);
+  mov(length, Operand(HeapNumber::kSize), LeaveCC, eq);
+  b(eq, &is_data_object);
+
+  // Check for strings.
+  ASSERT(kIsIndirectStringTag == 1 && kIsIndirectStringMask == 1);
+  ASSERT(kNotStringTag == 0x80 && kIsNotStringMask == 0x80);
+  // If it's a string and it's not a cons string then it's an object containing
+  // no GC pointers.
+  Register instance_type = load_scratch;
+  ldrb(instance_type, FieldMemOperand(map, Map::kInstanceTypeOffset));
+  tst(instance_type, Operand(kIsIndirectStringMask | kIsNotStringMask));
+  b(ne, value_is_white_and_not_data);
+  // It's a non-indirect (non-cons and non-slice) string.
+  // If it's external, the length is just ExternalString::kSize.
+  // Otherwise it's String::kHeaderSize + string->length() * (1 or 2).
+  // External strings are the only ones with the kExternalStringTag bit
+  // set.
+  ASSERT_EQ(0, kSeqStringTag & kExternalStringTag);
+  ASSERT_EQ(0, kConsStringTag & kExternalStringTag);
+  tst(instance_type, Operand(kExternalStringTag));
+  mov(length, Operand(ExternalString::kSize), LeaveCC, ne);
+  b(ne, &is_data_object);
+
+  // Sequential string, either ASCII or UC16.
+  // For ASCII (char-size of 1) we shift the smi tag away to get the length.
+  // For UC16 (char-size of 2) we just leave the smi tag in place, thereby
+  // getting the length multiplied by 2.
+  ASSERT(kAsciiStringTag == 4 && kStringEncodingMask == 4);
+  ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
+  ldr(ip, FieldMemOperand(value, String::kLengthOffset));
+  tst(instance_type, Operand(kStringEncodingMask));
+  mov(ip, Operand(ip, LSR, 1), LeaveCC, ne);
+  add(length, ip, Operand(SeqString::kHeaderSize + kObjectAlignmentMask));
+  and_(length, length, Operand(~kObjectAlignmentMask));
+
+  bind(&is_data_object);
+  // Value is a data object, and it is white. Mark it black. Since we know
+  // that the object is white we can make it black by flipping one bit.
+  ldr(ip, MemOperand(bitmap_scratch, MemoryChunk::kHeaderSize));
+  orr(ip, ip, Operand(mask_scratch));
+  str(ip, MemOperand(bitmap_scratch, MemoryChunk::kHeaderSize));
+
+  and_(bitmap_scratch, bitmap_scratch, Operand(~Page::kPageAlignmentMask));
+  ldr(ip, MemOperand(bitmap_scratch, MemoryChunk::kLiveBytesOffset));
+  add(ip, ip, Operand(length));
+  str(ip, MemOperand(bitmap_scratch, MemoryChunk::kLiveBytesOffset));
+
+  bind(&done);
+}

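EnsureNotWhite leans on the color encoding spelled out in the ASSERTs: two mark bits per object, white = 00, black = 10, grey = 11, and 01 impossible. Because black and grey share a 1 in the first bit, "not white" is a one-bit test. A tiny model (which bit is "first" is a modeling choice here):

    #include <cassert>

    enum Color { kWhite = 0x0, kBlack = 0x1, kGrey = 0x3 };  // bit 0 = first bit

    bool IsWhite(Color c) { return (c & 0x1) == 0; }  // the single tst above

    int main() {
      assert(IsWhite(kWhite));
      assert(!IsWhite(kBlack) && !IsWhite(kGrey));
      return 0;
    }
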
 void MacroAssembler::ClampUint8(Register output_reg, Register input_reg) {
   Usat(output_reg, 8, Operand(input_reg));
 }


 void MacroAssembler::ClampDoubleToUint8(Register result_reg,
                                         DoubleRegister input_reg,
                                         DoubleRegister temp_double_reg) {
   Label above_zero;
   Label done;
(...skipping 29 matching lines...)
                                           Register descriptors) {
   ldr(descriptors,
       FieldMemOperand(map, Map::kInstanceDescriptorsOrBitField3Offset));
   Label not_smi;
   JumpIfNotSmi(descriptors, &not_smi);
   mov(descriptors, Operand(FACTORY->empty_descriptor_array()));
   bind(&not_smi);
 }


+bool AreAliased(Register r1, Register r2, Register r3, Register r4) {
+  if (r1.is(r2)) return true;
+  if (r1.is(r3)) return true;
+  if (r1.is(r4)) return true;
+  if (r2.is(r3)) return true;
+  if (r2.is(r4)) return true;
+  if (r3.is(r4)) return true;
+  return false;
+}
+
+
 CodePatcher::CodePatcher(byte* address, int instructions)
     : address_(address),
       instructions_(instructions),
       size_(instructions * Assembler::kInstrSize),
       masm_(Isolate::Current(), address, size_ + Assembler::kGap) {
   // Create a new macro assembler pointing to the address of the code to patch.
   // The size is adjusted with kGap in order for the assembler to generate size
   // bytes of instructions without failing with buffer size constraints.
   ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
 }
(...skipping 22 matching lines...)
 void CodePatcher::EmitCondition(Condition cond) {
   Instr instr = Assembler::instr_at(masm_.pc_);
   instr = (instr & ~kCondMask) | cond;
   masm_.emit(instr);
 }


 } }  // namespace v8::internal

 #endif  // V8_TARGET_ARCH_ARM
