Chromium Code Reviews
Diff: src/macro-assembler-arm.cc

Issue 3186: Refactor the enum RelocMode changing the naming scheme from lower case to... (Closed) Base URL: http://v8.googlecode.com/svn/branches/bleeding_edge/
Patch Set: '' Created 12 years, 3 months ago
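
The renaming pattern repeated throughout the diff below replaces the old free-standing RelocMode enum (lower-case values such as code_target and runtime_entry, and free helpers such as is_code_target()) with a mode type scoped inside RelocInfo (upper-case values such as RelocInfo::CODE_TARGET and RelocInfo::RUNTIME_ENTRY, and static predicates such as RelocInfo::IsCodeTarget()). A minimal sketch of the shape of that change, with an illustrative subset of values only (the real declarations live in the relocation-info header, not in this file):

  // Old scheme (illustrative subset).
  enum RelocMode { code_target, runtime_entry, exit_js_frame /* ... */ };
  bool is_code_target(RelocMode rmode);

  // New scheme (illustrative subset): the mode is nested in RelocInfo.
  class RelocInfo {
   public:
    enum Mode { CODE_TARGET, RUNTIME_ENTRY /* ... */ };
    static bool IsCodeTarget(Mode mode);
  };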
 // Copyright 2006-2008 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 // * Redistributions of source code must retain the above copyright
 // notice, this list of conditions and the following disclaimer.
 // * Redistributions in binary form must reproduce the above
 // copyright notice, this list of conditions and the following
 // disclaimer in the documentation and/or other materials provided
(...skipping 66 matching lines...)

 void MacroAssembler::Jump(Register target, Condition cond) {
 #if USE_BX
   bx(target, cond);
 #else
   mov(pc, Operand(target), LeaveCC, cond);
 #endif
 }


-void MacroAssembler::Jump(intptr_t target, RelocMode rmode, Condition cond) {
+void MacroAssembler::Jump(intptr_t target, RelocInfo::Mode rmode,
+                          Condition cond) {
 #if USE_BX
   mov(ip, Operand(target, rmode), LeaveCC, cond);
   bx(ip, cond);
 #else
   mov(pc, Operand(target, rmode), LeaveCC, cond);
 #endif
 }


-void MacroAssembler::Jump(byte* target, RelocMode rmode, Condition cond) {
-  ASSERT(!is_code_target(rmode));
+void MacroAssembler::Jump(byte* target, RelocInfo::Mode rmode,
+                          Condition cond) {
+  ASSERT(!RelocInfo::IsCodeTarget(rmode));
   Jump(reinterpret_cast<intptr_t>(target), rmode, cond);
 }


-void MacroAssembler::Jump(Handle<Code> code, RelocMode rmode, Condition cond) {
-  ASSERT(is_code_target(rmode));
+void MacroAssembler::Jump(Handle<Code> code, RelocInfo::Mode rmode,
+                          Condition cond) {
+  ASSERT(RelocInfo::IsCodeTarget(rmode));
   // 'code' is always generated ARM code, never THUMB code
   Jump(reinterpret_cast<intptr_t>(code.location()), rmode, cond);
 }

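As a usage sketch only (not code from this patch; masm, code_handle, and the register and condition choices are placeholders), callers pick between these overloads as follows: a plain register jump needs no relocation mode, while a jump to a Handle<Code> must pass a mode for which RelocInfo::IsCodeTarget() holds:

  masm->Jump(r2, al);                                   // jump through a register
  masm->Jump(code_handle, RelocInfo::CODE_TARGET, eq);  // conditional jump to a code object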

 void MacroAssembler::Call(Register target, Condition cond) {
 #if USE_BLX
   blx(target, cond);
 #else
   // set lr for return at current pc + 8
   mov(lr, Operand(pc), LeaveCC, cond);
   mov(pc, Operand(target), LeaveCC, cond);
 #endif
 }


-void MacroAssembler::Call(intptr_t target, RelocMode rmode, Condition cond) {
+void MacroAssembler::Call(intptr_t target, RelocInfo::Mode rmode,
+                          Condition cond) {
 #if !defined(__arm__)
-  if (rmode == runtime_entry) {
+  if (rmode == RelocInfo::RUNTIME_ENTRY) {
     mov(r2, Operand(target, rmode), LeaveCC, cond);
     // Set lr for return at current pc + 8.
     mov(lr, Operand(pc), LeaveCC, cond);
     // Emit a ldr<cond> pc, [pc + offset of target in constant pool].
     // Notify the simulator of the transition to C code.
     swi(assembler::arm::call_rt_r2);
   } else {
     // set lr for return at current pc + 8
     mov(lr, Operand(pc), LeaveCC, cond);
     // emit a ldr<cond> pc, [pc + offset of target in constant pool]
     mov(pc, Operand(target, rmode), LeaveCC, cond);
   }
 #else
   // Set lr for return at current pc + 8.
   mov(lr, Operand(pc), LeaveCC, cond);
   // Emit a ldr<cond> pc, [pc + offset of target in constant pool].
   mov(pc, Operand(target, rmode), LeaveCC, cond);
 #endif  // !defined(__arm__)
   // If USE_BLX is defined, we could emit a 'mov ip, target', followed by a
   // 'blx ip'; however, the code would not be shorter than the above sequence
   // and the target address of the call would be referenced by the first
   // instruction rather than the second one, which would make it harder to patch
   // (two instructions before the return address, instead of one).
   ASSERT(kTargetAddrToReturnAddrDist == sizeof(Instr));
 }

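The "return at current pc + 8" comments rely on the ARM convention that reading pc yields the address of the current instruction plus 8 bytes. An illustrative trace of the emitted call sequence (addresses are made up) shows why kTargetAddrToReturnAddrDist is a single instruction, and why a mov ip / blx ip pair would instead place the target-bearing instruction two slots before the return address:

  // addr + 0:  mov lr, pc             ; lr = (addr + 0) + 8 = return address
  // addr + 4:  ldr pc, [pc, #offset]  ; target loaded from the constant pool; this is
  //                                   ; the one instruction before the return address
  // addr + 8:  ...                    ; execution resumes here after the callee returns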

-void MacroAssembler::Call(byte* target, RelocMode rmode, Condition cond) {
-  ASSERT(!is_code_target(rmode));
+void MacroAssembler::Call(byte* target, RelocInfo::Mode rmode,
+                          Condition cond) {
+  ASSERT(!RelocInfo::IsCodeTarget(rmode));
   Call(reinterpret_cast<intptr_t>(target), rmode, cond);
 }


-void MacroAssembler::Call(Handle<Code> code, RelocMode rmode, Condition cond) {
-  ASSERT(is_code_target(rmode));
+void MacroAssembler::Call(Handle<Code> code, RelocInfo::Mode rmode,
+                          Condition cond) {
+  ASSERT(RelocInfo::IsCodeTarget(rmode));
   // 'code' is always generated ARM code, never THUMB code
   Call(reinterpret_cast<intptr_t>(code.location()), rmode, cond);
 }


 void MacroAssembler::Ret() {
 #if USE_BX
   bx(lr);
 #else
   mov(pc, Operand(lr));
(...skipping 154 matching lines...)

   if (!definitely_matches) {
     if (!code_constant.is_null()) {
       mov(r3, Operand(code_constant));
       add(r3, r3, Operand(Code::kHeaderSize - kHeapObjectTag));
     }

     Handle<Code> adaptor =
         Handle<Code>(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline));
     if (flag == CALL_FUNCTION) {
-      Call(adaptor, code_target);
+      Call(adaptor, RelocInfo::CODE_TARGET);
       b(done);
     } else {
-      Jump(adaptor, code_target);
+      Jump(adaptor, RelocInfo::CODE_TARGET);
     }
     bind(&regular_invoke);
   }
 }


 void MacroAssembler::InvokeCode(Register code,
                                 const ParameterCount& expected,
                                 const ParameterCount& actual,
                                 InvokeFlag flag) {
   Label done;

   InvokePrologue(expected, actual, Handle<Code>::null(), code, &done, flag);
   if (flag == CALL_FUNCTION) {
     Call(code);
   } else {
     ASSERT(flag == JUMP_FUNCTION);
     Jump(code);
   }

   // Continue here if InvokePrologue does handle the invocation due to
   // mismatched parameter counts.
   bind(&done);
 }

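A hedged usage sketch of the register flavour of InvokeCode above; the registers and counts are placeholders, and ParameterCount is assumed (as in the shared macro-assembler interface) to accept either an immediate or a register:

  ParameterCount expected(2);   // formal parameter count of the callee (placeholder)
  ParameterCount actual(r0);    // actual argument count, assumed to be in r0 (placeholder)
  masm->InvokeCode(r3, expected, actual, CALL_FUNCTION);  // r3 assumed to hold the code to invoke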

 void MacroAssembler::InvokeCode(Handle<Code> code,
                                 const ParameterCount& expected,
                                 const ParameterCount& actual,
-                                RelocMode rmode,
+                                RelocInfo::Mode rmode,
                                 InvokeFlag flag) {
   Label done;

   InvokePrologue(expected, actual, code, no_reg, &done, flag);
   if (flag == CALL_FUNCTION) {
     Call(code, rmode);
   } else {
     Jump(code, rmode);
   }

(...skipping 219 matching lines...)
   // object.
   ldr(scratch, FieldMemOperand(scratch, JSGlobalObject::kSecurityTokenOffset));
   ldr(ip, FieldMemOperand(holder_reg, JSGlobalObject::kSecurityTokenOffset));
   cmp(scratch, Operand(ip));
   b(ne, miss);
 }


 void MacroAssembler::CallStub(CodeStub* stub) {
   ASSERT(allow_stub_calls());  // stub calls are not allowed in some stubs
-  Call(stub->GetCode(), code_target);
-}
-
-
-void MacroAssembler::CallJSExitStub(CodeStub* stub) {
-  ASSERT(allow_stub_calls());  // stub calls are not allowed in some stubs
-  Call(stub->GetCode(), exit_js_frame);
+  Call(stub->GetCode(), RelocInfo::CODE_TARGET);
 }


 void MacroAssembler::StubReturn(int argc) {
   ASSERT(argc >= 1 && generating_stub());
   if (argc > 1)
     add(sp, sp, Operand((argc - 1) * kPointerSize));
   Ret();
 }

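A usage sketch for CallStub; CEntryStub is chosen only because it already appears in this file, and masm stands in for the surrounding MacroAssembler:

  CEntryStub stub;
  masm->CallStub(&stub);  // emits Call(stub.GetCode(), RelocInfo::CODE_TARGET)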
(...skipping 28 matching lines...)
 }


 void MacroAssembler::JumpToBuiltin(const ExternalReference& builtin) {
 #if defined(__thumb__)
   // Thumb mode builtin.
   ASSERT((reinterpret_cast<intptr_t>(builtin.address()) & 1) == 1);
 #endif
   mov(r1, Operand(builtin));
   CEntryStub stub;
-  Jump(stub.GetCode(), code_target);
+  Jump(stub.GetCode(), RelocInfo::CODE_TARGET);
 }

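JumpToBuiltin above is a tail call into the runtime: the C entry point is materialized in r1 and control transfers to CEntryStub's generated code with a CODE_TARGET relocation, so the stub performs the actual JavaScript-to-C transition. A caller-side sketch (how the ExternalReference is obtained is elided; builtin and masm are placeholders):

  // ExternalReference builtin = ...;  // C++ runtime entry point (construction elided)
  masm->JumpToBuiltin(builtin);        // r1 := builtin address, tail-jump into CEntryStub code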
 Handle<Code> MacroAssembler::ResolveBuiltin(Builtins::JavaScript id,
                                             bool* resolved) {
   // Contract with compiled functions is that the function is passed in r1.
   int builtins_offset =
       JSBuiltinsObject::kJSBuiltinsOffset + (id * kPointerSize);
   ldr(r1, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
   ldr(r1, FieldMemOperand(r1, GlobalObject::kBuiltinsOffset));
   ldr(r1, FieldMemOperand(r1, builtins_offset));

   return Builtins::GetCode(id, resolved);
 }


 void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id,
                                    InvokeJSFlags flags) {
   bool resolved;
   Handle<Code> code = ResolveBuiltin(id, &resolved);

   if (flags == CALL_JS) {
-    Call(code, code_target);
+    Call(code, RelocInfo::CODE_TARGET);
   } else {
     ASSERT(flags == JUMP_JS);
-    Jump(code, code_target);
+    Jump(code, RelocInfo::CODE_TARGET);
   }

   if (!resolved) {
     const char* name = Builtins::GetName(id);
     int argc = Builtins::GetArgumentsCount(id);
     uint32_t flags =
         Bootstrapper::FixupFlagsArgumentsCount::encode(argc) |
         Bootstrapper::FixupFlagsIsPCRelative::encode(true);
     Unresolved entry = { pc_offset() - sizeof(Instr), flags, name };
     unresolved_.Add(entry);
(...skipping 50 matching lines...)
 #endif
   mov(r0, Operand(p0));
   push(r0);
   mov(r0, Operand(Smi::FromInt(p1 - p0)));
   push(r0);
   CallRuntime(Runtime::kAbort, 2);
   // will not return here
 }

 } }  // namespace v8::internal