Chromium Code Reviews

Side by Side Diff: src/mips/virtual-frame-mips.cc

Issue 549079: Support for MIPS in architecture independent files.... (Closed) Base URL: http://v8.googlecode.com/svn/branches/bleeding_edge/
Patch Set: Created 10 years, 11 months ago
1 // Copyright 2006-2008 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28
29
30 #include "v8.h"
31
32 #include "codegen-inl.h"
33 #include "register-allocator-inl.h"
34 #include "scopes.h"
35
36 namespace v8 {
37 namespace internal {
38
39 // -------------------------------------------------------------------------
40 // VirtualFrame implementation.
41
42 #define __ ACCESS_MASM(masm())
43
44
45 // On entry to a function, the virtual frame already contains the
46 // receiver and the parameters. All initial frame elements are in
47 // memory.
48 VirtualFrame::VirtualFrame()
49 : elements_(parameter_count() + local_count() + kPreallocatedElements),
50 stack_pointer_(parameter_count()) { // 0-based index of TOS.
51 UNIMPLEMENTED_();
52 // for (int i = 0; i <= stack_pointer_; i++) {
53 // elements_.Add(FrameElement::MemoryElement());
54 // }
55 // for (int i = 0; i < RegisterAllocator::kNumRegisters; i++) {
56 // register_locations_[i] = kIllegalIndex;
57 // }
58 }
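A worked example of the initial layout the commented-out body would establish: for a function with two parameters, elements_ holds three memory elements (the receiver at index 0, the parameters at indices 1 and 2), stack_pointer_ is 2 (the 0-based TOS index), and every register_locations_ entry is kIllegalIndex, i.e. no frame element lives in a register yet.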
59
60
61 void VirtualFrame::SyncElementBelowStackPointer(int index) {
62 UNREACHABLE();
63 }
64
65
66 void VirtualFrame::SyncElementByPushing(int index) {
67 UNREACHABLE();
68 }
69
70
71 void VirtualFrame::SyncRange(int begin, int end) {
72 UNIMPLEMENTED_();
73 // All elements are in memory on ARM (i.e., synced).
74 //#ifdef DEBUG
75 // for (int i = begin; i <= end; i++) {
76 // ASSERT(elements_[i].is_synced());
77 // }
78 //#endif
79 }
80
81
82 void VirtualFrame::MergeTo(VirtualFrame* expected) {
83 UNIMPLEMENTED_();
84 // // ARM frames are currently always in memory.
85 // // TOCHECK: I presume it is the same on MIPS
86 // ASSERT(Equals(expected));
87 }
88
89
90 //void VirtualFrame::MergeMoveRegistersToMemory(VirtualFrame* expected) {
91 // UNREACHABLE();
92 //}
93
94
95 //void VirtualFrame::MergeMoveRegistersToRegisters(VirtualFrame* expected) {
96 // UNREACHABLE();
97 //}
98
99
100 //void VirtualFrame::MergeMoveMemoryToRegisters(VirtualFrame* expected) {
101 // UNREACHABLE();
102 //}
103
104 void VirtualFrame::Enter() {
105 UNIMPLEMENTED_();
106 // Comment cmnt(masm(), "[ Enter JS frame");
107 //
108 //#ifdef DEBUG
109 // // Verify that a1 contains a JS function. The following code relies on this.
110 // if (FLAG_debug_code) {
111 // Label map_check, done;
112 //
113 // __ andi(t0, a1, Operand(kSmiTagMask));
114 // __ bcond(ne, &map_check, zero_reg, Operand(t0));
115 // __ stop("VirtualFrame::Enter - a1 is not a function (smi check).");
116 // __ bind(&map_check);
117 //
118 //// __ CompareObjectType(r1, r2, r2, JS_FUNCTION_TYPE);
119 //// __ b(eq, &done);
120 // // TOCHECK: Register use.
121 // __ GetObjectType(a1, a2, a2);
122 // __ bcond(eq, &done, a2, Operand(JS_FUNCTION_TYPE));
123 // __ stop("VirtualFrame::Enter - a1 is not a function (map check).");
124 // __ bind(&done);
125 // }
126 //#endif // DEBUG
127 //
128 // // We are about to push four values to the frame.
129 // Adjust(4);
130 //// __ stm(db_w, sp, r1.bit() | cp.bit() | fp.bit() | lr.bit());
131 //// __ multi_push(a1.bit() | cp.bit() | fp.bit() | ra.bit());
132 // // To optimize the code, we don't use multi_push.
133 // __ addiu(sp, sp, -16);
134 // __ sw(ra, MemOperand(sp, 12));
135 // __ sw(fp, MemOperand(sp, 8));
136 // __ sw(cp, MemOperand(sp, 4));
137 // __ sw(a1, MemOperand(sp, 0));
138 //
139 // // Adjust FP to point to saved FP.
140 //// __ add(fp, sp, Operand(2 * kPointerSize));
141 // __ addiu(fp, sp, Operand(2 * kPointerSize));
142 //
143 //// cgen()->allocator()->Unuse(t9);
144 // cgen()->allocator()->Unuse(a1);
145 // cgen()->allocator()->Unuse(ra);
146 }
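For reference, a sketch of the stack layout the commented-out body would produce, assuming 32-bit pointers (kPointerSize == 4):

    //  sp + 12 : ra  (caller's return address)
    //  sp +  8 : fp  (caller's frame pointer)  <- new fp = sp + 8 points here
    //  sp +  4 : cp  (context)
    //  sp +  0 : a1  (JS function)

This matches Exit() below: after mov(sp, fp) the saved fp sits at offset 0 and ra at offset 4, and the final addiu(sp, sp, 8) pops both.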
147
148
149 void VirtualFrame::Exit() {
150 UNIMPLEMENTED_();
151 // Comment cmnt(masm(), "[ Exit JS frame");
152 // // Record the location of the JS exit code for patching when setting a
153 // // break point.
154 // __ RecordJSReturn();
155 //
156 // // Drop the execution stack down to the frame pointer and restore the caller
157 // // frame pointer and return address.
158 // __ mov(sp, fp);
159 //
160 // // Cf. the end of the Enter() function.
161 //// __ ldm(ia_w, sp, fp.bit() | lr.bit());
162 // __ lw(fp, MemOperand(sp, 0));
163 // __ lw(ra, MemOperand(sp, 4));
164 // __ addiu(sp, sp, 8); // Restore sp to its position before the register save area.
165 }
166
167
168 void VirtualFrame::AllocateStackSlots() {
169 UNIMPLEMENTED_();
170 // int count = local_count();
171 // if (count > 0) {
172 // Comment cmnt(masm(), "[ Allocate space for locals");
173 // Adjust(count);
174 // // Initialize stack slots with 'undefined' value.
175 // __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
176 // }
177 //// if (FLAG_check_stack) {
178 ////// __ LoadRoot(r2, Heap::kStackLimitRootIndex);
179 //// __ LoadRoot(a2, Heap::kStackLimitRootIndex);
180 //// }
181 // __ addiu(sp, sp, -count * kPointerSize);
182 // for (int i = 0; i < count; i++) {
183 //// __ push(ip);
184 // __ sw(ip, MemOperand(sp, (count - i - 1) * kPointerSize));
185 // }
186 //// if (FLAG_check_stack) {
187 //// // TODO: implement the FLAG_check_stack "if".
188 //// // Original ARM comment:
189 //// // Put the lr setup instruction in the delay slot. The kInstrSize is added
190 //// // to the implicit 8 byte offset that always applies to operations with pc
191 //// // and gives a return address 12 bytes down.
192 //// masm()->add(lr, pc, Operand(Assembler::kInstrSize));
193 //// masm()->cmp(sp, Operand(r2));
194 //// StackCheckStub stub;
195 //// // Call the stub if lower.
196 //// masm()->mov(pc,
197 //// Operand(reinterpret_cast<intptr_t>(stub.GetCode().location()),
198 //// RelocInfo::CODE_TARGET),
199 //// LeaveCC,
200 //// lo);
201 //// }
202 }
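For count == 2 the commented-out body would expand to roughly the following (a hypothetical expansion; the undefined value is loaded once into ip, then stored into each slot):

    addiu sp, sp, -8     # reserve two slots (2 * kPointerSize)
    sw    ip, 4(sp)      # local 0
    sw    ip, 0(sp)      # local 1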
203
204
205 void VirtualFrame::SaveContextRegister() {
206 UNIMPLEMENTED();
207 }
208
209
210 void VirtualFrame::RestoreContextRegister() {
211 UNIMPLEMENTED();
212 }
213
214
215 void VirtualFrame::PushReceiverSlotAddress() {
216 UNIMPLEMENTED();
217 }
218
219
220 int VirtualFrame::InvalidateFrameSlotAt(int index) {
221 return kIllegalIndex;
222 }
223
224
225 void VirtualFrame::TakeFrameSlotAt(int index) {
226 UNIMPLEMENTED();
227 }
228
229
230 void VirtualFrame::StoreToFrameSlotAt(int index) {
231 UNIMPLEMENTED();
232 }
233
234
235 void VirtualFrame::PushTryHandler(HandlerType type) {
236 UNIMPLEMENTED_();
237 // // Grow the expression stack by handler size less one (the return
238 // // address in lr is already counted by a call instruction).
239 // Adjust(kHandlerSize - 1);
240 // __ PushTryHandler(IN_JAVASCRIPT, type);
241 }
242
243
244 void VirtualFrame::RawCallStub(CodeStub* stub) {
245 UNIMPLEMENTED_();
246 // ASSERT(cgen()->HasValidEntryRegisters());
247 // __ CallStub(stub);
248 }
249
250
251 void VirtualFrame::CallStub(CodeStub* stub, Result* arg) {
252 UNIMPLEMENTED_();
253 // PrepareForCall(0, 0);
254 // arg->Unuse();
255 // RawCallStub(stub);
256 }
257
258
259 void VirtualFrame::CallStub(CodeStub* stub, Result* arg0, Result* arg1) {
260 UNIMPLEMENTED_();
261 // PrepareForCall(0, 0);
262 // arg0->Unuse();
263 // arg1->Unuse();
264 // RawCallStub(stub);
265 }
266
267
268 void VirtualFrame::CallRuntime(Runtime::Function* f, int arg_count) {
269 UNIMPLEMENTED_();
270 // PrepareForCall(arg_count, arg_count);
271 // ASSERT(cgen()->HasValidEntryRegisters());
272 // __ CallRuntime(f, arg_count);
273 }
274
275
276 void VirtualFrame::CallRuntime(Runtime::FunctionId id, int arg_count) {
277 UNIMPLEMENTED_();
278 // PrepareForCall(arg_count, arg_count);
279 // ASSERT(cgen()->HasValidEntryRegisters());
280 // __ CallRuntime(id, arg_count);
281 }
282
283
284 void VirtualFrame::CallAlignedRuntime(Runtime::Function* f, int arg_count) {
285 UNIMPLEMENTED();
286 // PrepareForCall(arg_count, arg_count);
287 // ASSERT(cgen()->HasValidEntryRegisters());
288 // __ CallRuntime(f, arg_count);
289 }
290
291
292 void VirtualFrame::CallAlignedRuntime(Runtime::FunctionId id, int arg_count) {
293 UNIMPLEMENTED();
294 // PrepareForCall(arg_count, arg_count);
295 // ASSERT(cgen()->HasValidEntryRegisters());
296 // __ CallRuntime(id, arg_count);
297 }
298
299
300 void VirtualFrame::InvokeBuiltin(Builtins::JavaScript id,
301 InvokeJSFlags flags,
302 Result* arg_count_register,
303 int arg_count) {
304 UNIMPLEMENTED_();
305 // ASSERT(arg_count_register->reg().is(a0));
306 // PrepareForCall(arg_count, arg_count);
307 // arg_count_register->Unuse();
308 // __ InvokeBuiltin(id, flags);
309 }
310
311
312 void VirtualFrame::RawCallCodeObject(Handle<Code> code,
313 RelocInfo::Mode rmode) {
314 UNIMPLEMENTED_();
315 // ASSERT(cgen()->HasValidEntryRegisters());
316 // // We don't align here because it may cause the arguments to be unaligned with
317 // // sp when entering the function.
318 // __ Call(code, rmode); // Call the code
319 }
320
321
322 void VirtualFrame::CallCodeObject(Handle<Code> code,
323 RelocInfo::Mode rmode,
324 int dropped_args) {
325 UNIMPLEMENTED_();
326 // int spilled_args = 0;
327 // switch (code->kind()) {
328 // case Code::CALL_IC:
329 // spilled_args = dropped_args + 1;
330 // break;
331 // case Code::FUNCTION:
332 // spilled_args = dropped_args + 1;
333 // break;
334 // case Code::KEYED_LOAD_IC:
335 // ASSERT(dropped_args == 0);
336 // spilled_args = 2;
337 // break;
338 // default:
339 // // The other types of code objects are called with values
340 // // in specific registers, and are handled in functions with
341 // // a different signature.
342 // UNREACHABLE();
343 // break;
344 // }
345 // PrepareForCall(spilled_args, dropped_args);
346 // RawCallCodeObject(code, rmode);
347 // // Do NOT add a nop. Some functions use the branch delay slot (e.g. VisitCall).
348 }
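As a worked example of the spilled_args accounting above: a Code::CALL_IC call that drops 2 arguments spills dropped_args + 1 == 3 frame elements before the call (the arguments plus, presumably, the receiver), whereas a KEYED_LOAD_IC always spills exactly 2 (receiver and key) and drops nothing.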
349
350
351 void VirtualFrame::CallCodeObject(Handle<Code> code,
352 RelocInfo::Mode rmode,
353 Result* arg,
354 int dropped_args) {
355 UNIMPLEMENTED_();
356 // int spilled_args = 0;
357 // switch (code->kind()) {
358 // case Code::LOAD_IC:
359 // ASSERT(arg->reg().is(a2));
360 // ASSERT(dropped_args == 0);
361 // spilled_args = 1;
362 // break;
363 // case Code::KEYED_STORE_IC:
364 // ASSERT(arg->reg().is(a0));
365 // ASSERT(dropped_args == 0);
366 // spilled_args = 2;
367 // break;
368 // default:
369 // // No other types of code objects are called with values
370 // // in exactly one register.
371 // UNREACHABLE();
372 // break;
373 // }
374 //// PrepareForCall(spilled_args, dropped_args);
375 // arg->Unuse();
376 // RawCallCodeObject(code, rmode);
377 // // Do NOT add a nop. Some functions use the branch delay slot (e.g. VisitCall).
378 }
379
380
381 void VirtualFrame::CallCodeObject(Handle<Code> code,
382 RelocInfo::Mode rmode,
383 Result* arg0,
384 Result* arg1,
385 int dropped_args,
386 bool set_auto_args_slots) {
387 UNIMPLEMENTED_();
388 // int spilled_args = 1;
389 // switch (code->kind()) {
390 // case Code::STORE_IC:
391 //// ASSERT(arg0->reg().is(r0));
392 //// ASSERT(arg1->reg().is(r2));
393 // ASSERT(arg0->reg().is(a0));
394 // ASSERT(arg1->reg().is(a2));
395 // ASSERT(dropped_args == 0);
396 // spilled_args = 1;
397 // break;
398 // case Code::BUILTIN:
399 // ASSERT(*code == Builtins::builtin(Builtins::JSConstructCall));
400 //// ASSERT(arg0->reg().is(r0));
401 //// ASSERT(arg1->reg().is(r1));
402 // ASSERT(arg0->reg().is(a0));
403 // ASSERT(arg1->reg().is(a1));
404 // spilled_args = dropped_args + 1;
405 // break;
406 // default:
407 // // No other types of code objects are called with values
408 // // in exactly two registers.
409 // UNREACHABLE();
410 // break;
411 // }
412 // PrepareForCall(spilled_args, dropped_args);
413 // arg0->Unuse();
414 // arg1->Unuse();
415 // RawCallCodeObject(code, rmode);
416 // if (set_auto_args_slots && code->kind() != Code::BUILTIN) {
417 //// __ addiu(sp, sp, -StandardFrameConstants::kRArgsSlotsSize);
418 // }
419 }
420
421
422 void VirtualFrame::Drop(int count) {
423 UNIMPLEMENTED_();
424 // ASSERT(count >= 0);
425 // ASSERT(height() >= count);
426 // int num_virtual_elements = (element_count() - 1) - stack_pointer_;
427 //
428 // // Emit code to lower the stack pointer if necessary.
429 // if (num_virtual_elements < count) {
430 // int num_dropped = count - num_virtual_elements;
431 // stack_pointer_ -= num_dropped;
432 // __ addiu(sp, sp, Operand(num_dropped * kPointerSize));
433 // }
434 //
435 // // Discard elements from the virtual frame and free any registers.
436 // for (int i = 0; i < count; i++) {
437 // FrameElement dropped = elements_.RemoveLast();
438 // if (dropped.is_register()) {
439 // Unuse(dropped.reg());
440 // }
441 // }
442 }
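A worked example of the commented-out logic: with element_count() == 10 and stack_pointer_ == 7, two elements exist only in the virtual frame (num_virtual_elements == (10 - 1) - 7 == 2). Drop(3) must therefore shrink the real stack by one word (num_dropped == 1, i.e. addiu(sp, sp, 4)) before discarding the three virtual elements and freeing any registers they held.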
443
444
445 void VirtualFrame::DropFromVFrameOnly(int count) {
446 UNIMPLEMENTED_();
447 // ASSERT(count >= 0);
448 // ASSERT(height() >= count);
449 // int num_virtual_elements = (element_count() - 1) - stack_pointer_;
450 //
451 // // Emit code to lower the stack pointer if necessary.
452 // if (num_virtual_elements < count) {
453 // int num_dropped = count - num_virtual_elements;
454 // stack_pointer_ -= num_dropped;
455 // }
456 //
457 // // Discard elements from the virtual frame and free any registers.
458 // for (int i = 0; i < count; i++) {
459 // FrameElement dropped = elements_.RemoveLast();
460 // if (dropped.is_register()) {
461 // Unuse(dropped.reg());
462 // }
463 // }
464 }
465
466
467 Result VirtualFrame::Pop() {
468 return Result();
469 }
470
471
472 void VirtualFrame::EmitPop(Register reg) {
473 UNIMPLEMENTED_();
474 // ASSERT(stack_pointer_ == element_count() - 1);
475 // stack_pointer_--;
476 // elements_.RemoveLast();
477 // __ pop(reg);
478 }
479
480 void VirtualFrame::EmitMultiPop(RegList regs) {
481 UNIMPLEMENTED_();
482 // ASSERT(stack_pointer_ == element_count() - 1);
483 // for (int16_t i = RegisterAllocatorConstants::kNumRegisters; --i >= 0;) {
484 // if ((regs & (1 << i)) != 0) {
485 // stack_pointer_--;
486 // elements_.RemoveLast();
487 // }
488 // }
489 // __ multi_pop(regs);
490 //}
491 //void VirtualFrame::EmitMultiPopReversed(RegList regs) {
492 // ASSERT(stack_pointer_ == element_count() - 1);
493 // for (int16_t i = RegisterAllocatorConstants::kNumRegisters; --i >= 0;) {
494 // if ((regs & (1 << i)) != 0) {
495 // stack_pointer_--;
496 // elements_.RemoveLast();
497 // }
498 // }
499 // __ multi_pop_reversed(regs);
500 }
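The bookkeeping in both multi-pop variants is just a population count over the register mask: each set bit in regs corresponds to one frame element removed. A minimal sketch of that invariant (hypothetical helper, using the patch's RegisterAllocatorConstants::kNumRegisters):

    // Number of frame elements a multi_pop(regs) removes: one per set bit.
    static int ElementsRemoved(RegList regs) {
      int n = 0;
      for (int i = 0; i < RegisterAllocatorConstants::kNumRegisters; i++) {
        if ((regs & (1 << i)) != 0) n++;  // register i is in the mask
      }
      return n;
    }

The downward loop in EmitMultiPop only fixes the order in which elements are popped; the count is the same either way.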
501
502
503 void VirtualFrame::EmitPush(Register reg) {
504 UNIMPLEMENTED_();
505 // ASSERT(stack_pointer_ == element_count() - 1);
506 // elements_.Add(FrameElement::MemoryElement());
507 // stack_pointer_++;
508 // __ push(reg);
509 }
510
511 void VirtualFrame::EmitMultiPush(RegList regs) {
512 UNIMPLEMENTED_();
513 // ASSERT(stack_pointer_ == element_count() - 1);
514 // for (int16_t i = 0; i < RegisterAllocatorConstants::kNumRegisters; i++) {
515 // if ((regs & (1 << i)) != 0) {
516 // elements_.Add(FrameElement::MemoryElement());
517 // stack_pointer_++;
518 // }
519 // }
520 // __ multi_push(regs);
521 //}
522 //void VirtualFrame::EmitMultiPushReversed(RegList regs) {
523 // ASSERT(stack_pointer_ == element_count() - 1);
524 // for (int16_t i = 0; i < RegisterAllocatorConstants::kNumRegisters; i++) {
525 // if ((regs & (1 << i)) != 0) {
526 // elements_.Add(FrameElement::MemoryElement());
527 // stack_pointer_++;
528 // }
529 // }
530 // __ multi_push_reversed(regs);
531 }
532
533 void VirtualFrame::EmitArgumentSlots(RegList reglist) {
534 UNIMPLEMENTED_();
535 // ASSERT(stack_pointer_ == element_count() - 1);
536 // ASSERT(is_uint4(reglist));
537 // __ addiu(sp, sp, 4 * kPointerSize);
538 // for (int i = 0; i < 4; i++) {
539 // elements_.Add(FrameElement::MemoryElement());
540 // stack_pointer_++;
541 // if (reglist & (1 << i)) { __ sw(RegisterAllocator::ToRegister(i + 4), MemOperand(sp, 4-i)); }
542 // }
543 }
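Two details in the commented-out body look suspect and are worth a TOCHECK: reserving the four o32 argument slots should move sp down, not up, and the store offsets should be multiples of kPointerSize. A corrected sketch under those assumptions (MIPS register numbers 4..7 being a0..a3, so ToRegister(i + 4) selects a0-a3):

    __ addiu(sp, sp, -4 * kPointerSize);  // Reserve the four argument slots.
    for (int i = 0; i < 4; i++) {
      elements_.Add(FrameElement::MemoryElement());
      stack_pointer_++;
      if (reglist & (1 << i)) {
        // Slot i holds a0..a3 at offset i * kPointerSize from the new sp.
        __ sw(RegisterAllocator::ToRegister(i + 4), MemOperand(sp, i * kPointerSize));
      }
    }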
544
545 #undef __
546
547 } } // namespace v8::internal