OLD | NEW |
1 // Copyright 2009 the V8 project authors. All rights reserved. | 1 // Copyright 2009 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 286 matching lines...)
297 __ InvokeBuiltin(id, flags); | 297 __ InvokeBuiltin(id, flags); |
298 } | 298 } |
299 | 299 |
300 | 300 |
301 void VirtualFrame::CallLoadIC(RelocInfo::Mode mode) { | 301 void VirtualFrame::CallLoadIC(RelocInfo::Mode mode) { |
302 Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize)); | 302 Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize)); |
303 CallCodeObject(ic, mode, 0); | 303 CallCodeObject(ic, mode, 0); |
304 } | 304 } |
305 | 305 |
306 | 306 |
| 307 void VirtualFrame::CallStoreIC(Handle<String> name, bool is_contextual) { |
| 308 Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize)); |
| 309 PopToR0(); |
| 310 if (is_contextual) { |
| 311 SpillAll(); |
| 312 __ ldr(r1, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX))); |
| 313 } else { |
| 314 EmitPop(r1); |
| 315 SpillAll(); |
| 316 } |
| 317 __ mov(r2, Operand(name)); |
| 318 CallCodeObject(ic, RelocInfo::CODE_TARGET, 0); |
| 319 } |
| 320 |
| 321 |
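The new CallStoreIC above follows the register convention visible in its body: r0 carries the value, r1 the receiver (the global object for contextual stores), and r2 the property name. A hedged sketch of how a call site in the ARM code generator might use it; the function name and frame layout here are illustrative assumptions, not part of this patch:

    // Illustrative only: assumes the receiver (for non-contextual stores)
    // and then the value have already been pushed on the virtual frame.
    void EmitNamedStoreSketch(VirtualFrame* frame,
                              Handle<String> name,
                              bool is_contextual) {
      // Consumes the value, and the receiver too when not contextual.
      frame->CallStoreIC(name, is_contextual);
      // The store IC leaves its result in r0; push it back if the caller
      // needs the value on the frame afterwards (assumed helper EmitPush).
      frame->EmitPush(r0);
    }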
307 void VirtualFrame::CallKeyedLoadIC() { | 322 void VirtualFrame::CallKeyedLoadIC() { |
308 Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize)); | 323 Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize)); |
309 CallCodeObject(ic, RelocInfo::CODE_TARGET, 0); | 324 CallCodeObject(ic, RelocInfo::CODE_TARGET, 0); |
310 } | 325 } |
311 | 326 |
312 | 327 |
313 void VirtualFrame::CallKeyedStoreIC() { | 328 void VirtualFrame::CallKeyedStoreIC() { |
| 329 ASSERT(SpilledScope::is_spilled()); |
314 Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize)); | 330 Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize)); |
315 CallCodeObject(ic, RelocInfo::CODE_TARGET, 0); | 331 CallCodeObject(ic, RelocInfo::CODE_TARGET, 0); |
316 } | 332 } |
317 | 333 |
318 | 334 |
319 void VirtualFrame::CallCodeObject(Handle<Code> code, | 335 void VirtualFrame::CallCodeObject(Handle<Code> code, |
320 RelocInfo::Mode rmode, | 336 RelocInfo::Mode rmode, |
321 int dropped_args) { | 337 int dropped_args) { |
322 switch (code->kind()) { | 338 switch (code->kind()) { |
323 case Code::CALL_IC: | 339 case Code::CALL_IC: |
(...skipping 146 matching lines...)
470 top_of_stack_state_ = kStateAfterPush[top_of_stack_state_]; | 486 top_of_stack_state_ = kStateAfterPush[top_of_stack_state_]; |
471 Register answer = kTopRegister[top_of_stack_state_]; | 487 Register answer = kTopRegister[top_of_stack_state_]; |
472 __ pop(answer); | 488 __ pop(answer); |
473 return answer; | 489 return answer; |
474 } else { | 490 } else { |
475 return kTopRegister[top_of_stack_state_]; | 491 return kTopRegister[top_of_stack_state_]; |
476 } | 492 } |
477 } | 493 } |
478 | 494 |
479 | 495 |
| 496 void VirtualFrame::Dup() { |
| 497 AssertIsNotSpilled(); |
| 498 switch (top_of_stack_state_) { |
| 499 case NO_TOS_REGISTERS: |
| 500 __ ldr(r0, MemOperand(sp, 0)); |
| 501 top_of_stack_state_ = R0_TOS; |
| 502 break; |
| 503 case R0_TOS: |
| 504 __ mov(r1, r0); |
| 505 top_of_stack_state_ = R0_R1_TOS; |
| 506 break; |
| 507 case R1_TOS: |
| 508 __ mov(r0, r1); |
| 509 top_of_stack_state_ = R0_R1_TOS; |
| 510 break; |
| 511 case R0_R1_TOS: |
| 512 __ push(r1); |
| 513 __ mov(r1, r0); |
| 514 // No need to change state as r0 and r1 now contain the same value. |
| 515 break; |
| 516 case R1_R0_TOS: |
| 517 __ push(r0); |
| 518 __ mov(r0, r1); |
| 519 // No need to change state as r0 and r1 now contain the same value. |
| 520 break; |
| 521 default: |
| 522 UNREACHABLE(); |
| 523 } |
| 524 element_count_++; |
| 525 } |
| 526 |
| 527 |
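The new Dup() duplicates the top of the virtual frame while tracking which of r0/r1 currently hold the top elements, so the copy usually costs only a register move rather than a memory push. A hedged usage sketch (illustrative, not taken from this patch):

    // Duplicate the top element, then materialize one copy in a register;
    // the other copy stays on the virtual frame for the code that follows.
    frame->Dup();
    Register copy = frame->PopToRegister(no_reg);
    // ... emit a test on 'copy' (e.g. a smi check) without disturbing the
    // value still held by the frame.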
480 Register VirtualFrame::PopToRegister(Register but_not_to_this_one) { | 528 Register VirtualFrame::PopToRegister(Register but_not_to_this_one) { |
481 ASSERT(but_not_to_this_one.is(r0) || | 529 ASSERT(but_not_to_this_one.is(r0) || |
482 but_not_to_this_one.is(r1) || | 530 but_not_to_this_one.is(r1) || |
483 but_not_to_this_one.is(no_reg)); | 531 but_not_to_this_one.is(no_reg)); |
484 AssertIsNotSpilled(); | 532 AssertIsNotSpilled(); |
485 element_count_--; | 533 element_count_--; |
486 if (top_of_stack_state_ == NO_TOS_REGISTERS) { | 534 if (top_of_stack_state_ == NO_TOS_REGISTERS) { |
487 if (but_not_to_this_one.is(r0)) { | 535 if (but_not_to_this_one.is(r0)) { |
488 __ pop(r1); | 536 __ pop(r1); |
489 return r1; | 537 return r1; |
(...skipping 118 matching lines...)
608 default: | 656 default: |
609 UNREACHABLE(); | 657 UNREACHABLE(); |
610 break; | 658 break; |
611 } | 659 } |
612 ASSERT(register_allocation_map_ == 0); // Not yet implemented. | 660 ASSERT(register_allocation_map_ == 0); // Not yet implemented. |
613 } | 661 } |
614 | 662 |
615 #undef __ | 663 #undef __ |
616 | 664 |
617 } } // namespace v8::internal | 665 } } // namespace v8::internal |