| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 459 matching lines...) |
| 470 __ push(descriptor->register_params_[i]); | 470 __ push(descriptor->register_params_[i]); |
| 471 } | 471 } |
| 472 ExternalReference miss = descriptor->miss_handler(); | 472 ExternalReference miss = descriptor->miss_handler(); |
| 473 __ CallExternalReference(miss, descriptor->register_param_count_); | 473 __ CallExternalReference(miss, descriptor->register_param_count_); |
| 474 } | 474 } |
| 475 | 475 |
| 476 __ Ret(); | 476 __ Ret(); |
| 477 } | 477 } |
| 478 | 478 |
| 479 | 479 |
| 480 void FastNewBlockContextStub::Generate(MacroAssembler* masm) { | |
| 481 // Stack layout on entry: | |
| 482 // | |
| 483 // [sp]: function. | |
| 484 // [sp + kPointerSize]: serialized scope info | |
| 485 | |
| 486 // Try to allocate the context in new space. | |
| 487 Label gc; | |
| 488 int length = slots_ + Context::MIN_CONTEXT_SLOTS; | |
| 489 __ Allocate(FixedArray::SizeFor(length), v0, a1, a2, &gc, TAG_OBJECT); | |
| 490 | |
| 491 // Load the function from the stack. | |
| 492 __ lw(a3, MemOperand(sp, 0)); | |
| 493 | |
| 494 // Load the serialized scope info from the stack. | |
| 495 __ lw(a1, MemOperand(sp, 1 * kPointerSize)); | |
| 496 | |
| 497 // Set up the object header. | |
| 498 __ LoadRoot(a2, Heap::kBlockContextMapRootIndex); | |
| 499 __ sw(a2, FieldMemOperand(v0, HeapObject::kMapOffset)); | |
| 500 __ li(a2, Operand(Smi::FromInt(length))); | |
| 501 __ sw(a2, FieldMemOperand(v0, FixedArray::kLengthOffset)); | |
| 502 | |
| 503 // If this block context is nested in the native context we get a smi | |
| 504 // sentinel instead of a function. The block context should get the | |
| 505 // canonical empty function of the native context as its closure which | |
| 506 // we still have to look up. | |
| 507 Label after_sentinel; | |
| 508 __ JumpIfNotSmi(a3, &after_sentinel); | |
| 509 if (FLAG_debug_code) { | |
| 510 __ Assert(eq, kExpected0AsASmiSentinel, a3, Operand(zero_reg)); | |
| 511 } | |
| 512 __ lw(a3, GlobalObjectOperand()); | |
| 513 __ lw(a3, FieldMemOperand(a3, GlobalObject::kNativeContextOffset)); | |
| 514 __ lw(a3, ContextOperand(a3, Context::CLOSURE_INDEX)); | |
| 515 __ bind(&after_sentinel); | |
| 516 | |
| 517 // Set up the fixed slots, copy the global object from the previous context. | |
| 518 __ lw(a2, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX)); | |
| 519 __ sw(a3, ContextOperand(v0, Context::CLOSURE_INDEX)); | |
| 520 __ sw(cp, ContextOperand(v0, Context::PREVIOUS_INDEX)); | |
| 521 __ sw(a1, ContextOperand(v0, Context::EXTENSION_INDEX)); | |
| 522 __ sw(a2, ContextOperand(v0, Context::GLOBAL_OBJECT_INDEX)); | |
| 523 | |
| 524 // Initialize the rest of the slots to the hole value. | |
| 525 __ LoadRoot(a1, Heap::kTheHoleValueRootIndex); | |
| 526 for (int i = 0; i < slots_; i++) { | |
| 527 __ sw(a1, ContextOperand(v0, i + Context::MIN_CONTEXT_SLOTS)); | |
| 528 } | |
| 529 | |
| 530 // Remove the two on-stack arguments and return. | |
| 531 __ mov(cp, v0); | |
| 532 __ DropAndRet(2); | |
| 533 | |
| 534 // Need to collect. Call into runtime system. | |
| 535 __ bind(&gc); | |
| 536 __ TailCallRuntime(Runtime::kPushBlockContext, 2, 1); | |
| 537 } | |
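
For readers skimming the deleted FastNewBlockContextStub: a minimal standalone C++ sketch (illustrative names only, not V8's actual types) of the object the stub assembled. It allocated a FixedArray-shaped heap object, wrote the block-context map and a Smi-encoded length, filled the four fixed slots (closure, previous context, extension/scope info, global object), and initialized every user slot to the hole sentinel:

#include <cstdint>
#include <vector>

// Mirrors Context::MIN_CONTEXT_SLOTS in this era of V8: closure,
// previous context, extension (the scope info here), global object.
constexpr int kMinContextSlots = 4;

struct BlockContextSketch {
  void* map;                 // Heap::kBlockContextMapRootIndex in the stub
  int32_t smi_length;        // 32-bit Smi encoding: integer value << 1
  std::vector<void*> slots;  // four fixed slots, then the user slots
};

BlockContextSketch MakeBlockContext(int user_slots, void* closure,
                                    void* previous, void* scope_info,
                                    void* global, void* the_hole) {
  BlockContextSketch ctx;
  ctx.map = nullptr;  // stand-in for the block-context map root
  ctx.smi_length = (user_slots + kMinContextSlots) << 1;
  ctx.slots = {closure, previous, scope_info, global};
  // The stub wrote the hole sentinel into every user slot (the loop
  // over slots_ above); real values are filled in lazily later.
  for (int i = 0; i < user_slots; ++i) ctx.slots.push_back(the_hole);
  return ctx;
}
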
| 538 | |
| 539 | |
| 540 // Takes a Smi and converts to an IEEE 64 bit floating point value in two | 480 // Takes a Smi and converts to an IEEE 64 bit floating point value in two |
| 541 // registers. The format is 1 sign bit, 11 exponent bits (biased 1023) and | 481 // registers. The format is 1 sign bit, 11 exponent bits (biased 1023) and |
| 542 // 52 fraction bits (20 in the first word, 32 in the second). Zeros is a | 482 // 52 fraction bits (20 in the first word, 32 in the second). Zeros is a |
| 543 // scratch register. Destroys the source register. No GC occurs during this | 483 // scratch register. Destroys the source register. No GC occurs during this |
| 544 // stub so you don't have to set up the frame. | 484 // stub so you don't have to set up the frame. |
| 545 class ConvertToDoubleStub : public PlatformCodeStub { | 485 class ConvertToDoubleStub : public PlatformCodeStub { |
| 546 public: | 486 public: |
| 547 ConvertToDoubleStub(Register result_reg_1, | 487 ConvertToDoubleStub(Register result_reg_1, |
| 548 Register result_reg_2, | 488 Register result_reg_2, |
| 549 Register source_reg, | 489 Register source_reg, |
| (...skipping 5243 matching lines...) |
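
A side note on the two-word double format the ConvertToDoubleStub comment above describes. This tiny standalone C++ check (not V8 code) splits a double the same way the stub's two result registers do: the high word holds the sign bit, the 11 biased exponent bits, and the top 20 fraction bits; the low word holds the remaining 32 fraction bits:

#include <cstdint>
#include <cstdio>
#include <cstring>

int main() {
  double d = -5.0;  // any Smi-range value works the same way
  uint64_t bits;
  std::memcpy(&bits, &d, sizeof bits);
  uint32_t hi = static_cast<uint32_t>(bits >> 32);  // sign|exp|20 bits
  uint32_t lo = static_cast<uint32_t>(bits);        // low 32 fraction bits
  std::printf("sign=%u exp=%u (unbiased %d) hi_frac=0x%05x lo=0x%08x\n",
              hi >> 31, (hi >> 20) & 0x7FF,
              static_cast<int>((hi >> 20) & 0x7FF) - 1023,
              hi & 0xFFFFF, lo);
  // For -5.0: sign=1, exponent=1025 (unbiased 2, bias 1023),
  // and the 52-bit fraction is 0x40000'00000000 (-1.25 * 2^2).
}
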
| 5793 restore_context ? | 5733 restore_context ? |
| 5794 &context_restore_operand : NULL); | 5734 &context_restore_operand : NULL); |
| 5795 } | 5735 } |
| 5796 | 5736 |
| 5797 | 5737 |
| 5798 #undef __ | 5738 #undef __ |
| 5799 | 5739 |
| 5800 } } // namespace v8::internal | 5740 } } // namespace v8::internal |
| 5801 | 5741 |
| 5802 #endif // V8_TARGET_ARCH_MIPS | 5742 #endif // V8_TARGET_ARCH_MIPS |