OLD | NEW |
1 // Copyright 2006-2009 the V8 project authors. All rights reserved. | 1 // Copyright 2006-2009 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 169 matching lines...)
180 | 180 |
181 // Allocate space for locals and initialize them. This also checks | 181 // Allocate space for locals and initialize them. This also checks |
182 // for stack overflow. | 182 // for stack overflow. |
183 frame_->AllocateStackSlots(); | 183 frame_->AllocateStackSlots(); |
184 // Initialize the function return target after the locals are set | 184 // Initialize the function return target after the locals are set |
185 // up, because it needs the expected frame height from the frame. | 185 // up, because it needs the expected frame height from the frame. |
186 function_return_.set_direction(JumpTarget::BIDIRECTIONAL); | 186 function_return_.set_direction(JumpTarget::BIDIRECTIONAL); |
187 function_return_is_shadowed_ = false; | 187 function_return_is_shadowed_ = false; |
188 | 188 |
189 VirtualFrame::SpilledScope spilled_scope; | 189 VirtualFrame::SpilledScope spilled_scope; |
190 if (scope_->num_heap_slots() > 0) { | 190 int heap_slots = scope_->num_heap_slots(); |
| 191 if (heap_slots > 0) { |
191 // Allocate local context. | 192 // Allocate local context. |
192 // Get outer context and create a new context based on it. | 193 // Get outer context and create a new context based on it. |
193 __ ldr(r0, frame_->Function()); | 194 __ ldr(r0, frame_->Function()); |
194 frame_->EmitPush(r0); | 195 frame_->EmitPush(r0); |
195 frame_->CallRuntime(Runtime::kNewContext, 1); // r0 holds the result | 196 if (heap_slots <= FastNewContextStub::kMaximumSlots) { |
| 197 FastNewContextStub stub(heap_slots); |
| 198 frame_->CallStub(&stub, 1); |
| 199 } else { |
| 200 frame_->CallRuntime(Runtime::kNewContext, 1); |
| 201 } |
196 | 202 |
197 #ifdef DEBUG | 203 #ifdef DEBUG |
198 JumpTarget verified_true; | 204 JumpTarget verified_true; |
199 __ cmp(r0, Operand(cp)); | 205 __ cmp(r0, Operand(cp)); |
200 verified_true.Branch(eq); | 206 verified_true.Branch(eq); |
201 __ stop("NewContext: r0 is expected to be the same as cp"); | 207 __ stop("NewContext: r0 is expected to be the same as cp"); |
202 verified_true.Bind(); | 208 verified_true.Bind(); |
203 #endif | 209 #endif |
204 // Update context local. | 210 // Update context local. |
205 __ str(cp, frame_->Context()); | 211 __ str(cp, frame_->Context()); |
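The hunk above is the heart of the change: instead of always calling Runtime::kNewContext, the generated code now dispatches on the number of heap slots. A minimal standalone sketch of that selection policy follows; the threshold constant here is an assumption standing in for FastNewContextStub::kMaximumSlots, whose value is not shown in this diff.

    // Illustrative model of the fast/slow dispatch emitted above; not V8 code.
    #include <cstdio>

    namespace {

    // Assumption: stands in for FastNewContextStub::kMaximumSlots.
    const int kMaximumSlots = 64;

    enum AllocationPath { kFastNewContextStub, kRuntimeNewContext };

    AllocationPath ChooseContextAllocation(int heap_slots) {
      // The stub unrolls one store per slot when initializing the context,
      // so it is only used for small contexts; larger ones fall back to
      // the generic runtime path.
      return heap_slots <= kMaximumSlots ? kFastNewContextStub
                                         : kRuntimeNewContext;
    }

    }  // namespace

    int main() {
      std::printf("4 slots   -> %s\n",
                  ChooseContextAllocation(4) == kFastNewContextStub
                      ? "stub" : "runtime");
      std::printf("200 slots -> %s\n",
                  ChooseContextAllocation(200) == kFastNewContextStub
                      ? "stub" : "runtime");
      return 0;
    }

Either way the result lands in r0, which is why the DEBUG check comparing r0 against cp works unchanged for both paths.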
(...skipping 4219 matching lines...)
4425 __ Ret(); | 4431 __ Ret(); |
4426 | 4432 |
4427 // Create a new closure through the slower runtime call. | 4433 // Create a new closure through the slower runtime call. |
4428 __ bind(&gc); | 4434 __ bind(&gc); |
4429 __ push(cp); | 4435 __ push(cp); |
4430 __ push(r3); | 4436 __ push(r3); |
4431 __ TailCallRuntime(ExternalReference(Runtime::kNewClosure), 2, 1); | 4437 __ TailCallRuntime(ExternalReference(Runtime::kNewClosure), 2, 1); |
4432 } | 4438 } |
4433 | 4439 |
4434 | 4440 |
| 4441 void FastNewContextStub::Generate(MacroAssembler* masm) { |
| 4442 // Try to allocate the context in new space. |
| 4443 Label gc; |
| 4444 int length = slots_ + Context::MIN_CONTEXT_SLOTS; |
| 4445 |
| 4446 // Pop the function from the stack. |
| 4447 __ pop(r3); |
| 4448 |
| 4449 // Attempt to allocate the context in new space. |
| 4450 __ AllocateInNewSpace(length + (FixedArray::kHeaderSize / kPointerSize), |
| 4451 r0, |
| 4452 r1, |
| 4453 r2, |
| 4454 &gc, |
| 4455 TAG_OBJECT); |
| 4456 |
| 4457 // Set up the object header. |
| 4458 __ LoadRoot(r2, Heap::kContextMapRootIndex); |
| 4459 __ str(r2, FieldMemOperand(r0, HeapObject::kMapOffset)); |
| 4460 __ mov(r2, Operand(length)); |
| 4461 __ str(r2, FieldMemOperand(r0, Array::kLengthOffset)); |
| 4462 |
| 4463 // Set up the fixed slots. |
| 4464 __ mov(r1, Operand(Smi::FromInt(0))); |
| 4465 __ str(r3, MemOperand(r0, Context::SlotOffset(Context::CLOSURE_INDEX))); |
| 4466 __ str(r0, MemOperand(r0, Context::SlotOffset(Context::FCONTEXT_INDEX))); |
| 4467 __ str(r1, MemOperand(r0, Context::SlotOffset(Context::PREVIOUS_INDEX))); |
| 4468 __ str(r1, MemOperand(r0, Context::SlotOffset(Context::EXTENSION_INDEX))); |
| 4469 |
| 4470 // Copy the global object from the surrounding context. |
| 4471 __ ldr(r1, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX))); |
| 4472 __ str(r1, MemOperand(r0, Context::SlotOffset(Context::GLOBAL_INDEX))); |
| 4473 |
| 4474 // Initialize the rest of the slots to undefined. |
| 4475 __ LoadRoot(r1, Heap::kUndefinedValueRootIndex); |
| 4476 for (int i = Context::MIN_CONTEXT_SLOTS; i < length; i++) { |
| 4477 __ str(r1, MemOperand(r0, Context::SlotOffset(i))); |
| 4478 } |
| 4479 |
| 4480 // Return. The on-stack parameter has already been popped. |
| 4481 __ mov(cp, r0); |
| 4482 __ Ret(); |
| 4483 |
| 4484 // Need to collect. Call into runtime system. |
| 4485 __ bind(&gc); |
| 4486 __ TailCallRuntime(ExternalReference(Runtime::kNewContext), 1, 1); |
| 4487 } |
| 4488 |
| 4489 |
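Read as data rather than assembly, the stub above builds a FixedArray-shaped object: a map and length header, five fixed slots, and the remaining slots filled with undefined. Below is a host-side sketch of the same initialization; the slot index values are assumptions mirroring Context::*_INDEX, which is not shown in this diff.

    // Illustrative model of the object FastNewContextStub::Generate allocates.
    #include <cstdint>

    enum ContextSlot {
      CLOSURE_INDEX = 0,    // the function object popped into r3
      FCONTEXT_INDEX = 1,   // a function context points at itself
      PREVIOUS_INDEX = 2,   // no previous context on this path -> Smi 0
      EXTENSION_INDEX = 3,  // no extension object -> Smi 0
      GLOBAL_INDEX = 4,     // copied from the surrounding context (cp)
      MIN_CONTEXT_SLOTS = 5
    };

    // Fills a caller-provided buffer of `length` words, where
    // length = MIN_CONTEXT_SLOTS + extra slots, as the stub does.
    void InitContextModel(std::intptr_t* ctx, int length,
                          std::intptr_t closure, std::intptr_t global,
                          std::intptr_t undefined) {
      ctx[CLOSURE_INDEX] = closure;
      ctx[FCONTEXT_INDEX] = reinterpret_cast<std::intptr_t>(ctx);
      ctx[PREVIOUS_INDEX] = 0;
      ctx[EXTENSION_INDEX] = 0;
      ctx[GLOBAL_INDEX] = global;
      for (int i = MIN_CONTEXT_SLOTS; i < length; i++) {
        ctx[i] = undefined;  // mirrors the unrolled str loop in the stub
      }
    }

Note that the for loop in the stub's Generate runs at code-generation time, so every slot beyond the fixed five gets its own str instruction in the emitted code; that unrolling is exactly why the fast path must be capped at kMaximumSlots.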
4435 // Count leading zeros in a 32-bit word. On ARMv5 and later it uses the clz | 4490 // Count leading zeros in a 32-bit word. On ARMv5 and later it uses the clz |
4436 // instruction. On pre-ARMv5 hardware this routine gives the wrong answer for 0 | 4491 // instruction. On pre-ARMv5 hardware this routine gives the wrong answer for 0 |
4437 // (31 instead of 32). | 4492 // (31 instead of 32). |
4438 static void CountLeadingZeros( | 4493 static void CountLeadingZeros( |
4439 MacroAssembler* masm, | 4494 MacroAssembler* masm, |
4440 Register source, | 4495 Register source, |
4441 Register scratch, | 4496 Register scratch, |
4442 Register zeros) { | 4497 Register zeros) { |
4443 #ifdef CAN_USE_ARMV5_INSTRUCTIONS | 4498 #ifdef CAN_USE_ARMV5_INSTRUCTIONS |
4444 __ clz(zeros, source); // This instruction is supported on ARMv5 and later. | 4499 __ clz(zeros, source); // This instruction is supported on ARMv5 and later. |
(...skipping 2164 matching lines...)
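As the comment above CountLeadingZeros notes, the pre-ARMv5 fallback deliberately returns 31 rather than 32 for an input of 0. Here is a portable sketch with the same contract, a binary search on the high bits; it is a stand-in for, not a transcription of, the instruction sequence hidden in the elided lines.

    // Count leading zeros in a 32-bit word, mirroring the documented quirk:
    // returns 31 (not 32) when source == 0, like the pre-ARMv5 path.
    #include <cstdint>

    int CountLeadingZeros32(std::uint32_t source) {
      int zeros = 0;
      if ((source & 0xFFFF0000u) == 0) { zeros += 16; source <<= 16; }
      if ((source & 0xFF000000u) == 0) { zeros += 8;  source <<= 8;  }
      if ((source & 0xF0000000u) == 0) { zeros += 4;  source <<= 4;  }
      if ((source & 0xC0000000u) == 0) { zeros += 2;  source <<= 2;  }
      if ((source & 0x80000000u) == 0) { zeros += 1; }
      // For source == 0 every test fires, summing to 16+8+4+2+1 == 31.
      return zeros;
    }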
6609 ASSERT((static_cast<unsigned>(cc_) >> 26) < (1 << 16)); | 6664 ASSERT((static_cast<unsigned>(cc_) >> 26) < (1 << 16)); |
6610 int nnn_value = (never_nan_nan_ ? 2 : 0); | 6665 int nnn_value = (never_nan_nan_ ? 2 : 0); |
6611 if (cc_ != eq) nnn_value = 0; // Avoid duplicate stubs. | 6666 if (cc_ != eq) nnn_value = 0; // Avoid duplicate stubs. |
6612 return (static_cast<unsigned>(cc_) >> 26) | nnn_value | (strict_ ? 1 : 0); | 6667 return (static_cast<unsigned>(cc_) >> 26) | nnn_value | (strict_ ? 1 : 0); |
6613 } | 6668 } |
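For context on the MinorKey above: an ARM condition code lives in the top four bits of an instruction word, so cc_ >> 26 yields a small value occupying bits 5:2, leaving bit 1 free for the never-NaN-NaN flag and bit 0 for strictness. A sketch of that packing, with the field positions inferred from the shifts in the diff rather than taken from a V8 header:

    // Sketch of the compare-stub key packing computed above.
    #include <cassert>
    #include <cstdint>

    std::uint32_t PackCompareStubKey(std::uint32_t cc, bool never_nan_nan,
                                     bool strict, bool is_eq) {
      std::uint32_t key = cc >> 26;          // condition code -> bits 5:2
      assert(key < (1u << 16));              // the same bound the ASSERT checks
      if (is_eq && never_nan_nan) key |= 2;  // NaN flag is only kept for eq,
                                             // avoiding duplicate stubs
      if (strict) key |= 1;                  // strict equality flag
      return key;
    }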
6614 | 6669 |
6615 | 6670 |
6616 #undef __ | 6671 #undef __ |
6617 | 6672 |
6618 } } // namespace v8::internal | 6673 } } // namespace v8::internal |