| OLD | NEW |
| 1 // Copyright 2006-2009 the V8 project authors. All rights reserved. | 1 // Copyright 2006-2009 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 2281 matching lines...) |
| 2292 #endif | 2292 #endif |
| 2293 // Ignore the return value. | 2293 // Ignore the return value. |
| 2294 ASSERT(frame_->height() == original_height); | 2294 ASSERT(frame_->height() == original_height); |
| 2295 } | 2295 } |
| 2296 | 2296 |
| 2297 | 2297 |
| 2298 void CodeGenerator::InstantiateBoilerplate(Handle<JSFunction> boilerplate) { | 2298 void CodeGenerator::InstantiateBoilerplate(Handle<JSFunction> boilerplate) { |
| 2299 VirtualFrame::SpilledScope spilled_scope; | 2299 VirtualFrame::SpilledScope spilled_scope; |
| 2300 ASSERT(boilerplate->IsBoilerplate()); | 2300 ASSERT(boilerplate->IsBoilerplate()); |
| 2301 | 2301 |
| 2302 // Create a new closure. | |
| 2303 frame_->EmitPush(cp); | |
| 2304 __ mov(r0, Operand(boilerplate)); | 2302 __ mov(r0, Operand(boilerplate)); |
| 2305 frame_->EmitPush(r0); | 2303 // Use the fast case closure allocation code that allocates in new |
| 2306 frame_->CallRuntime(Runtime::kNewClosure, 2); | 2304 // space for nested functions that don't need literals cloning. |
| 2307 frame_->EmitPush(r0); | 2305 if (scope()->is_function_scope() && boilerplate->NumberOfLiterals() == 0) { |
| 2306 FastNewClosureStub stub; |
| 2307 frame_->EmitPush(r0); |
| 2308 frame_->CallStub(&stub, 1); |
| 2309 frame_->EmitPush(r0); |
| 2310 } else { |
| 2311 // Create a new closure. |
| 2312 frame_->EmitPush(cp); |
| 2313 frame_->EmitPush(r0); |
| 2314 frame_->CallRuntime(Runtime::kNewClosure, 2); |
| 2315 frame_->EmitPush(r0); |
| 2316 } |
| 2308 } | 2317 } |
| 2309 | 2318 |
| 2310 | 2319 |
| 2311 void CodeGenerator::VisitFunctionLiteral(FunctionLiteral* node) { | 2320 void CodeGenerator::VisitFunctionLiteral(FunctionLiteral* node) { |
| 2312 #ifdef DEBUG | 2321 #ifdef DEBUG |
| 2313 int original_height = frame_->height(); | 2322 int original_height = frame_->height(); |
| 2314 #endif | 2323 #endif |
| 2315 VirtualFrame::SpilledScope spilled_scope; | 2324 VirtualFrame::SpilledScope spilled_scope; |
| 2316 Comment cmnt(masm_, "[ FunctionLiteral"); | 2325 Comment cmnt(masm_, "[ FunctionLiteral"); |
| 2317 | 2326 |
| (...skipping 2059 matching lines...) |
| 4377 frame->EmitPush(r0); | 4386 frame->EmitPush(r0); |
| 4378 break; | 4387 break; |
| 4379 } | 4388 } |
| 4380 | 4389 |
| 4381 default: | 4390 default: |
| 4382 UNREACHABLE(); | 4391 UNREACHABLE(); |
| 4383 } | 4392 } |
| 4384 } | 4393 } |
| 4385 | 4394 |
| 4386 | 4395 |
| 4396 void FastNewClosureStub::Generate(MacroAssembler* masm) { |
| 4397 // Clone the boilerplate in new space. Set the context to the |
| 4398 // current context in cp. |
| 4399 Label gc; |
| 4400 |
| 4401 // Pop the boilerplate function from the stack. |
| 4402 __ pop(r3); |
| 4403 |
| 4404 // Attempt to allocate new JSFunction in new space. |
| 4405 __ AllocateInNewSpace(JSFunction::kSize / kPointerSize, |
| 4406 r0, |
| 4407 r1, |
| 4408 r2, |
| 4409 &gc, |
| 4410 TAG_OBJECT); |
| 4411 |
| 4412 // Compute the function map in the current global context and set that |
| 4413 // as the map of the allocated object. |
| 4414 __ ldr(r2, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX))); |
| 4415 __ ldr(r2, FieldMemOperand(r2, GlobalObject::kGlobalContextOffset)); |
| 4416 __ ldr(r2, MemOperand(r2, Context::SlotOffset(Context::FUNCTION_MAP_INDEX))); |
| 4417 __ str(r2, FieldMemOperand(r0, HeapObject::kMapOffset)); |
| 4418 |
| 4419 // Clone the rest of the boilerplate fields. We don't have to update |
| 4420 // the write barrier because the allocated object is in new space. |
| 4421 for (int offset = kPointerSize; |
| 4422 offset < JSFunction::kSize; |
| 4423 offset += kPointerSize) { |
| 4424 if (offset == JSFunction::kContextOffset) { |
| 4425 __ str(cp, FieldMemOperand(r0, offset)); |
| 4426 } else { |
| 4427 __ ldr(r1, FieldMemOperand(r3, offset)); |
| 4428 __ str(r1, FieldMemOperand(r0, offset)); |
| 4429 } |
| 4430 } |
| 4431 |
| 4432 // Return result. The argument boilerplate has been popped already. |
| 4433 __ Ret(); |
| 4434 |
| 4435 // Create a new closure through the slower runtime call. |
| 4436 __ bind(&gc); |
| 4437 __ push(cp); |
| 4438 __ push(r3); |
| 4439 __ TailCallRuntime(ExternalReference(Runtime::kNewClosure), 2, 1); |
| 4440 } |
| 4441 |
| 4442 |
| 4387 // Count leading zeros in a 32 bit word. On ARM5 and later it uses the clz | 4443 // Count leading zeros in a 32 bit word. On ARM5 and later it uses the clz |
| 4388 // instruction. On pre-ARM5 hardware this routine gives the wrong answer for 0 | 4444 // instruction. On pre-ARM5 hardware this routine gives the wrong answer for 0 |
| 4389 // (31 instead of 32). | 4445 // (31 instead of 32). |
| 4390 static void CountLeadingZeros( | 4446 static void CountLeadingZeros( |
| 4391 MacroAssembler* masm, | 4447 MacroAssembler* masm, |
| 4392 Register source, | 4448 Register source, |
| 4393 Register scratch, | 4449 Register scratch, |
| 4394 Register zeros) { | 4450 Register zeros) { |
| 4395 #ifdef CAN_USE_ARMV5_INSTRUCTIONS | 4451 #ifdef CAN_USE_ARMV5_INSTRUCTIONS |
| 4396 __ clz(zeros, source); // This instruction is only supported after ARM5. | 4452 __ clz(zeros, source); // This instruction is only supported after ARM5. |
| (...skipping 2169 matching lines...) |
| 6566 ASSERT((static_cast<unsigned>(cc_) >> 26) < (1 << 16)); | 6622 ASSERT((static_cast<unsigned>(cc_) >> 26) < (1 << 16)); |
| 6567 int nnn_value = (never_nan_nan_ ? 2 : 0); | 6623 int nnn_value = (never_nan_nan_ ? 2 : 0); |
| 6568 if (cc_ != eq) nnn_value = 0; // Avoid duplicate stubs. | 6624 if (cc_ != eq) nnn_value = 0; // Avoid duplicate stubs. |
| 6569 return (static_cast<unsigned>(cc_) >> 26) | nnn_value | (strict_ ? 1 : 0); | 6625 return (static_cast<unsigned>(cc_) >> 26) | nnn_value | (strict_ ? 1 : 0); |
| 6570 } | 6626 } |
| 6571 | 6627 |
| 6572 | 6628 |
| 6573 #undef __ | 6629 #undef __ |
| 6574 | 6630 |
| 6575 } } // namespace v8::internal | 6631 } } // namespace v8::internal |
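
Editor's note: the following is a minimal, standalone C++ sketch (not V8 code) of the allocation-path decision introduced in InstantiateBoilerplate above. The fast FastNewClosureStub path is taken only for closures created in a function scope whose boilerplate has no literals to clone; everything else falls back to Runtime::kNewClosure. The Boilerplate and ScopeInfo structs here are hypothetical stand-ins, assuming only what the diff itself shows.

// Hypothetical stand-ins for illustration only; the real V8 types differ.
#include <cstdio>

struct Boilerplate {
  int number_of_literals;   // plays the role of boilerplate->NumberOfLiterals()
};

struct ScopeInfo {
  bool is_function_scope;   // plays the role of scope()->is_function_scope()
};

// Mirrors the branch in InstantiateBoilerplate: the stub-based fast path is
// used only when the closure is created in a function scope and there are no
// literals to clone; otherwise the Runtime::kNewClosure call is used.
bool UseFastNewClosure(const ScopeInfo& scope, const Boilerplate& boilerplate) {
  return scope.is_function_scope && boilerplate.number_of_literals == 0;
}

int main() {
  ScopeInfo scope{true};
  Boilerplate with_literals{2};
  Boilerplate without_literals{0};
  std::printf("with literals -> fast path: %d\n",
              UseFastNewClosure(scope, with_literals));     // prints 0
  std::printf("without literals -> fast path: %d\n",
              UseFastNewClosure(scope, without_literals));  // prints 1
  return 0;
}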