Chromium Code Reviews

Side by Side Diff: src/ia32/codegen-ia32.cc

Issue 487017: Refactor Reference so that SetValue and GetValue pop the reference state. (Closed) Base URL: http://v8.googlecode.com/svn/branches/bleeding_edge/
Patch Set: '' Created 10 years, 10 months ago
1 // Copyright 2006-2009 the V8 project authors. All rights reserved. 1 // Copyright 2010 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution. 11 // with the distribution.
(...skipping 620 matching lines...)
632 } 632 }
633 } 633 }
634 if (!skip_arguments) { 634 if (!skip_arguments) {
635 StoreToSlot(arguments->slot(), NOT_CONST_INIT); 635 StoreToSlot(arguments->slot(), NOT_CONST_INIT);
636 if (mode == LAZY_ARGUMENTS_ALLOCATION) done.Bind(); 636 if (mode == LAZY_ARGUMENTS_ALLOCATION) done.Bind();
637 } 637 }
638 StoreToSlot(shadow->slot(), NOT_CONST_INIT); 638 StoreToSlot(shadow->slot(), NOT_CONST_INIT);
639 return frame_->Pop(); 639 return frame_->Pop();
640 } 640 }
641 641
642 //------------------------------------------------------------------------------
643 // CodeGenerator implementation of variables, lookups, and stores.
642 644
643 Reference::Reference(CodeGenerator* cgen, Expression* expression) 645 Reference::Reference(CodeGenerator* cgen,
644 : cgen_(cgen), expression_(expression), type_(ILLEGAL) { 646 Expression* expression,
647 bool persist_after_get)
648 : cgen_(cgen),
649 expression_(expression),
650 type_(ILLEGAL),
651 persist_after_get_(persist_after_get) {
645 cgen->LoadReference(this); 652 cgen->LoadReference(this);
646 } 653 }
647 654
648 655
649 Reference::~Reference() { 656 Reference::~Reference() {
650 cgen_->UnloadReference(this); 657 ASSERT(is_unloaded() || is_illegal());
651 } 658 }
652 659
653 660
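The core of this refactoring is visible in the constructor and destructor above: GetValue and SetValue now unload the reference themselves, popping its state (receiver and, for keyed references, key) off the virtual frame, while the destructor merely asserts that this has happened. The new persist_after_get flag keeps the reference alive between the read and the write of a compound assignment. A minimal standalone sketch of that contract, using a toy VirtualFrame and a fixed keyed-reference size of two (illustrative types only, not V8's API):

    #include <cassert>
    #include <string>
    #include <vector>

    // Toy stand-in for V8's virtual frame: a stack of named elements.
    struct VirtualFrame {
      std::vector<std::string> elements;
    };

    class Reference {
     public:
      Reference(VirtualFrame* frame, bool persist_after_get)
          : frame_(frame), unloaded_(false),
            persist_after_get_(persist_after_get) {
        // LoadReference: a keyed reference pushes receiver and key.
        frame_->elements.push_back("receiver");
        frame_->elements.push_back("key");
      }

      // The destructor no longer unloads; it only checks the contract.
      ~Reference() { assert(unloaded_); }

      void GetValue() {
        frame_->elements.push_back("value");  // load the property value
        if (!persist_after_get_) Unload();    // pop receiver/key, keep value
      }

      void SetValue() {
        // Consumes receiver/key/value; leaves the stored value on top.
        Unload();
      }

     private:
      void Unload() {
        // Nip: drop the reference's two elements, preserving TOS.
        std::string top = frame_->elements.back();
        frame_->elements.resize(frame_->elements.size() - 3);
        frame_->elements.push_back(top);
        unloaded_ = true;
      }

      VirtualFrame* frame_;
      bool unloaded_;
      bool persist_after_get_;
    };

    int main() {
      VirtualFrame f1;
      {
        // Compound assignment (e.g. x[i] += 1): read then write, so the
        // reference must persist after GetValue.
        Reference ref(&f1, /*persist_after_get=*/true);
        ref.GetValue();  // f1: receiver, key, value
        ref.SetValue();  // f1: value
      }
      assert(f1.elements.size() == 1 && f1.elements.back() == "value");

      VirtualFrame f2;
      {
        // Plain read: GetValue unloads the reference immediately.
        Reference ref(&f2, /*persist_after_get=*/false);
        ref.GetValue();  // f2: value (receiver and key already nipped)
      }
      assert(f2.elements.size() == 1);
    }
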
654 void CodeGenerator::LoadReference(Reference* ref) { 661 void CodeGenerator::LoadReference(Reference* ref) {
655 // References are loaded from both spilled and unspilled code. Set the 662 // References are loaded from both spilled and unspilled code. Set the
656 // state to unspilled to allow that (and explicitly spill after 663 // state to unspilled to allow that (and explicitly spill after
657 // construction at the construction sites). 664 // construction at the construction sites).
658 bool was_in_spilled_code = in_spilled_code_; 665 bool was_in_spilled_code = in_spilled_code_;
659 in_spilled_code_ = false; 666 in_spilled_code_ = false;
660 667
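LoadReference saves in_spilled_code_, clears it for the duration of the load, and restores it at the end. That manual save/set/restore pair is the usual scoped-flag pattern; a hedged RAII sketch of the same idea (V8 does it by hand here, and ScopedFlag is an invented name):

    #include <cassert>

    // Invented RAII guard: saves a bool, sets it, restores it on scope
    // exit - the pattern LoadReference implements manually with
    // was_in_spilled_code.
    class ScopedFlag {
     public:
      ScopedFlag(bool* flag, bool value) : flag_(flag), saved_(*flag) {
        *flag_ = value;
      }
      ~ScopedFlag() { *flag_ = saved_; }

     private:
      bool* flag_;
      bool saved_;
    };

    int main() {
      bool in_spilled_code = true;
      {
        ScopedFlag unspilled(&in_spilled_code, false);
        assert(!in_spilled_code);  // references always load unspilled
      }
      assert(in_spilled_code);  // restored, as at the end of LoadReference
    }
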
(...skipping 29 matching lines...)
690 } 697 }
691 698
692 in_spilled_code_ = was_in_spilled_code; 699 in_spilled_code_ = was_in_spilled_code;
693 } 700 }
694 701
695 702
696 void CodeGenerator::UnloadReference(Reference* ref) { 703 void CodeGenerator::UnloadReference(Reference* ref) {
697 // Pop a reference from the stack while preserving TOS. 704 // Pop a reference from the stack while preserving TOS.
698 Comment cmnt(masm_, "[ UnloadReference"); 705 Comment cmnt(masm_, "[ UnloadReference");
699 frame_->Nip(ref->size()); 706 frame_->Nip(ref->size());
707 ref->set_unloaded();
700 } 708 }
701 709
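Nip(n) drops the n elements directly below the top of stack while preserving the top, which is exactly what unloading a reference under a computed value requires; the new set_unloaded() call then records this for the destructor's assert. A toy version of the operation (a plain vector in place of the VirtualFrame):

    #include <cassert>
    #include <cstddef>
    #include <vector>

    // Nip(n): remove the n elements directly below the top of stack,
    // preserving TOS - e.g. [receiver, key, value] --Nip(2)--> [value].
    template <typename T>
    void Nip(std::vector<T>* stack, size_t n) {
      assert(stack->size() >= n + 1);
      T top = stack->back();
      stack->resize(stack->size() - n - 1);
      stack->push_back(top);
    }

    int main() {
      std::vector<int> stack = {1 /* receiver */, 2 /* key */, 42 /* value */};
      Nip(&stack, 2);  // a keyed reference has size 2
      assert(stack.size() == 1 && stack.back() == 42);
    }
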
702 710
703 // ECMA-262, section 9.2, page 30: ToBoolean(). Pop the top of stack and 711 // ECMA-262, section 9.2, page 30: ToBoolean(). Pop the top of stack and
704 // convert it to a boolean in the condition code register or jump to 712 // convert it to a boolean in the condition code register or jump to
705 // 'false_target'/'true_target' as appropriate. 713 // 'false_target'/'true_target' as appropriate.
706 void CodeGenerator::ToBoolean(ControlDestination* dest) { 714 void CodeGenerator::ToBoolean(ControlDestination* dest) {
707 Comment cmnt(masm_, "[ ToBoolean"); 715 Comment cmnt(masm_, "[ ToBoolean");
708 716
709 // The value to convert should be popped from the frame. 717 // The value to convert should be popped from the frame.
(...skipping 1580 matching lines...)
2290 InLoopFlag in_loop = loop_nesting() > 0 ? IN_LOOP : NOT_IN_LOOP; 2298 InLoopFlag in_loop = loop_nesting() > 0 ? IN_LOOP : NOT_IN_LOOP;
2291 CallFunctionStub call_function(arg_count, in_loop, flags); 2299 CallFunctionStub call_function(arg_count, in_loop, flags);
2292 Result answer = frame_->CallStub(&call_function, arg_count + 1); 2300 Result answer = frame_->CallStub(&call_function, arg_count + 1);
2293 // Restore context and replace function on the stack with the 2301 // Restore context and replace function on the stack with the
2294 // result of the stub invocation. 2302 // result of the stub invocation.
2295 frame_->RestoreContextRegister(); 2303 frame_->RestoreContextRegister();
2296 frame_->SetElementAt(0, &answer); 2304 frame_->SetElementAt(0, &answer);
2297 } 2305 }
2298 2306
2299 2307
2300 void CodeGenerator::CallApplyLazy(Property* apply, 2308 void CodeGenerator::CallApplyLazy(Expression* applicand,
2301 Expression* receiver, 2309 Expression* receiver,
2302 VariableProxy* arguments, 2310 VariableProxy* arguments,
2303 int position) { 2311 int position) {
2312 // An optimized implementation of expressions of the form
2313 // x.apply(y, arguments).
2314 // If the arguments object of the scope has not been allocated,
2315 // and x.apply is Function.prototype.apply, this optimization
2316 // just copies y and the arguments of the current function on the
2317 // stack, as receiver and arguments, and calls x.
2318 // In the implementation comments, we call x the applicand
2319 // and y the receiver.
2304 ASSERT(ArgumentsMode() == LAZY_ARGUMENTS_ALLOCATION); 2320 ASSERT(ArgumentsMode() == LAZY_ARGUMENTS_ALLOCATION);
2305 ASSERT(arguments->IsArguments()); 2321 ASSERT(arguments->IsArguments());
2306 2322
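The comment block above reduces the fast-path decision to two tests: the arguments slot must still hold the hole (the arguments object was never materialized), and applicand.apply must be the genuine Function.prototype.apply builtin. A toy model of that gate, with invented names standing in for the real checks:

    #include <cassert>

    // Toy model of the fast-path gate for x.apply(y, arguments).
    // ApplySite and CanApplyLazily are invented names for the sketch.
    struct ApplySite {
      bool arguments_slot_is_hole;  // arguments object never allocated?
      bool apply_is_builtin;        // x.apply == Function.prototype.apply?
    };

    // When both hold, the codegen can copy y and the caller's actual
    // parameters straight onto the stack and call x directly, skipping
    // allocation of the arguments object.
    bool CanApplyLazily(const ApplySite& site) {
      return site.arguments_slot_is_hole && site.apply_is_builtin;
    }

    int main() {
      assert(CanApplyLazily({true, true}));
      assert(!CanApplyLazily({false, true}));  // 'arguments' already exists
      assert(!CanApplyLazily({true, false}));  // user overrode x.apply
    }
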
2307 JumpTarget slow, done; 2323 // Load applicand.apply onto the stack. This will usually
2308
2309 // Load the apply function onto the stack. This will usually
2310 // give us a megamorphic load site. Not super, but it works. 2324 // give us a megamorphic load site. Not super, but it works.
2311 Reference ref(this, apply); 2325 Load(applicand);
2312 ref.GetValue(); 2326 Handle<String> name = Factory::LookupAsciiSymbol("apply");
2313 ASSERT(ref.type() == Reference::NAMED); 2327 frame()->Push(name);
2328 Result answer = frame()->CallLoadIC(RelocInfo::CODE_TARGET);
2329 __ nop();
2330 frame()->Push(&answer);
2314 2331
2315 // Load the receiver and the existing arguments object onto the 2332 // Load the receiver and the existing arguments object onto the
2316 // expression stack. Avoid allocating the arguments object here. 2333 // expression stack. Avoid allocating the arguments object here.
2317 Load(receiver); 2334 Load(receiver);
2318 LoadFromSlot(scope_->arguments()->var()->slot(), NOT_INSIDE_TYPEOF); 2335 LoadFromSlot(scope_->arguments()->var()->slot(), NOT_INSIDE_TYPEOF);
2319 2336
2320 // Emit the source position information after having loaded the 2337 // Emit the source position information after having loaded the
2321 // receiver and the arguments. 2338 // receiver and the arguments.
2322 CodeForSourcePosition(position); 2339 CodeForSourcePosition(position);
2340 // Contents of frame at this point:
2341 // Frame[0]: arguments object of the current function or the hole.
2342 // Frame[1]: receiver
2343 // Frame[2]: applicand.apply
2344 // Frame[3]: applicand.
2323 2345
2324 // Check if the arguments object has been lazily allocated 2346 // Check if the arguments object has been lazily allocated
2325 // already. If so, just use that instead of copying the arguments 2347 // already. If so, just use that instead of copying the arguments
2326 // from the stack. This also deals with cases where a local variable 2348 // from the stack. This also deals with cases where a local variable
2327 // named 'arguments' has been introduced. 2349 // named 'arguments' has been introduced.
2328 frame_->Dup(); 2350 frame_->Dup();
2329 Result probe = frame_->Pop(); 2351 Result probe = frame_->Pop();
2330 bool try_lazy = true; 2352 { VirtualFrame::SpilledScope spilled_scope;
2331 if (probe.is_constant()) { 2353 Label slow, done;
2332 try_lazy = probe.handle()->IsTheHole(); 2354 bool try_lazy = true;
2333 } else { 2355 if (probe.is_constant()) {
2334 __ cmp(Operand(probe.reg()), Immediate(Factory::the_hole_value())); 2356 try_lazy = probe.handle()->IsTheHole();
2335 probe.Unuse(); 2357 } else {
2336 slow.Branch(not_equal); 2358 __ cmp(Operand(probe.reg()), Immediate(Factory::the_hole_value()));
2337 } 2359 probe.Unuse();
2360 __ j(not_equal, &slow);
2361 }
2338 2362
2339 if (try_lazy) { 2363 if (try_lazy) {
2340 JumpTarget build_args; 2364 Label build_args;
2365 // Get rid of the arguments object probe.
2366 frame_->Drop(); // Can be called on a spilled frame.
2367 // Stack now has 3 elements on it.
2368 // Contents of stack at this point:
2369 // esp[0]: receiver
2370 // esp[1]: applicand.apply
2371 // esp[2]: applicand.
2341 2372
2342 // Get rid of the arguments object probe. 2373 // Check that the receiver really is a JavaScript object.
2343 frame_->Drop(); 2374 __ mov(eax, Operand(esp, 0));
2344 2375 __ test(eax, Immediate(kSmiTagMask));
2345 // Before messing with the execution stack, we sync all 2376 __ j(zero, &build_args);
2346 // elements. This is bound to happen anyway because we're
2347 // about to call a function.
2348 frame_->SyncRange(0, frame_->element_count() - 1);
2349
2350 // Check that the receiver really is a JavaScript object.
2351 { frame_->PushElementAt(0);
2352 Result receiver = frame_->Pop();
2353 receiver.ToRegister();
2354 __ test(receiver.reg(), Immediate(kSmiTagMask));
2355 build_args.Branch(zero);
2356 Result tmp = allocator_->Allocate();
2357 // We allow all JSObjects including JSFunctions. As long as 2377 // We allow all JSObjects including JSFunctions. As long as
2358 // JS_FUNCTION_TYPE is the last instance type and it is right 2378 // JS_FUNCTION_TYPE is the last instance type and it is right
2359 // after LAST_JS_OBJECT_TYPE, we do not have to check the upper 2379 // after LAST_JS_OBJECT_TYPE, we do not have to check the upper
2360 // bound. 2380 // bound.
2361 ASSERT(LAST_TYPE == JS_FUNCTION_TYPE); 2381 ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
2362 ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1); 2382 ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1);
2363 __ CmpObjectType(receiver.reg(), FIRST_JS_OBJECT_TYPE, tmp.reg()); 2383 __ CmpObjectType(eax, FIRST_JS_OBJECT_TYPE, ecx);
2364 build_args.Branch(less); 2384 __ j(below, &build_args);
2385
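The two ASSERTs encode the trick used here: because JS_FUNCTION_TYPE is the last instance type and immediately follows LAST_JS_OBJECT_TYPE, a single lower-bound comparison against FIRST_JS_OBJECT_TYPE accepts every JSObject, functions included. A self-contained illustration of the same one-comparison range check (the enum values below are invented; only the ordering matters, which is what the ASSERTs verify):

    #include <cassert>

    // Illustrative instance-type layout (values invented for the sketch).
    enum InstanceType {
      SMI_LIKE_TYPE = 0,
      STRING_TYPE = 1,
      FIRST_JS_OBJECT_TYPE = 2,
      JS_ARRAY_TYPE = 3,
      LAST_JS_OBJECT_TYPE = 4,
      JS_FUNCTION_TYPE = 5,  // == LAST_JS_OBJECT_TYPE + 1, and last overall
      LAST_TYPE = JS_FUNCTION_TYPE
    };

    // One comparison replaces a two-sided range check.
    bool IsJSObjectIncludingFunctions(InstanceType t) {
      return t >= FIRST_JS_OBJECT_TYPE;  // no upper bound needed
    }

    int main() {
      static_assert(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1, "layout");
      static_assert(LAST_TYPE == JS_FUNCTION_TYPE, "layout");
      assert(IsJSObjectIncludingFunctions(JS_FUNCTION_TYPE));
      assert(!IsJSObjectIncludingFunctions(STRING_TYPE));
    }
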
2386 // Check that applicand.apply is Function.prototype.apply.
2387 __ mov(eax, Operand(esp, kPointerSize));
2388 __ test(eax, Immediate(kSmiTagMask));
2389 __ j(zero, &build_args);
2390 __ CmpObjectType(eax, JS_FUNCTION_TYPE, ecx);
2391 __ j(not_equal, &build_args);
2392 __ mov(ecx, FieldOperand(eax, JSFunction::kSharedFunctionInfoOffset));
2393 Handle<Code> apply_code(Builtins::builtin(Builtins::FunctionApply));
2394 __ cmp(FieldOperand(ecx, SharedFunctionInfo::kCodeOffset),
2395 Immediate(apply_code));
2396 __ j(not_equal, &build_args);
2397
2398 // Check that applicand is a function.
2399 __ mov(edi, Operand(esp, 2 * kPointerSize));
2400 __ test(edi, Immediate(kSmiTagMask));
2401 __ j(zero, &build_args);
2402 __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
2403 __ j(not_equal, &build_args);
2404
2405 // Copy the arguments to this function possibly from the
2406 // adaptor frame below it.
2407 Label invoke, adapted;
2408 __ mov(edx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
2409 __ mov(ecx, Operand(edx, StandardFrameConstants::kContextOffset));
2410 __ cmp(Operand(ecx),
2411 Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
2412 __ j(equal, &adapted);
2413
2414 // No arguments adaptor frame. Copy fixed number of arguments.
2415 __ mov(eax, Immediate(scope_->num_parameters()));
2416 for (int i = 0; i < scope_->num_parameters(); i++) {
2417 __ push(frame_->ParameterAt(i));
2418 }
2419 __ jmp(&invoke);
2420
2421 // Arguments adaptor frame present. Copy arguments from there, but
2422 // avoid copying too many arguments to avoid stack overflows.
2423 __ bind(&adapted);
2424 static const uint32_t kArgumentsLimit = 1 * KB;
2425 __ mov(eax, Operand(edx, ArgumentsAdaptorFrameConstants::kLengthOffset));
2426 __ SmiUntag(eax);
2427 __ mov(ecx, Operand(eax));
2428 __ cmp(eax, kArgumentsLimit);
2429 __ j(above, &build_args);
2430
2431 // Loop through the arguments pushing them onto the execution
2432 // stack. We don't inform the virtual frame of the push, so we don't
2433 // have to worry about getting rid of the elements from the virtual
2434 // frame.
2435 Label loop;
2436 // ecx is a small non-negative integer, due to the test above.
2437 __ test(ecx, Operand(ecx));
2438 __ j(zero, &invoke);
2439 __ bind(&loop);
2440 __ push(Operand(edx, ecx, times_pointer_size, 1 * kPointerSize));
2441 __ dec(ecx);
2442 __ j(not_zero, &loop);
2443
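The adapted path reads the argument count from the adaptor frame, bails out to build_args for counts above kArgumentsLimit to avoid overflowing the stack, and then walks a countdown register over the caller's arguments so they land on the stack in calling-convention order. A toy version of that bounded copy loop (vectors in place of frames; the indexing mirrors the loop structure, not the exact ia32 frame layout):

    #include <cassert>
    #include <cstdint>
    #include <vector>

    static const uint32_t kArgumentsLimit = 1024;  // "1 * KB" in the code above

    // Invented helper mirroring the copy loop: refuse oversized argument
    // counts, then push arguments first-to-last so the last one is on top.
    bool CopyArguments(const std::vector<int>& caller_args,
                       std::vector<int>* stack) {
      uint32_t count = static_cast<uint32_t>(caller_args.size());
      if (count > kArgumentsLimit) return false;  // j(above, &build_args)
      for (uint32_t i = count; i > 0; i--) {      // dec(ecx); j(not_zero, &loop)
        stack->push_back(caller_args[count - i]);
      }
      return true;
    }

    int main() {
      std::vector<int> stack;
      assert(CopyArguments({10, 20, 30}, &stack));
      assert(stack.back() == 30);  // last argument ends up on top
      assert(!CopyArguments(std::vector<int>(2000), &stack));  // too many
    }
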
2444 // Invoke the function.
2445 __ bind(&invoke);
2446 ParameterCount actual(eax);
2447 __ InvokeFunction(edi, actual, CALL_FUNCTION);
2448 // Drop applicand.apply and applicand from the stack, and push
2449 // the result of the function call, but leave the spilled frame
2450 // unchanged, with 3 elements, so it is correct when we compile the
2451 // slow-case code.
2452 __ add(Operand(esp), Immediate(2 * kPointerSize));
2453 __ push(eax);
2454 // Stack now has 1 element:
2455 // esp[0]: result
2456 __ jmp(&done);
2457
2458 // Slow-case: Allocate the arguments object since we know it isn't
2459 // there, and fall through to the slow case where we call
2460 // applicand.apply.
2461 __ bind(&build_args);
2462 // Stack now has 3 elements, because we have jumped here from a point where:
2463 // esp[0]: receiver
2464 // esp[1]: applicand.apply
2465 // esp[2]: applicand.
2466
2467 // StoreArgumentsObject requires a correct frame, and may modify it.
2468 Result arguments_object = StoreArgumentsObject(false);
2469 frame_->SpillAll();
2470 arguments_object.ToRegister();
2471 frame_->EmitPush(arguments_object.reg());
2472 arguments_object.Unuse();
2473 // Stack and frame now have 4 elements.
2474 __ bind(&slow);
2365 } 2475 }
2366 2476
2367 // Verify that we're invoking Function.prototype.apply. 2477 // Generic computation of x.apply(y, args) with no special optimization.
2368 { frame_->PushElementAt(1); 2478 // Flip applicand.apply and applicand on the stack, so
2369 Result apply = frame_->Pop(); 2479 // applicand looks like the receiver of the applicand.apply call.
2370 apply.ToRegister(); 2480 // Then process it as a normal function call.
2371 __ test(apply.reg(), Immediate(kSmiTagMask)); 2481 __ mov(eax, Operand(esp, 3 * kPointerSize));
2372 build_args.Branch(zero); 2482 __ mov(ebx, Operand(esp, 2 * kPointerSize));
2373 Result tmp = allocator_->Allocate(); 2483 __ mov(Operand(esp, 2 * kPointerSize), eax);
2374 __ CmpObjectType(apply.reg(), JS_FUNCTION_TYPE, tmp.reg()); 2484 __ mov(Operand(esp, 3 * kPointerSize), ebx);
2375 build_args.Branch(not_equal);
2376 __ mov(tmp.reg(),
2377 FieldOperand(apply.reg(), JSFunction::kSharedFunctionInfoOffset));
2378 Handle<Code> apply_code(Builtins::builtin(Builtins::FunctionApply));
2379 __ cmp(FieldOperand(tmp.reg(), SharedFunctionInfo::kCodeOffset),
2380 Immediate(apply_code));
2381 build_args.Branch(not_equal);
2382 }
2383 2485
2384 // Get the function receiver from the stack. Check that it 2486 CallFunctionStub call_function(2, NOT_IN_LOOP, NO_CALL_FUNCTION_FLAGS);
2385 // really is a function. 2487 Result res = frame_->CallStub(&call_function, 3);
2386 __ mov(edi, Operand(esp, 2 * kPointerSize)); 2488 // The function and its two arguments have been dropped.
2387 __ test(edi, Immediate(kSmiTagMask)); 2489 frame_->Drop(1); // Drop the receiver as well.
2388 build_args.Branch(zero); 2490 res.ToRegister();
2389 __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx); 2491 frame_->EmitPush(res.reg());
2390 build_args.Branch(not_equal); 2492 // Stack now has 1 element:
2391 2493 // esp[0]: result
2392 // Copy the arguments to this function possibly from the 2494 if (try_lazy) __ bind(&done);
2393 // adaptor frame below it. 2495 } // End of spilled scope.
2394 Label invoke, adapted; 2496 // Restore the context register after a call.
2395 __ mov(edx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
2396 __ mov(ecx, Operand(edx, StandardFrameConstants::kContextOffset));
2397 __ cmp(Operand(ecx),
2398 Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
2399 __ j(equal, &adapted);
2400
2401 // No arguments adaptor frame. Copy fixed number of arguments.
2402 __ mov(eax, Immediate(scope_->num_parameters()));
2403 for (int i = 0; i < scope_->num_parameters(); i++) {
2404 __ push(frame_->ParameterAt(i));
2405 }
2406 __ jmp(&invoke);
2407
2408 // Arguments adaptor frame present. Copy arguments from there, but
2409 // avoid copying too many arguments to avoid stack overflows.
2410 __ bind(&adapted);
2411 static const uint32_t kArgumentsLimit = 1 * KB;
2412 __ mov(eax, Operand(edx, ArgumentsAdaptorFrameConstants::kLengthOffset));
2413 __ SmiUntag(eax);
2414 __ mov(ecx, Operand(eax));
2415 __ cmp(eax, kArgumentsLimit);
2416 build_args.Branch(above);
2417
2418 // Loop through the arguments pushing them onto the execution
2419 // stack. We don't inform the virtual frame of the push, so we don't
2420 // have to worry about getting rid of the elements from the virtual
2421 // frame.
2422 Label loop;
2423 __ bind(&loop);
2424 __ test(ecx, Operand(ecx));
2425 __ j(zero, &invoke);
2426 __ push(Operand(edx, ecx, times_4, 1 * kPointerSize));
2427 __ dec(ecx);
2428 __ jmp(&loop);
2429
2430 // Invoke the function. The virtual frame knows about the receiver
2431 // so make sure to forget that explicitly.
2432 __ bind(&invoke);
2433 ParameterCount actual(eax);
2434 __ InvokeFunction(edi, actual, CALL_FUNCTION);
2435 frame_->Forget(1);
2436 Result result = allocator()->Allocate(eax);
2437 frame_->SetElementAt(0, &result);
2438 done.Jump();
2439
2440 // Slow-case: Allocate the arguments object since we know it isn't
2441 // there, and fall through to the slow case where we call
2442 // Function.prototype.apply.
2443 build_args.Bind();
2444 Result arguments_object = StoreArgumentsObject(false);
2445 frame_->Push(&arguments_object);
2446 slow.Bind();
2447 }
2448
2449 // Flip the apply function and the function to call on the stack, so
2450 // the function looks like the receiver of the apply call. This way,
2451 // the generic Function.prototype.apply implementation can deal with
2452 // the call like it usually does.
2453 Result a2 = frame_->Pop();
2454 Result a1 = frame_->Pop();
2455 Result ap = frame_->Pop();
2456 Result fn = frame_->Pop();
2457 frame_->Push(&ap);
2458 frame_->Push(&fn);
2459 frame_->Push(&a1);
2460 frame_->Push(&a2);
2461 CallFunctionStub call_function(2, NOT_IN_LOOP, NO_CALL_FUNCTION_FLAGS);
2462 Result res = frame_->CallStub(&call_function, 3);
2463 frame_->Push(&res);
2464
2465 // All done. Restore context register after call.
2466 if (try_lazy) done.Bind();
2467 frame_->RestoreContextRegister(); 2497 frame_->RestoreContextRegister();
2468 } 2498 }
2469 2499
2470 2500
2471 class DeferredStackCheck: public DeferredCode { 2501 class DeferredStackCheck: public DeferredCode {
2472 public: 2502 public:
2473 DeferredStackCheck() { 2503 DeferredStackCheck() {
2474 set_comment("[ DeferredStackCheck"); 2504 set_comment("[ DeferredStackCheck");
2475 } 2505 }
2476 2506
(...skipping 1019 matching lines...)
3496 end_del_check.Bind(); 3526 end_del_check.Bind();
3497 // Store the entry in the 'each' expression and take another spin in the 3527 // Store the entry in the 'each' expression and take another spin in the
3498 // loop. edx: i'th entry of the enum cache (or string thereof) 3528 // loop. edx: i'th entry of the enum cache (or string thereof)
3499 frame_->EmitPush(ebx); 3529 frame_->EmitPush(ebx);
3500 { Reference each(this, node->each()); 3530 { Reference each(this, node->each());
3501 // Loading a reference may leave the frame in an unspilled state. 3531 // Loading a reference may leave the frame in an unspilled state.
3502 frame_->SpillAll(); 3532 frame_->SpillAll();
3503 if (!each.is_illegal()) { 3533 if (!each.is_illegal()) {
3504 if (each.size() > 0) { 3534 if (each.size() > 0) {
3505 frame_->EmitPush(frame_->ElementAt(each.size())); 3535 frame_->EmitPush(frame_->ElementAt(each.size()));
3506 } 3536 each.SetValue(NOT_CONST_INIT);
3507 // If the reference was to a slot we rely on the convenient property 3537 frame_->Drop(2);
3508 // that it doesn't matter whether a value (eg, ebx pushed above) is 3538 } else {
3509 // right on top of or right underneath a zero-sized reference. 3539 // If the reference was to a slot we rely on the convenient property
3510 each.SetValue(NOT_CONST_INIT); 3540 // that it doesn't matter whether a value (eg, ebx pushed above) is
3511 if (each.size() > 0) { 3541 // right on top of or right underneath a zero-sized reference.
3512 // It's safe to pop the value lying on top of the reference before 3542 each.SetValue(NOT_CONST_INIT);
3513 // unloading the reference itself (which preserves the top of stack,
3514 // ie, now the topmost value of the non-zero sized reference), since
3515 // we will discard the top of stack after unloading the reference
3516 // anyway.
3517 frame_->Drop(); 3543 frame_->Drop();
3518 } 3544 }
3519 } 3545 }
3520 } 3546 }
3521 // Unloading a reference may leave the frame in an unspilled state. 3547 // Unloading a reference may leave the frame in an unspilled state.
3522 frame_->SpillAll(); 3548 frame_->SpillAll();
3523 3549
3524 // Discard the i'th entry pushed above or else the remainder of the
3525 // reference, whichever is currently on top of the stack.
3526 frame_->Drop();
3527
3528 // Body. 3550 // Body.
3529 CheckStack(); // TODO(1222600): ignore if body contains calls. 3551 CheckStack(); // TODO(1222600): ignore if body contains calls.
3530 VisitAndSpill(node->body()); 3552 VisitAndSpill(node->body());
3531 3553
3532 // Next. Reestablish a spilled frame in case we are coming here via 3554 // Next. Reestablish a spilled frame in case we are coming here via
3533 // a continue in the body. 3555 // a continue in the body.
3534 node->continue_target()->Bind(); 3556 node->continue_target()->Bind();
3535 frame_->SpillAll(); 3557 frame_->SpillAll();
3536 frame_->EmitPop(eax); 3558 frame_->EmitPop(eax);
3537 __ add(Operand(eax), Immediate(Smi::FromInt(1))); 3559 __ add(Operand(eax), Immediate(Smi::FromInt(1)));
(...skipping 1029 matching lines...)
4567 Comment cmnt(masm_, "[ CatchExtensionObject"); 4589 Comment cmnt(masm_, "[ CatchExtensionObject");
4568 Load(node->key()); 4590 Load(node->key());
4569 Load(node->value()); 4591 Load(node->value());
4570 Result result = 4592 Result result =
4571 frame_->CallRuntime(Runtime::kCreateCatchExtensionObject, 2); 4593 frame_->CallRuntime(Runtime::kCreateCatchExtensionObject, 2);
4572 frame_->Push(&result); 4594 frame_->Push(&result);
4573 } 4595 }
4574 4596
4575 4597
4576 void CodeGenerator::VisitAssignment(Assignment* node) { 4598 void CodeGenerator::VisitAssignment(Assignment* node) {
4599 #ifdef DEBUG
4600 int original_height = frame_->height();
4601 #endif
4577 Comment cmnt(masm_, "[ Assignment"); 4602 Comment cmnt(masm_, "[ Assignment");
4578 4603
4579 { Reference target(this, node->target()); 4604 { Reference target(this, node->target(), node->is_compound());
4580 if (target.is_illegal()) { 4605 if (target.is_illegal()) {
4581 // Fool the virtual frame into thinking that we left the assignment's 4606 // Fool the virtual frame into thinking that we left the assignment's
4582 // value on the frame. 4607 // value on the frame.
4583 frame_->Push(Smi::FromInt(0)); 4608 frame_->Push(Smi::FromInt(0));
4584 return; 4609 return;
4585 } 4610 }
4586 Variable* var = node->target()->AsVariableProxy()->AsVariable(); 4611 Variable* var = node->target()->AsVariableProxy()->AsVariable();
4587 4612
4588 if (node->starts_initialization_block()) { 4613 if (node->starts_initialization_block()) {
4589 ASSERT(target.type() == Reference::NAMED || 4614 ASSERT(target.type() == Reference::NAMED ||
4590 target.type() == Reference::KEYED); 4615 target.type() == Reference::KEYED);
4591 // Change to slow case in the beginning of an initialization 4616 // Change to slow case in the beginning of an initialization
4592 // block to avoid the quadratic behavior of repeatedly adding 4617 // block to avoid the quadratic behavior of repeatedly adding
4593 // fast properties. 4618 // fast properties.
4594 4619
4595 // The receiver is the argument to the runtime call. It is the 4620 // The receiver is the argument to the runtime call. It is the
4596 // first value pushed when the reference was loaded to the 4621 // first value pushed when the reference was loaded to the
4597 // frame. 4622 // frame.
4598 frame_->PushElementAt(target.size() - 1); 4623 frame_->PushElementAt(target.size() - 1);
4599 Result ignored = frame_->CallRuntime(Runtime::kToSlowProperties, 1); 4624 Result ignored = frame_->CallRuntime(Runtime::kToSlowProperties, 1);
4600 } 4625 }
4626 if (node->ends_initialization_block()) {
4627 // Add an extra copy of the receiver to the frame, so that it can be
4628 // converted back to fast case after the assignment.
4629 ASSERT(target.type() == Reference::NAMED ||
4630 target.type() == Reference::KEYED);
4631 if (target.type() == Reference::NAMED) {
4632 frame_->Dup();
4633 // Dup target receiver on stack.
4634 } else {
4635 ASSERT(target.type() == Reference::KEYED);
4636 Result temp = frame_->Pop();
4637 frame_->Dup();
4638 frame_->Push(&temp);
4639 }
4640 }
4601 if (node->op() == Token::ASSIGN || 4641 if (node->op() == Token::ASSIGN ||
4602 node->op() == Token::INIT_VAR || 4642 node->op() == Token::INIT_VAR ||
4603 node->op() == Token::INIT_CONST) { 4643 node->op() == Token::INIT_CONST) {
4604 Load(node->value()); 4644 Load(node->value());
4605 4645
4606 } else { 4646 } else { // Assignment is a compound assignment.
4607 Literal* literal = node->value()->AsLiteral(); 4647 Literal* literal = node->value()->AsLiteral();
4608 bool overwrite_value = 4648 bool overwrite_value =
4609 (node->value()->AsBinaryOperation() != NULL && 4649 (node->value()->AsBinaryOperation() != NULL &&
4610 node->value()->AsBinaryOperation()->ResultOverwriteAllowed()); 4650 node->value()->AsBinaryOperation()->ResultOverwriteAllowed());
4611 Variable* right_var = node->value()->AsVariableProxy()->AsVariable(); 4651 Variable* right_var = node->value()->AsVariableProxy()->AsVariable();
4612 // There are two cases where the target is not read in the right hand 4652 // There are two cases where the target is not read in the right hand
4613 // side, that are easy to test for: the right hand side is a literal, 4653 // side, that are easy to test for: the right hand side is a literal,
4614 // or the right hand side is a different variable. TakeValue invalidates 4654 // or the right hand side is a different variable. TakeValue invalidates
4615 // the target, with an implicit promise that it will be written to again 4655 // the target, with an implicit promise that it will be written to again
4616 // before it is read. 4656 // before it is read.
4617 if (literal != NULL || (right_var != NULL && right_var != var)) { 4657 if (literal != NULL || (right_var != NULL && right_var != var)) {
4618 target.TakeValue(); 4658 target.TakeValue();
4619 } else { 4659 } else {
4620 target.GetValue(); 4660 target.GetValue();
4621 } 4661 }
4622 Load(node->value()); 4662 Load(node->value());
4623 GenericBinaryOperation(node->binary_op(), 4663 GenericBinaryOperation(node->binary_op(),
4624 node->type(), 4664 node->type(),
4625 overwrite_value ? OVERWRITE_RIGHT : NO_OVERWRITE); 4665 overwrite_value ? OVERWRITE_RIGHT : NO_OVERWRITE);
4626 } 4666 }
4627 4667
4628 if (var != NULL && 4668 if (var != NULL &&
4629 var->mode() == Variable::CONST && 4669 var->mode() == Variable::CONST &&
4630 node->op() != Token::INIT_VAR && node->op() != Token::INIT_CONST) { 4670 node->op() != Token::INIT_VAR && node->op() != Token::INIT_CONST) {
4631 // Assignment ignored - leave the value on the stack. 4671 // Assignment ignored - leave the value on the stack.
4672 UnloadReference(&target);
4632 } else { 4673 } else {
4633 CodeForSourcePosition(node->position()); 4674 CodeForSourcePosition(node->position());
4634 if (node->op() == Token::INIT_CONST) { 4675 if (node->op() == Token::INIT_CONST) {
4635 // Dynamic constant initializations must use the function context 4676 // Dynamic constant initializations must use the function context
4636 // and initialize the actual constant declared. Dynamic variable 4677 // and initialize the actual constant declared. Dynamic variable
4637 // initializations are simply assignments and use SetValue. 4678 // initializations are simply assignments and use SetValue.
4638 target.SetValue(CONST_INIT); 4679 target.SetValue(CONST_INIT);
4639 } else { 4680 } else {
4640 target.SetValue(NOT_CONST_INIT); 4681 target.SetValue(NOT_CONST_INIT);
4641 } 4682 }
4642 if (node->ends_initialization_block()) { 4683 if (node->ends_initialization_block()) {
4643 ASSERT(target.type() == Reference::NAMED || 4684 ASSERT(target.type() == Reference::UNLOADED);
4644 target.type() == Reference::KEYED);
4645 // End of initialization block. Revert to fast case. The 4685 // End of initialization block. Revert to fast case. The
4646 // argument to the runtime call is the receiver, which is the 4686 // argument to the runtime call is the extra copy of the receiver,
4647 // first value pushed as part of the reference, which is below 4687 // which is below the value of the assignment.
4648 // the lhs value. 4688 // Swap the receiver and the value of the assignment expression.
4649 frame_->PushElementAt(target.size()); 4689 Result lhs = frame_->Pop();
4690 Result receiver = frame_->Pop();
4691 frame_->Push(&lhs);
4692 frame_->Push(&receiver);
4650 Result ignored = frame_->CallRuntime(Runtime::kToFastProperties, 1); 4693 Result ignored = frame_->CallRuntime(Runtime::kToFastProperties, 1);
4651 } 4694 }
4652 } 4695 }
4653 } 4696 }
4697 ASSERT(frame_->height() == original_height + 1);
4654 } 4698 }
4655 4699
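The stack gymnastics in VisitAssignment are easier to follow on a plain vector. For a named target the frame holds [.., receiver], so Dup suffices; for a keyed target it holds [.., receiver, key], so the key is popped, the receiver duplicated, and the key pushed back. After SetValue leaves the assigned value on top, the extra receiver is swapped above the value and handed to Runtime::kToFastProperties. A toy walkthrough of both moves (illustrative code, not the frame API):

    #include <cassert>
    #include <string>
    #include <vector>

    using Stack = std::vector<std::string>;

    // Keyed target entering an initialization block:
    // [receiver, key] -> [receiver, receiver, key].
    void DupReceiverUnderKey(Stack* s) {
      std::string key = s->back();       // Result temp = frame_->Pop();
      s->pop_back();
      std::string receiver = s->back();
      s->push_back(receiver);            // frame_->Dup();
      s->push_back(key);                 // frame_->Push(&temp);
    }

    // After SetValue leaves the value on top of the extra receiver, swap
    // so the receiver is on top for Runtime::kToFastProperties:
    // [receiver, value] -> [value, receiver].
    void SwapReceiverAndValue(Stack* s) {
      std::string lhs = s->back();
      s->pop_back();
      std::string receiver = s->back();
      s->pop_back();
      s->push_back(lhs);
      s->push_back(receiver);
    }

    int main() {
      Stack s = {"receiver", "key"};
      DupReceiverUnderKey(&s);
      assert((s == Stack{"receiver", "receiver", "key"}));

      Stack t = {"receiver", "value"};
      SwapReceiverAndValue(&t);
      assert((t == Stack{"value", "receiver"}));
    }
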
4656 4700
4657 void CodeGenerator::VisitThrow(Throw* node) { 4701 void CodeGenerator::VisitThrow(Throw* node) {
4658 Comment cmnt(masm_, "[ Throw"); 4702 Comment cmnt(masm_, "[ Throw");
4659 Load(node->exception()); 4703 Load(node->exception());
4660 Result result = frame_->CallRuntime(Runtime::kThrow, 1); 4704 Result result = frame_->CallRuntime(Runtime::kThrow, 1);
4661 frame_->Push(&result); 4705 frame_->Push(&result);
4662 } 4706 }
4663 4707
(...skipping 142 matching lines...)
4806 4850
4807 Handle<String> name = Handle<String>::cast(literal->handle()); 4851 Handle<String> name = Handle<String>::cast(literal->handle());
4808 4852
4809 if (ArgumentsMode() == LAZY_ARGUMENTS_ALLOCATION && 4853 if (ArgumentsMode() == LAZY_ARGUMENTS_ALLOCATION &&
4810 name->IsEqualTo(CStrVector("apply")) && 4854 name->IsEqualTo(CStrVector("apply")) &&
4811 args->length() == 2 && 4855 args->length() == 2 &&
4812 args->at(1)->AsVariableProxy() != NULL && 4856 args->at(1)->AsVariableProxy() != NULL &&
4813 args->at(1)->AsVariableProxy()->IsArguments()) { 4857 args->at(1)->AsVariableProxy()->IsArguments()) {
4814 // Use the optimized Function.prototype.apply that avoids 4858 // Use the optimized Function.prototype.apply that avoids
4815 // allocating lazily allocated arguments objects. 4859 // allocating lazily allocated arguments objects.
4816 CallApplyLazy(property, 4860 CallApplyLazy(property->obj(),
4817 args->at(0), 4861 args->at(0),
4818 args->at(1)->AsVariableProxy(), 4862 args->at(1)->AsVariableProxy(),
4819 node->position()); 4863 node->position());
4820 4864
4821 } else { 4865 } else {
4822 // Push the receiver onto the frame. 4866 // Push the receiver onto the frame.
4823 Load(property->obj()); 4867 Load(property->obj());
4824 4868
4825 // Load the arguments. 4869 // Load the arguments.
4826 int arg_count = args->length(); 4870 int arg_count = args->length();
(...skipping 12 matching lines...)
4839 frame_->RestoreContextRegister(); 4883 frame_->RestoreContextRegister();
4840 frame_->Push(&result); 4884 frame_->Push(&result);
4841 } 4885 }
4842 4886
4843 } else { 4887 } else {
4844 // ------------------------------------------- 4888 // -------------------------------------------
4845 // JavaScript example: 'array[index](1, 2, 3)' 4889 // JavaScript example: 'array[index](1, 2, 3)'
4846 // ------------------------------------------- 4890 // -------------------------------------------
4847 4891
4848 // Load the function to call from the property through a reference. 4892 // Load the function to call from the property through a reference.
4849 Reference ref(this, property);
4850 ref.GetValue();
4851 4893
4852 // Pass receiver to called function. 4894 // Pass receiver to called function.
4853 if (property->is_synthetic()) { 4895 if (property->is_synthetic()) {
4896 Reference ref(this, property);
4897 ref.GetValue();
4854 // Use global object as receiver. 4898 // Use global object as receiver.
4855 LoadGlobalReceiver(); 4899 LoadGlobalReceiver();
4856 } else { 4900 } else {
4857 // The reference's size is non-negative. 4901 Load(property->obj());
4858 frame_->PushElementAt(ref.size()); 4902 Load(property->key());
4903 Result function = EmitKeyedLoad(false);
4904 frame_->Drop(); // Key.
4905 Result receiver = frame_->Pop();
4906 frame_->Push(&function);
4907 frame_->Push(&receiver);
4859 } 4908 }
4860 4909
4861 // Call the function. 4910 // Call the function.
4862 CallWithArguments(args, RECEIVER_MIGHT_BE_VALUE, node->position()); 4911 CallWithArguments(args, RECEIVER_MIGHT_BE_VALUE, node->position());
4863 } 4912 }
4864 4913
4865 } else { 4914 } else {
4866 // ---------------------------------- 4915 // ----------------------------------
4867 // JavaScript example: 'foo(1, 2, 3)' // foo is not global 4916 // JavaScript example: 'foo(1, 2, 3)' // foo is not global
4868 // ---------------------------------- 4917 // ----------------------------------
(...skipping 890 matching lines...)
5759 5808
5760 Variable* var = node->expression()->AsVariableProxy()->AsVariable(); 5809 Variable* var = node->expression()->AsVariableProxy()->AsVariable();
5761 bool is_const = (var != NULL && var->mode() == Variable::CONST); 5810 bool is_const = (var != NULL && var->mode() == Variable::CONST);
5762 5811
5763 // Postfix operations need a stack slot under the reference to hold 5812 // Postfix operations need a stack slot under the reference to hold
5764 // the old value while the new value is being stored. This is so that 5813 // the old value while the new value is being stored. This is so that
5765 // in the case that storing the new value requires a call, the old 5814 // in the case that storing the new value requires a call, the old
5766 // value will be in the frame to be spilled. 5815 // value will be in the frame to be spilled.
5767 if (is_postfix) frame_->Push(Smi::FromInt(0)); 5816 if (is_postfix) frame_->Push(Smi::FromInt(0));
5768 5817
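The placeholder push and the comment above it make more sense with the postfix data flow spelled out: the old value must survive the store of the new value, and the store may be an arbitrary call, so the old value gets a reserved frame slot rather than living only in a register. A toy of that flow (storage and StoreMayCall are invented stand-ins):

    #include <cassert>
    #include <vector>

    // Invented stand-ins: 'storage' plays the keyed location a[i], and
    // StoreMayCall plays target.SetValue, which may perform a real call.
    int storage = 41;

    void StoreMayCall(int new_value) {
      storage = new_value;  // in real code this can clobber registers
    }

    // Postfix a[i]++: the result is the OLD value, which must survive
    // the store, so it is parked in a frame slot reserved up front.
    int PostfixIncrement() {
      std::vector<int> frame;
      frame.push_back(0);           // frame_->Push(Smi::FromInt(0))
      int old_value = storage;      // target.TakeValue()
      frame[0] = old_value;         // old value now lives in the frame
      StoreMayCall(old_value + 1);  // target.SetValue(NOT_CONST_INIT)
      return frame[0];              // postfix expression's value
    }

    int main() {
      assert(PostfixIncrement() == 41);  // a[i]++ yields the old value
      assert(storage == 42);
    }
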
5769 { Reference target(this, node->expression()); 5818 // A constant reference is not saved to, so a constant reference is not a
5819 // compound assignment reference.
5820 { Reference target(this, node->expression(), !is_const);
5770 if (target.is_illegal()) { 5821 if (target.is_illegal()) {
5771 // Spoof the virtual frame to have the expected height (one higher 5822 // Spoof the virtual frame to have the expected height (one higher
5772 // than on entry). 5823 // than on entry).
5773 if (!is_postfix) frame_->Push(Smi::FromInt(0)); 5824 if (!is_postfix) frame_->Push(Smi::FromInt(0));
5774 return; 5825 return;
5775 } 5826 }
5776 target.TakeValue(); 5827 target.TakeValue();
5777 5828
5778 Result new_value = frame_->Pop(); 5829 Result new_value = frame_->Pop();
5779 new_value.ToRegister(); 5830 new_value.ToRegister();
(...skipping 582 matching lines...)
6362 // instruction that gets patched and coverage code gets in the way. 6413 // instruction that gets patched and coverage code gets in the way.
6363 masm_->test(eax, Immediate(-delta_to_patch_site)); 6414 masm_->test(eax, Immediate(-delta_to_patch_site));
6364 // Restore value (returned from store IC), key and receiver 6415 // Restore value (returned from store IC), key and receiver
6365 // registers. 6416 // registers.
6366 if (!value_.is(eax)) __ mov(value_, eax); 6417 if (!value_.is(eax)) __ mov(value_, eax);
6367 __ pop(key_); 6418 __ pop(key_);
6368 __ pop(receiver_); 6419 __ pop(receiver_);
6369 } 6420 }
6370 6421
6371 6422
6423 Result CodeGenerator::EmitKeyedLoad(bool is_global) {
6424 Comment cmnt(masm_, "[ Load from keyed Property");
6425 // Inline array load code if inside of a loop. We do not know
6426 // the receiver map yet, so we initially generate the code with
6427 // a check against an invalid map. In the inline cache code, we
6428 // patch the map check if appropriate.
6429 if (loop_nesting() > 0) {
6430 Comment cmnt(masm_, "[ Inlined load from keyed Property");
6431
6432 Result key = frame_->Pop();
6433 Result receiver = frame_->Pop();
6434 key.ToRegister();
6435 receiver.ToRegister();
6436
6437 // Use a fresh temporary to load the elements without destroying
6438 // the receiver which is needed for the deferred slow case.
6439 Result elements = allocator()->Allocate();
6440 ASSERT(elements.is_valid());
6441
6442 // Use a fresh temporary for the index and later the loaded
6443 // value.
6444 Result index = allocator()->Allocate();
6445 ASSERT(index.is_valid());
6446
6447 DeferredReferenceGetKeyedValue* deferred =
6448 new DeferredReferenceGetKeyedValue(index.reg(),
6449 receiver.reg(),
6450 key.reg(),
6451 is_global);
6452
6453 // Check that the receiver is not a smi (only needed if this
6454 // is not a load from the global context) and that it has the
6455 // expected map.
6456 if (!is_global) {
6457 __ test(receiver.reg(), Immediate(kSmiTagMask));
6458 deferred->Branch(zero);
6459 }
6460
6461 // Initially, use an invalid map. The map is patched in the IC
6462 // initialization code.
6463 __ bind(deferred->patch_site());
6464 // Use masm-> here instead of the double underscore macro since extra
6465 // coverage code can interfere with the patching.
6466 masm_->cmp(FieldOperand(receiver.reg(), HeapObject::kMapOffset),
6467 Immediate(Factory::null_value()));
6468 deferred->Branch(not_equal);
6469
6470 // Check that the key is a smi.
6471 __ test(key.reg(), Immediate(kSmiTagMask));
6472 deferred->Branch(not_zero);
6473
6474 // Get the elements array from the receiver and check that it
6475 // is not a dictionary.
6476 __ mov(elements.reg(),
6477 FieldOperand(receiver.reg(), JSObject::kElementsOffset));
6478 __ cmp(FieldOperand(elements.reg(), HeapObject::kMapOffset),
6479 Immediate(Factory::fixed_array_map()));
6480 deferred->Branch(not_equal);
6481
6482 // Shift the key to get the actual index value and check that
6483 // it is within bounds.
6484 __ mov(index.reg(), key.reg());
6485 __ SmiUntag(index.reg());
6486 __ cmp(index.reg(),
6487 FieldOperand(elements.reg(), FixedArray::kLengthOffset));
6488 deferred->Branch(above_equal);
6489
6490 // Load and check that the result is not the hole. We could
6491 // reuse the index or elements register for the value.
6492 //
6493 // TODO(206): Consider whether it makes sense to try some
6494 // heuristic about which register to reuse. For example, if
6495 // one is eax, then we can reuse that one because the value
6496 // coming from the deferred code will be in eax.
6497 Result value = index;
6498 __ mov(value.reg(), Operand(elements.reg(),
6499 index.reg(),
6500 times_4,
6501 FixedArray::kHeaderSize - kHeapObjectTag));
6502 elements.Unuse();
6503 index.Unuse();
6504 __ cmp(Operand(value.reg()), Immediate(Factory::the_hole_value()));
6505 deferred->Branch(equal);
6506 __ IncrementCounter(&Counters::keyed_load_inline, 1);
6507
6508 deferred->BindExit();
6509 // Restore the receiver and key to the frame and push the
6510 // result on top of it.
6511 frame_->Push(&receiver);
6512 frame_->Push(&key);
6513 return value;
6514 } else {
6515 Comment cmnt(masm_, "[ Load from keyed Property");
6516 RelocInfo::Mode mode = is_global
6517 ? RelocInfo::CODE_TARGET_CONTEXT
6518 : RelocInfo::CODE_TARGET;
6519 Result answer = frame_->CallKeyedLoadIC(mode);
6520 // Make sure that we do not have a test instruction after the
6521 // call. A test instruction after the call is used to
6522 // indicate that we have generated an inline version of the
6523 // keyed load. The explicit nop instruction is here because
6524 // the push that follows might be peep-hole optimized away.
6525 __ nop();
6526 return answer;
6527 }
6528 }
6529
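The new EmitKeyedLoad centralizes the inline fast path that previously lived in Reference::GetValue (see the removed block below): a smi check on the receiver (unless the load is from the global context), a map check through a patchable compare, a smi check on the key, a check that the elements array is a real fixed array rather than a dictionary, a bounds check, and a hole check, deferring to the inline cache on any failure (the trailing nop preserves the test-instruction patching convention). The same sequence as ordinary control flow over toy types (a hedged sketch, not V8 objects):

    #include <cassert>
    #include <cstddef>
    #include <optional>
    #include <vector>

    // Toy receiver: a map id plus a dense elements backing store.
    struct ToyObject {
      int map_id;                     // stands in for the patchable map check
      bool elements_are_fixed_array;  // false would mean dictionary mode
      std::vector<int> elements;
    };

    const int kExpectedMapId = 7;  // patched in by the IC in the real code
    const int kTheHole = -1;       // sentinel, like Factory::the_hole_value()

    // Fast-path value, or nullopt to defer to the inline cache.
    std::optional<int> FastKeyedLoad(const ToyObject& receiver, int key) {
      if (receiver.map_id != kExpectedMapId) return std::nullopt;   // map check
      if (key < 0) return std::nullopt;                             // smi check
      if (!receiver.elements_are_fixed_array) return std::nullopt;  // dictionary
      if (static_cast<std::size_t>(key) >= receiver.elements.size())
        return std::nullopt;                                        // bounds
      int value = receiver.elements[key];
      if (value == kTheHole) return std::nullopt;                   // hole check
      return value;
    }

    int main() {
      ToyObject obj{kExpectedMapId, true, {10, kTheHole, 30}};
      assert(FastKeyedLoad(obj, 0) == 10);
      assert(!FastKeyedLoad(obj, 1));   // hole: defer to the IC
      assert(!FastKeyedLoad(obj, 99));  // out of bounds: defer to the IC
    }
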
6530
6372 #undef __ 6531 #undef __
6373 #define __ ACCESS_MASM(masm) 6532 #define __ ACCESS_MASM(masm)
6374 6533
6375 6534
6376 Handle<String> Reference::GetName() { 6535 Handle<String> Reference::GetName() {
6377 ASSERT(type_ == NAMED); 6536 ASSERT(type_ == NAMED);
6378 Property* property = expression_->AsProperty(); 6537 Property* property = expression_->AsProperty();
6379 if (property == NULL) { 6538 if (property == NULL) {
6380 // Global variable reference treated as a named property reference. 6539 // Global variable reference treated as a named property reference.
6381 VariableProxy* proxy = expression_->AsVariableProxy(); 6540 VariableProxy* proxy = expression_->AsVariableProxy();
(...skipping 92 matching lines...)
6474 6633
6475 __ IncrementCounter(&Counters::named_load_inline, 1); 6634 __ IncrementCounter(&Counters::named_load_inline, 1);
6476 deferred->BindExit(); 6635 deferred->BindExit();
6477 cgen_->frame()->Push(&receiver); 6636 cgen_->frame()->Push(&receiver);
6478 cgen_->frame()->Push(&value); 6637 cgen_->frame()->Push(&value);
6479 } 6638 }
6480 break; 6639 break;
6481 } 6640 }
6482 6641
6483 case KEYED: { 6642 case KEYED: {
6484 Comment cmnt(masm, "[ Load from keyed Property");
6485 Variable* var = expression_->AsVariableProxy()->AsVariable(); 6643 Variable* var = expression_->AsVariableProxy()->AsVariable();
6486 bool is_global = var != NULL; 6644 bool is_global = var != NULL;
6487 ASSERT(!is_global || var->is_global()); 6645 ASSERT(!is_global || var->is_global());
6488 6646 Result value = cgen_->EmitKeyedLoad(is_global);
6489 // Inline array load code if inside of a loop. We do not know 6647 cgen_->frame()->Push(&value);
6490 // the receiver map yet, so we initially generate the code with
6491 // a check against an invalid map. In the inline cache code, we
6492 // patch the map check if appropriate.
6493 if (cgen_->loop_nesting() > 0) {
6494 Comment cmnt(masm, "[ Inlined load from keyed Property");
6495
6496 Result key = cgen_->frame()->Pop();
6497 Result receiver = cgen_->frame()->Pop();
6498 key.ToRegister();
6499 receiver.ToRegister();
6500
6501 // Use a fresh temporary to load the elements without destroying
6502 // the receiver which is needed for the deferred slow case.
6503 Result elements = cgen_->allocator()->Allocate();
6504 ASSERT(elements.is_valid());
6505
6506 // Use a fresh temporary for the index and later the loaded
6507 // value.
6508 Result index = cgen_->allocator()->Allocate();
6509 ASSERT(index.is_valid());
6510
6511 DeferredReferenceGetKeyedValue* deferred =
6512 new DeferredReferenceGetKeyedValue(index.reg(),
6513 receiver.reg(),
6514 key.reg(),
6515 is_global);
6516
6517 // Check that the receiver is not a smi (only needed if this
6518 // is not a load from the global context) and that it has the
6519 // expected map.
6520 if (!is_global) {
6521 __ test(receiver.reg(), Immediate(kSmiTagMask));
6522 deferred->Branch(zero);
6523 }
6524
6525 // Initially, use an invalid map. The map is patched in the IC
6526 // initialization code.
6527 __ bind(deferred->patch_site());
6528 // Use masm-> here instead of the double underscore macro since extra
6529 // coverage code can interfere with the patching.
6530 masm->cmp(FieldOperand(receiver.reg(), HeapObject::kMapOffset),
6531 Immediate(Factory::null_value()));
6532 deferred->Branch(not_equal);
6533
6534 // Check that the key is a smi.
6535 __ test(key.reg(), Immediate(kSmiTagMask));
6536 deferred->Branch(not_zero);
6537
6538 // Get the elements array from the receiver and check that it
6539 // is not a dictionary.
6540 __ mov(elements.reg(),
6541 FieldOperand(receiver.reg(), JSObject::kElementsOffset));
6542 __ cmp(FieldOperand(elements.reg(), HeapObject::kMapOffset),
6543 Immediate(Factory::fixed_array_map()));
6544 deferred->Branch(not_equal);
6545
6546 // Shift the key to get the actual index value and check that
6547 // it is within bounds.
6548 __ mov(index.reg(), key.reg());
6549 __ SmiUntag(index.reg());
6550 __ cmp(index.reg(),
6551 FieldOperand(elements.reg(), FixedArray::kLengthOffset));
6552 deferred->Branch(above_equal);
6553
6554 // Load and check that the result is not the hole. We could
6555 // reuse the index or elements register for the value.
6556 //
6557 // TODO(206): Consider whether it makes sense to try some
6558 // heuristic about which register to reuse. For example, if
6559 // one is eax, then we can reuse that one because the value
6560 // coming from the deferred code will be in eax.
6561 Result value = index;
6562 __ mov(value.reg(), Operand(elements.reg(),
6563 index.reg(),
6564 times_4,
6565 FixedArray::kHeaderSize - kHeapObjectTag));
6566 elements.Unuse();
6567 index.Unuse();
6568 __ cmp(Operand(value.reg()), Immediate(Factory::the_hole_value()));
6569 deferred->Branch(equal);
6570 __ IncrementCounter(&Counters::keyed_load_inline, 1);
6571
6572 deferred->BindExit();
6573 // Restore the receiver and key to the frame and push the
6574 // result on top of it.
6575 cgen_->frame()->Push(&receiver);
6576 cgen_->frame()->Push(&key);
6577 cgen_->frame()->Push(&value);
6578
6579 } else {
6580 Comment cmnt(masm, "[ Load from keyed Property");
6581 RelocInfo::Mode mode = is_global
6582 ? RelocInfo::CODE_TARGET_CONTEXT
6583 : RelocInfo::CODE_TARGET;
6584 Result answer = cgen_->frame()->CallKeyedLoadIC(mode);
6585 // Make sure that we do not have a test instruction after the
6586 // call. A test instruction after the call is used to
6587 // indicate that we have generated an inline version of the
6588 // keyed load. The explicit nop instruction is here because
6589 // the push that follows might be peep-hole optimized away.
6590 __ nop();
6591 cgen_->frame()->Push(&answer);
6592 }
6593 break; 6648 break;
6594 } 6649 }
6595 6650
6596 default: 6651 default:
6597 UNREACHABLE(); 6652 UNREACHABLE();
6598 } 6653 }
6654
6655 if (!persist_after_get_) {
6656 cgen_->UnloadReference(this);
6657 }
6599 } 6658 }
6600 6659
6601 6660
6602 void Reference::TakeValue() { 6661 void Reference::TakeValue() {
6603 // For non-constant frame-allocated slots, we invalidate the value in the 6662 // For non-constant frame-allocated slots, we invalidate the value in the
6604 // slot. For all others, we fall back on GetValue. 6663 // slot. For all others, we fall back on GetValue.
6605 ASSERT(!cgen_->in_spilled_code()); 6664 ASSERT(!cgen_->in_spilled_code());
6606 ASSERT(!is_illegal()); 6665 ASSERT(!is_illegal());
6607 if (type_ != SLOT) { 6666 if (type_ != SLOT) {
6608 GetValue(); 6667 GetValue();
(...skipping 13 matching lines...)
6622 // Only non-constant, frame-allocated parameters and locals can 6681 // Only non-constant, frame-allocated parameters and locals can
6623 // reach here. Be careful not to use the optimizations for arguments 6682 // reach here. Be careful not to use the optimizations for arguments
6624 // object access since it may not have been initialized yet. 6683 // object access since it may not have been initialized yet.
6625 ASSERT(!slot->is_arguments()); 6684 ASSERT(!slot->is_arguments());
6626 if (slot->type() == Slot::PARAMETER) { 6685 if (slot->type() == Slot::PARAMETER) {
6627 cgen_->frame()->TakeParameterAt(slot->index()); 6686 cgen_->frame()->TakeParameterAt(slot->index());
6628 } else { 6687 } else {
6629 ASSERT(slot->type() == Slot::LOCAL); 6688 ASSERT(slot->type() == Slot::LOCAL);
6630 cgen_->frame()->TakeLocalAt(slot->index()); 6689 cgen_->frame()->TakeLocalAt(slot->index());
6631 } 6690 }
6691
6692 ASSERT(persist_after_get_);
6693 // Do not unload the reference, because it is used in SetValue.
6632 } 6694 }
6633 6695
6634 6696
6635 void Reference::SetValue(InitState init_state) { 6697 void Reference::SetValue(InitState init_state) {
6636 ASSERT(cgen_->HasValidEntryRegisters()); 6698 ASSERT(cgen_->HasValidEntryRegisters());
6637 ASSERT(!is_illegal()); 6699 ASSERT(!is_illegal());
6638 MacroAssembler* masm = cgen_->masm(); 6700 MacroAssembler* masm = cgen_->masm();
6639 switch (type_) { 6701 switch (type_) {
6640 case SLOT: { 6702 case SLOT: {
6641 Comment cmnt(masm, "[ Store to Slot"); 6703 Comment cmnt(masm, "[ Store to Slot");
(...skipping 109 matching lines...)
6751 // keyed store. 6813 // keyed store.
6752 __ nop(); 6814 __ nop();
6753 cgen_->frame()->Push(&answer); 6815 cgen_->frame()->Push(&answer);
6754 } 6816 }
6755 break; 6817 break;
6756 } 6818 }
6757 6819
6758 default: 6820 default:
6759 UNREACHABLE(); 6821 UNREACHABLE();
6760 } 6822 }
6823 cgen_->UnloadReference(this);
6761 } 6824 }
6762 6825
6763 6826
6764 void FastNewClosureStub::Generate(MacroAssembler* masm) { 6827 void FastNewClosureStub::Generate(MacroAssembler* masm) {
6765 // Clone the boilerplate in new space. Set the context to the 6828 // Clone the boilerplate in new space. Set the context to the
6766 // current context in esi. 6829 // current context in esi.
6767 Label gc; 6830 Label gc;
6768 __ AllocateInNewSpace(JSFunction::kSize, eax, ebx, ecx, &gc, TAG_OBJECT); 6831 __ AllocateInNewSpace(JSFunction::kSize, eax, ebx, ecx, &gc, TAG_OBJECT);
6769 6832
6770 // Get the boilerplate function from the stack. 6833 // Get the boilerplate function from the stack.
(...skipping 3256 matching lines...)
10027 10090
10028 // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater) 10091 // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater)
10029 // tagged as a small integer. 10092 // tagged as a small integer.
10030 __ bind(&runtime); 10093 __ bind(&runtime);
10031 __ TailCallRuntime(ExternalReference(Runtime::kStringCompare), 2, 1); 10094 __ TailCallRuntime(ExternalReference(Runtime::kStringCompare), 2, 1);
10032 } 10095 }
10033 10096
10034 #undef __ 10097 #undef __
10035 10098
10036 } } // namespace v8::internal 10099 } } // namespace v8::internal