Chromium Code Reviews

Side by Side Diff: src/x64/codegen-x64.cc

Issue 146029: x64 code generation for construct calls, declaring global variables... (Closed) Base URL: http://v8.googlecode.com/svn/branches/bleeding_edge/
Patch Set: '' Created 11 years, 6 months ago
1 // Copyright 2009 the V8 project authors. All rights reserved. 1 // Copyright 2009 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 105 matching lines...)
116 scope_(NULL), 116 scope_(NULL),
117 frame_(NULL), 117 frame_(NULL),
118 allocator_(NULL), 118 allocator_(NULL),
119 state_(NULL), 119 state_(NULL),
120 loop_nesting_(0), 120 loop_nesting_(0),
121 function_return_is_shadowed_(false), 121 function_return_is_shadowed_(false),
122 in_spilled_code_(false) { 122 in_spilled_code_(false) {
123 } 123 }
124 124
125 125
126 void CodeGenerator::DeclareGlobals(Handle<FixedArray> a) { 126 void CodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
127 UNIMPLEMENTED(); 127 // Call the runtime to declare the globals. The inevitable call
128 // will sync frame elements to memory anyway, so we do it eagerly to
129 // allow us to push the arguments directly into place.
130 frame_->SyncRange(0, frame_->element_count() - 1);
131
132 __ movq(kScratchRegister, pairs, RelocInfo::EMBEDDED_OBJECT);
133 frame_->EmitPush(kScratchRegister);
134 frame_->EmitPush(rsi); // The context is the second argument.
135 frame_->EmitPush(Immediate(Smi::FromInt(is_eval() ? 1 : 0)));
136 Result ignored = frame_->CallRuntime(Runtime::kDeclareGlobals, 3);
137 // Return value is ignored.
128 } 138 }
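The new DeclareGlobals body syncs the whole frame first because the runtime call would force that anyway; doing it eagerly lets the three arguments be pushed straight into their stack slots. A minimal sketch of the push order (illustrative names only, not part of the patch):

#include <cassert>
#include <string>
#include <vector>

int main() {
  std::vector<std::string> pushed;  // stand-in for the virtual frame
  pushed.push_back("pairs (FixedArray, via kScratchRegister)");
  pushed.push_back("context (rsi)");
  pushed.push_back("is_eval flag (Smi::FromInt(0 or 1))");
  assert(pushed.size() == 3);  // CallRuntime(Runtime::kDeclareGlobals, 3) consumes exactly these
  return 0;
}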
129 139
140
130 void CodeGenerator::TestCodeGenerator() { 141 void CodeGenerator::TestCodeGenerator() {
131 // Compile a function from a string, and run it. 142 // Compile a function from a string, and run it.
132 143
133 // Set flags appropriately for this stage of implementation. 144 // Set flags appropriately for this stage of implementation.
134 // TODO(X64): Make ic work, and stop disabling them. 145 // TODO(X64): Make ic work, and stop disabling them.
135 // These settings stick - remove them when we don't want them anymore. 146 // These settings stick - remove them when we don't want them anymore.
136 #ifdef DEBUG 147 #ifdef DEBUG
137 FLAG_print_builtin_source = true; 148 FLAG_print_builtin_source = true;
138 FLAG_print_builtin_ast = true; 149 FLAG_print_builtin_ast = true;
139 #endif 150 #endif
(...skipping 139 matching lines...)
279 Comment cmnt(masm_, "[ function body"); 290 Comment cmnt(masm_, "[ function body");
280 #ifdef DEBUG 291 #ifdef DEBUG
281 bool is_builtin = Bootstrapper::IsActive(); 292 bool is_builtin = Bootstrapper::IsActive();
282 bool should_trace = 293 bool should_trace =
283 is_builtin ? FLAG_trace_builtin_calls : FLAG_trace_calls; 294 is_builtin ? FLAG_trace_builtin_calls : FLAG_trace_calls;
284 if (should_trace) { 295 if (should_trace) {
285 frame_->CallRuntime(Runtime::kDebugTrace, 0); 296 frame_->CallRuntime(Runtime::kDebugTrace, 0);
286 // Ignore the return value. 297 // Ignore the return value.
287 } 298 }
288 #endif 299 #endif
300 VisitStatements(body);
301
302 // Handle the return from the function.
303 if (has_valid_frame()) {
304 // If there is a valid frame, control flow can fall off the end of
305 // the body. In that case there is an implicit return statement.
306 ASSERT(!function_return_is_shadowed_);
307 CodeForReturnPosition(function);
308 frame_->PrepareForReturn();
309 Result undefined(Factory::undefined_value());
310 if (function_return_.is_bound()) {
311 function_return_.Jump(&undefined);
312 } else {
313 function_return_.Bind(&undefined);
314 GenerateReturnSequence(&undefined);
315 }
316 } else if (function_return_.is_linked()) {
317 // If the return target has dangling jumps to it, then we have not
318 // yet generated the return sequence. This can happen when (a)
319 // control does not flow off the end of the body so we did not
320 // compile an artificial return statement just above, and (b) there
321 // are return statements in the body but (c) they are all shadowed.
322 Result return_value;
323 function_return_.Bind(&return_value);
324 GenerateReturnSequence(&return_value);
325 }
289 } 326 }
327 }
290 328
291 VisitStatements(body);
292 }
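The return handling added above distinguishes three situations at the end of a function body. A minimal sketch of the classification in plain control flow (names are illustrative, not part of the patch):

// Which return sequence the code generator still has to emit.
enum ReturnCase {
  kFallOffEnd,     // frame still valid: emit an implicit `return undefined`
  kDanglingJumps,  // frame gone, but return statements jumped here: bind target, emit sequence
  kNothingToDo     // return sequence already generated, or end of body unreachable
};

ReturnCase ClassifyFunctionReturn(bool has_valid_frame, bool return_target_linked) {
  if (has_valid_frame) return kFallOffEnd;
  if (return_target_linked) return kDanglingJumps;
  return kNothingToDo;
}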
293 // Adjust for function-level loop nesting. 329 // Adjust for function-level loop nesting.
294 loop_nesting_ -= function->loop_nesting(); 330 loop_nesting_ -= function->loop_nesting();
295 331
296 // Code generation state must be reset. 332 // Code generation state must be reset.
297 ASSERT(state_ == NULL); 333 ASSERT(state_ == NULL);
298 ASSERT(loop_nesting() == 0); 334 ASSERT(loop_nesting() == 0);
299 ASSERT(!function_return_is_shadowed_); 335 ASSERT(!function_return_is_shadowed_);
300 function_return_.Unuse(); 336 function_return_.Unuse();
301 DeleteFrame(); 337 DeleteFrame();
302 338
(...skipping 1009 matching lines...)
1312 CallWithArguments(args, node->position()); 1348 CallWithArguments(args, node->position());
1313 } 1349 }
1314 } 1350 }
1315 1351
1316 1352
1317 void CodeGenerator::VisitCallEval(CallEval* a) { 1353 void CodeGenerator::VisitCallEval(CallEval* a) {
1318 UNIMPLEMENTED(); 1354 UNIMPLEMENTED();
1319 } 1355 }
1320 1356
1321 1357
1322 void CodeGenerator::VisitCallNew(CallNew* a) { 1358 void CodeGenerator::VisitCallNew(CallNew* node) {
1323 UNIMPLEMENTED(); 1359 Comment cmnt(masm_, "[ CallNew");
1360 CodeForStatementPosition(node);
1361
1362 // According to ECMA-262, section 11.2.2, page 44, the function
1363 // expression in new calls must be evaluated before the
1364 // arguments. This is different from ordinary calls, where the
1365 // actual function to call is resolved after the arguments have been
1366 // evaluated.
1367
1368 // Compute function to call and use the global object as the
1369 // receiver. There is no need to use the global proxy here because
1370 // it will always be replaced with a newly allocated object.
1371 Load(node->expression());
1372 LoadGlobal();
1373
1374 // Push the arguments ("left-to-right") on the stack.
1375 ZoneList<Expression*>* args = node->arguments();
1376 int arg_count = args->length();
1377 for (int i = 0; i < arg_count; i++) {
1378 Load(args->at(i));
1379 }
1380
1381 // Call the construct call builtin that handles allocation and
1382 // constructor invocation.
1383 CodeForSourcePosition(node->position());
1384 Result result = frame_->CallConstructor(arg_count);
1385 // Replace the function on the stack with the result.
1386 frame_->SetElementAt(0, &result);
1324 } 1387 }
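VisitCallNew evaluates the function expression before the arguments, as its comment notes, and uses the global object as a placeholder receiver. A minimal sketch (illustrative names, not part of the patch) of the frame it builds before CallConstructor:

#include <string>
#include <vector>

std::vector<std::string> BuildConstructCallFrame(const std::string& function_expr,
                                                 const std::vector<std::string>& args) {
  std::vector<std::string> frame;
  frame.push_back(function_expr);      // Load(node->expression()) -- evaluated first (ECMA-262 11.2.2)
  frame.push_back("global object");    // LoadGlobal() -- placeholder receiver for the construct call
  for (const std::string& arg : args)  // arguments pushed left-to-right, after the function
    frame.push_back(arg);
  return frame;                        // CallConstructor(arg_count) runs the builtin; the result
}                                      // then replaces the function slot via SetElementAt(0, ...)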
1325 1388
1326 1389
1327 void CodeGenerator::VisitCallRuntime(CallRuntime* a) { 1390 void CodeGenerator::VisitCallRuntime(CallRuntime* node) {
1328 UNIMPLEMENTED(); 1391 if (CheckForInlineRuntimeCall(node)) {
1392 return;
1393 }
1394
1395 ZoneList<Expression*>* args = node->arguments();
1396 Comment cmnt(masm_, "[ CallRuntime");
1397 Runtime::Function* function = node->function();
1398
1399 if (function == NULL) {
1400 // Prepare stack for calling JS runtime function.
1401 frame_->Push(node->name());
1402 // Push the builtins object found in the current global object.
1403 Result temp = allocator()->Allocate();
William Hesse 2009/06/24 07:55:57 Use kScratchRegister here.
Mads Ager (chromium) 2009/06/24 08:18:50 Done.
1404 ASSERT(temp.is_valid());
1405 __ movq(temp.reg(), GlobalObject());
1406 __ movq(temp.reg(),
1407 FieldOperand(temp.reg(), GlobalObject::kBuiltinsOffset));
1408 frame_->Push(&temp);
1409 }
1410
1411 // Push the arguments ("left-to-right").
1412 int arg_count = args->length();
1413 for (int i = 0; i < arg_count; i++) {
1414 Load(args->at(i));
1415 }
1416
1417 if (function == NULL) {
1418 // Call the JS runtime function.
1419 Result answer = frame_->CallCallIC(RelocInfo::CODE_TARGET,
1420 arg_count,
1421 loop_nesting_);
1422 frame_->RestoreContextRegister();
1423 frame_->SetElementAt(0, &answer);
1424 } else {
1425 // Call the C runtime function.
1426 Result answer = frame_->CallRuntime(function, arg_count);
1427 frame_->Push(&answer);
1428 }
1329 } 1429 }
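VisitCallRuntime picks between two call paths: runtime names backed by a C++ function go through frame_->CallRuntime(), while the rest are looked up on the builtins object and invoked through a call IC, after which the context register is restored. A minimal sketch of the dispatch (names are illustrative, not part of the patch):

enum RuntimeCallKind { kCallCRuntimeFunction, kCallJSBuiltinViaCallIC };

RuntimeCallKind ClassifyRuntimeCall(bool has_c_function /* node->function() != NULL */) {
  // Inline runtime calls (CheckForInlineRuntimeCall) are already handled before this point.
  return has_c_function ? kCallCRuntimeFunction : kCallJSBuiltinViaCallIC;
}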
1330 1430
1331 1431
1332 void CodeGenerator::VisitUnaryOperation(UnaryOperation* a) { 1432 void CodeGenerator::VisitUnaryOperation(UnaryOperation* a) {
1333 UNIMPLEMENTED(); 1433 UNIMPLEMENTED();
1334 } 1434 }
1335 1435
1336 void CodeGenerator::VisitCountOperation(CountOperation* a) { 1436 void CodeGenerator::VisitCountOperation(CountOperation* a) {
1337 UNIMPLEMENTED(); 1437 UNIMPLEMENTED();
1338 } 1438 }
1339 1439
1340 void CodeGenerator::VisitBinaryOperation(BinaryOperation* a) { 1440 void CodeGenerator::VisitBinaryOperation(BinaryOperation* a) {
1341 UNIMPLEMENTED(); 1441 UNIMPLEMENTED();
1342 } 1442 }
1343 1443
1344 void CodeGenerator::VisitCompareOperation(CompareOperation* a) { 1444 void CodeGenerator::VisitCompareOperation(CompareOperation* a) {
1345 UNIMPLEMENTED(); 1445 UNIMPLEMENTED();
1346 } 1446 }
1347 1447
1348 void CodeGenerator::VisitThisFunction(ThisFunction* a) { 1448
1349 UNIMPLEMENTED(); 1449 void CodeGenerator::VisitThisFunction(ThisFunction* node) {
1450 frame_->PushFunction();
1350 } 1451 }
1351 1452
1453
1352 void CodeGenerator::GenerateArgumentsAccess(ZoneList<Expression*>* args) { 1454 void CodeGenerator::GenerateArgumentsAccess(ZoneList<Expression*>* args) {
1353 UNIMPLEMENTED(); 1455 UNIMPLEMENTED();
1354 } 1456 }
1355 1457
1356 void CodeGenerator::GenerateArgumentsLength(ZoneList<Expression*>* args) { 1458 void CodeGenerator::GenerateArgumentsLength(ZoneList<Expression*>* args) {
1357 UNIMPLEMENTED();} 1459 UNIMPLEMENTED();}
1358 1460
1359 void CodeGenerator::GenerateFastCharCodeAt(ZoneList<Expression*>* a) { 1461 void CodeGenerator::GenerateFastCharCodeAt(ZoneList<Expression*>* a) {
1360 UNIMPLEMENTED(); 1462 UNIMPLEMENTED();
1361 } 1463 }
(...skipping 173 matching lines...)
1535 dest->true_target()->Branch(equal); 1637 dest->true_target()->Branch(equal);
1536 1638
1537 // 'undefined' => false. 1639 // 'undefined' => false.
1538 __ Cmp(value.reg(), Factory::undefined_value()); 1640 __ Cmp(value.reg(), Factory::undefined_value());
1539 dest->false_target()->Branch(equal); 1641 dest->false_target()->Branch(equal);
1540 1642
1541 // Smi => false iff zero. 1643 // Smi => false iff zero.
1542 ASSERT(kSmiTag == 0); 1644 ASSERT(kSmiTag == 0);
1543 __ testq(value.reg(), value.reg()); 1645 __ testq(value.reg(), value.reg());
1544 dest->false_target()->Branch(zero); 1646 dest->false_target()->Branch(zero);
1545 __ testq(value.reg(), Immediate(kSmiTagMask)); 1647 __ testl(value.reg(), Immediate(kSmiTagMask));
William Hesse 2009/06/24 07:55:57 We have many of these throughout the code, and hav
Mads Ager (chromium) 2009/06/24 08:18:50 We were using it inconsistently before. There was
1546 dest->true_target()->Branch(zero); 1648 dest->true_target()->Branch(zero);
1547 1649
1548 // Call the stub for all other cases. 1650 // Call the stub for all other cases.
1549 frame_->Push(&value); // Undo the Pop() from above. 1651 frame_->Push(&value); // Undo the Pop() from above.
1550 ToBooleanStub stub; 1652 ToBooleanStub stub;
1551 Result temp = frame_->CallStub(&stub, 1); 1653 Result temp = frame_->CallStub(&stub, 1);
1552 // Convert the result to a condition code. 1654 // Convert the result to a condition code.
1553 __ testq(temp.reg(), temp.reg()); 1655 __ testq(temp.reg(), temp.reg());
1554 temp.Unuse(); 1656 temp.Unuse();
1555 dest->Split(not_equal); 1657 dest->Split(not_equal);
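The inline checks above depend on the smi encoding the asserts spell out: kSmiTag == 0 with the tag in the low bit, so a 32-bit testl against kSmiTagMask suffices (the point of the review exchange above), and a smi is false exactly when the whole word is zero. A minimal sketch of that encoding (assumed from the asserts, not part of the patch):

#include <cstdint>

inline bool IsSmi(intptr_t word) { return (word & 1) == 0; }            // kSmiTagMask == 1, kSmiTag == 0
inline intptr_t SmiTag(int32_t value) { return intptr_t{value} << 1; }  // kSmiTagSize == 1: shift the value up
inline int32_t SmiUntag(intptr_t word) { return int32_t(word >> 1); }
// Smi zero is the all-zero word, which is what testq(value, value) / branch-if-zero tests.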
(...skipping 493 matching lines...)
2049 receiver.ToRegister(); 2151 receiver.ToRegister();
2050 2152
2051 Result value = cgen_->allocator()->Allocate(); 2153 Result value = cgen_->allocator()->Allocate();
2052 ASSERT(value.is_valid()); 2154 ASSERT(value.is_valid());
2053 DeferredReferenceGetNamedValue* deferred = 2155 DeferredReferenceGetNamedValue* deferred =
2054 new DeferredReferenceGetNamedValue(value.reg(), 2156 new DeferredReferenceGetNamedValue(value.reg(),
2055 receiver.reg(), 2157 receiver.reg(),
2056 GetName()); 2158 GetName());
2057 2159
2058 // Check that the receiver is a heap object. 2160 // Check that the receiver is a heap object.
2059 __ testq(receiver.reg(), Immediate(kSmiTagMask)); 2161 __ testl(receiver.reg(), Immediate(kSmiTagMask));
2060 deferred->Branch(zero); 2162 deferred->Branch(zero);
2061 2163
2062 __ bind(deferred->patch_site()); 2164 __ bind(deferred->patch_site());
2063 // This is the map check instruction that will be patched (so we can't 2165 // This is the map check instruction that will be patched (so we can't
2064 // use the double underscore macro that may insert instructions). 2166 // use the double underscore macro that may insert instructions).
2065 // Initially use an invalid map to force a failure. 2167 // Initially use an invalid map to force a failure.
2066 masm->Move(kScratchRegister, Factory::null_value()); 2168 masm->Move(kScratchRegister, Factory::null_value());
2067 masm->cmpq(FieldOperand(receiver.reg(), HeapObject::kMapOffset), 2169 masm->cmpq(FieldOperand(receiver.reg(), HeapObject::kMapOffset),
2068 kScratchRegister); 2170 kScratchRegister);
2069 // This branch is always a forwards branch so it's always a fixed 2171 // This branch is always a forwards branch so it's always a fixed
(...skipping 293 matching lines...)
2363 // by -1. We cannot use the overflow flag, since it is not set 2465 // by -1. We cannot use the overflow flag, since it is not set
2364 // by idiv instruction. 2466 // by idiv instruction.
2365 ASSERT(kSmiTag == 0 && kSmiTagSize == 1); 2467 ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
2366 // TODO(X64): TODO(Smi): Smi implementation dependent constant. 2468 // TODO(X64): TODO(Smi): Smi implementation dependent constant.
2367 // Value is Smi::fromInt(-(1<<31)) / Smi::fromInt(-1) 2469 // Value is Smi::fromInt(-(1<<31)) / Smi::fromInt(-1)
2368 __ cmpq(rax, Immediate(0x40000000)); 2470 __ cmpq(rax, Immediate(0x40000000));
2369 __ j(equal, slow); 2471 __ j(equal, slow);
2370 // Check for negative zero result. 2472 // Check for negative zero result.
2371 __ NegativeZeroTest(rax, rcx, slow); // use ecx = x | y 2473 __ NegativeZeroTest(rax, rcx, slow); // use ecx = x | y
2372 // Tag the result and store it in register rax. 2474 // Tag the result and store it in register rax.
2373 ASSERT(kSmiTagSize == kTimes2); // adjust code if not the case 2475 ASSERT(kSmiTagSize == times_2); // adjust code if not the case
2374 __ lea(rax, Operand(rax, rax, kTimes1, kSmiTag)); 2476 __ lea(rax, Operand(rax, rax, times_1, kSmiTag));
2375 break; 2477 break;
2376 2478
2377 case Token::MOD: 2479 case Token::MOD:
2378 // Divide rdx:rax by rbx. 2480 // Divide rdx:rax by rbx.
2379 __ idiv(rbx); 2481 __ idiv(rbx);
2380 // Check for negative zero result. 2482 // Check for negative zero result.
2381 __ NegativeZeroTest(rdx, rcx, slow); // use ecx = x | y 2483 __ NegativeZeroTest(rdx, rcx, slow); // use ecx = x | y
2382 // Move remainder to register rax. 2484 // Move remainder to register rax.
2383 __ movq(rax, rdx); 2485 __ movq(rax, rdx);
2384 break; 2486 break;
(...skipping 43 matching lines...)
2428 // shifting the SmiTag in at the bottom doesn't change the sign. 2530 // shifting the SmiTag in at the bottom doesn't change the sign.
2429 ASSERT(kSmiTagSize == 1); 2531 ASSERT(kSmiTagSize == 1);
2430 __ cmpl(rax, Immediate(0xc0000000)); 2532 __ cmpl(rax, Immediate(0xc0000000));
2431 __ j(sign, slow); 2533 __ j(sign, slow);
2432 __ movsxlq(rax, rax); // Extend new sign of eax into rax. 2534 __ movsxlq(rax, rax); // Extend new sign of eax into rax.
2433 break; 2535 break;
2434 default: 2536 default:
2435 UNREACHABLE(); 2537 UNREACHABLE();
2436 } 2538 }
2437 // Tag the result and store it in register eax. 2539 // Tag the result and store it in register eax.
2438 ASSERT(kSmiTagSize == kTimes2); // adjust code if not the case 2540 ASSERT(kSmiTagSize == times_2); // adjust code if not the case
2439 __ lea(rax, Operand(rax, rax, kTimes1, kSmiTag)); 2541 __ lea(rax, Operand(rax, rax, times_1, kSmiTag));
2440 break; 2542 break;
2441 2543
2442 default: 2544 default:
2443 UNREACHABLE(); 2545 UNREACHABLE();
2444 break; 2546 break;
2445 } 2547 }
2446 } 2548 }
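The tagging idiom used twice above, lea(rax, Operand(rax, rax, times_1, kSmiTag)), relies on lea's base + index * scale + displacement arithmetic: with kSmiTag == 0 it is just a doubling, i.e. a left shift by kSmiTagSize == 1. A minimal sketch (not part of the patch):

#include <cstdint>

inline int64_t LeaSmiTag(int64_t untagged) {
  return untagged + untagged * 1 + 0;  // base rax + index rax * times_1 + kSmiTag  ==  untagged << 1
}

The DIV path compares the quotient against 0x40000000 first because, assuming both operands are still smi-tagged when idiv runs (2a / 2b == a / b), that is the one quotient that no longer fits in a smi once re-tagged, and idiv does not set the overflow flag.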
2447 2549
2448 2550
2449 void GenericBinaryOpStub::Generate(MacroAssembler* masm) { 2551 void GenericBinaryOpStub::Generate(MacroAssembler* masm) {
(...skipping 61 matching lines...)
2511 2613
2512 2614
2513 void CallFunctionStub::Generate(MacroAssembler* masm) { 2615 void CallFunctionStub::Generate(MacroAssembler* masm) {
2514 Label slow; 2616 Label slow;
2515 2617
2516 // Get the function to call from the stack. 2618 // Get the function to call from the stack.
2517 // +2 ~ receiver, return address 2619 // +2 ~ receiver, return address
2518 __ movq(rdi, Operand(rsp, (argc_ + 2) * kPointerSize)); 2620 __ movq(rdi, Operand(rsp, (argc_ + 2) * kPointerSize));
2519 2621
2520 // Check that the function really is a JavaScript function. 2622 // Check that the function really is a JavaScript function.
2521 __ testq(rdi, Immediate(kSmiTagMask)); 2623 __ testl(rdi, Immediate(kSmiTagMask));
2522 __ j(zero, &slow); 2624 __ j(zero, &slow);
2523 // Goto slow case if we do not have a function. 2625 // Goto slow case if we do not have a function.
2524 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx); 2626 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
2525 __ j(not_equal, &slow); 2627 __ j(not_equal, &slow);
2526 2628
2527 // Fast-case: Just invoke the function. 2629 // Fast-case: Just invoke the function.
2528 ParameterCount actual(argc_); 2630 ParameterCount actual(argc_);
2529 __ InvokeFunction(rdi, actual, JUMP_FUNCTION); 2631 __ InvokeFunction(rdi, actual, JUMP_FUNCTION);
2530 2632
2531 // Slow-case: Non-function called. 2633 // Slow-case: Non-function called.
(...skipping 44 matching lines...)
2576 Label runtime; 2678 Label runtime;
2577 __ movq(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset)); 2679 __ movq(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
2578 __ movq(rcx, Operand(rdx, StandardFrameConstants::kContextOffset)); 2680 __ movq(rcx, Operand(rdx, StandardFrameConstants::kContextOffset));
2579 __ cmpq(rcx, Immediate(ArgumentsAdaptorFrame::SENTINEL)); 2681 __ cmpq(rcx, Immediate(ArgumentsAdaptorFrame::SENTINEL));
2580 __ j(not_equal, &runtime); 2682 __ j(not_equal, &runtime);
2581 // Value in rcx is Smi encoded. 2683 // Value in rcx is Smi encoded.
2582 2684
2583 // Patch the arguments.length and the parameters pointer. 2685 // Patch the arguments.length and the parameters pointer.
2584 __ movq(rcx, Operand(rdx, ArgumentsAdaptorFrameConstants::kLengthOffset)); 2686 __ movq(rcx, Operand(rdx, ArgumentsAdaptorFrameConstants::kLengthOffset));
2585 __ movq(Operand(rsp, 1 * kPointerSize), rcx); 2687 __ movq(Operand(rsp, 1 * kPointerSize), rcx);
2586 __ lea(rdx, Operand(rdx, rcx, kTimes4, kDisplacement)); 2688 __ lea(rdx, Operand(rdx, rcx, times_4, kDisplacement));
2587 __ movq(Operand(rsp, 2 * kPointerSize), rdx); 2689 __ movq(Operand(rsp, 2 * kPointerSize), rdx);
2588 2690
2589 // Do the runtime call to allocate the arguments object. 2691 // Do the runtime call to allocate the arguments object.
2590 __ bind(&runtime); 2692 __ bind(&runtime);
2591 __ TailCallRuntime(ExternalReference(Runtime::kNewArgumentsFast), 3); 2693 __ TailCallRuntime(ExternalReference(Runtime::kNewArgumentsFast), 3);
2592 } 2694 }
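GenerateNewObject only patches arguments.length and the parameters pointer when the caller's frame is an arguments adaptor frame, which it detects by the sentinel stored in that frame's context slot; otherwise it falls through to the runtime with the defaults already on the stack (set up in the lines elided above). A minimal sketch of the check (illustrative names and types, not part of the patch):

#include <cstdint>

struct CallerFrame {
  uintptr_t context_slot;    // StandardFrameConstants::kContextOffset
  uintptr_t adaptor_length;  // ArgumentsAdaptorFrameConstants::kLengthOffset, kept smi-encoded
};

bool IsArgumentsAdaptorFrame(const CallerFrame& caller, uintptr_t sentinel) {
  // Adaptor frames mark themselves with ArgumentsAdaptorFrame::SENTINEL in the context
  // slot; only then does the actual argument count differ from the formal one.
  return caller.context_slot == sentinel;
}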
2593 2695
2594 void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) { 2696 void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
2595 // The key is in rdx and the parameter count is in rax. 2697 // The key is in rdx and the parameter count is in rax.
2596 2698
(...skipping 17 matching lines...)
2614 // Check index against formal parameters count limit passed in 2716 // Check index against formal parameters count limit passed in
2615 // through register rax. Use unsigned comparison to get negative 2717 // through register rax. Use unsigned comparison to get negative
2616 // check for free. 2718 // check for free.
2617 __ cmpq(rdx, rax); 2719 __ cmpq(rdx, rax);
2618 __ j(above_equal, &slow); 2720 __ j(above_equal, &slow);
2619 2721
2620 // Read the argument from the stack and return it. 2722 // Read the argument from the stack and return it.
2621 // Shifting code depends on SmiEncoding being equivalent to left shift: 2723 // Shifting code depends on SmiEncoding being equivalent to left shift:
2622 // we multiply by four to get pointer alignment. 2724 // we multiply by four to get pointer alignment.
2623 ASSERT(kSmiTagSize == 1 && kSmiTag == 0); 2725 ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
2624 __ lea(rbx, Operand(rbp, rax, kTimes4, 0)); 2726 __ lea(rbx, Operand(rbp, rax, times_4, 0));
2625 __ neg(rdx); 2727 __ neg(rdx);
2626 __ movq(rax, Operand(rbx, rdx, kTimes4, kDisplacement)); 2728 __ movq(rax, Operand(rbx, rdx, times_4, kDisplacement));
2627 __ Ret(); 2729 __ Ret();
2628 2730
2629 // Arguments adaptor case: Check index against actual arguments 2731 // Arguments adaptor case: Check index against actual arguments
2630 // limit found in the arguments adaptor frame. Use unsigned 2732 // limit found in the arguments adaptor frame. Use unsigned
2631 // comparison to get negative check for free. 2733 // comparison to get negative check for free.
2632 __ bind(&adaptor); 2734 __ bind(&adaptor);
2633 __ movq(rcx, Operand(rbx, ArgumentsAdaptorFrameConstants::kLengthOffset)); 2735 __ movq(rcx, Operand(rbx, ArgumentsAdaptorFrameConstants::kLengthOffset));
2634 __ cmpq(rdx, rcx); 2736 __ cmpq(rdx, rcx);
2635 __ j(above_equal, &slow); 2737 __ j(above_equal, &slow);
2636 2738
2637 // Read the argument from the stack and return it. 2739 // Read the argument from the stack and return it.
2638 // Shifting code depends on SmiEncoding being equivalent to left shift: 2740 // Shifting code depends on SmiEncoding being equivalent to left shift:
2639 // we multiply by four to get pointer alignment. 2741 // we multiply by four to get pointer alignment.
2640 ASSERT(kSmiTagSize == 1 && kSmiTag == 0); 2742 ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
2641 __ lea(rbx, Operand(rbx, rcx, kTimes4, 0)); 2743 __ lea(rbx, Operand(rbx, rcx, times_4, 0));
2642 __ neg(rdx); 2744 __ neg(rdx);
2643 __ movq(rax, Operand(rbx, rdx, kTimes4, kDisplacement)); 2745 __ movq(rax, Operand(rbx, rdx, times_4, kDisplacement));
2644 __ Ret(); 2746 __ Ret();
2645 2747
2646 // Slow-case: Handle non-smi or out-of-bounds access to arguments 2748 // Slow-case: Handle non-smi or out-of-bounds access to arguments
2647 // by calling the runtime system. 2749 // by calling the runtime system.
2648 __ bind(&slow); 2750 __ bind(&slow);
2649 __ pop(rbx); // Return address. 2751 __ pop(rbx); // Return address.
2650 __ push(rdx); 2752 __ push(rdx);
2651 __ push(rbx); 2753 __ push(rbx);
2652 __ TailCallRuntime(ExternalReference(Runtime::kGetArgumentsProperty), 1); 2754 __ TailCallRuntime(ExternalReference(Runtime::kGetArgumentsProperty), 1);
2653 } 2755 }
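The two argument loads above rely on the smi encoding for the scaling: a smi is the value shifted left by one, so a times_4 index scale produces value * 8, i.e. value * kPointerSize on x64, and negating the smi-encoded key indexes backwards from the computed base. A minimal sketch of the resulting address arithmetic (kDisplacement taken as given, not part of the patch):

#include <cstdint>

uintptr_t ArgumentSlotAddress(uintptr_t frame_pointer, int parameter_count,
                              int index, int displacement /* kDisplacement */) {
  // lea(rbx, Operand(rbp, smi(count), times_4, 0)):  rbp + (count << 1) * 4 == rbp + count * 8
  uintptr_t base = frame_pointer + uintptr_t(parameter_count) * 8;
  // neg(rdx); movq(rax, Operand(rbx, smi(-index), times_4, kDisplacement)):
  return base - uintptr_t(index) * 8 + displacement;
}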
(...skipping 360 matching lines...)
3014 __ addq(rsp, Immediate(2 * kPointerSize)); // remove markers 3116 __ addq(rsp, Immediate(2 * kPointerSize)); // remove markers
3015 3117
3016 // Restore frame pointer and return. 3118 // Restore frame pointer and return.
3017 __ pop(rbp); 3119 __ pop(rbp);
3018 __ ret(0); 3120 __ ret(0);
3019 } 3121 }
3020 3122
3021 #undef __ 3123 #undef __
3022 3124
3023 } } // namespace v8::internal 3125 } } // namespace v8::internal
