Chromium Code Reviews

Unified Diff: src/x64/codegen-x64.cc

Issue 6880010: Merge (7265, 7271] from bleeding_edge to experimental/gc branch.... (Closed) Base URL: http://v8.googlecode.com/svn/branches/experimental/gc/
Patch Set: '' Created 9 years, 8 months ago
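The substance of this merge is mechanical: process-wide statics (Factory::, Heap::, Top::, and static JumpTarget state) become per-isolate accessors (FACTORY->, HEAP->, COUNTERS->, Isolate::Current()). Below is a minimal compilable sketch of that pattern; the class bodies and the FACTORY macro expansion are illustrative assumptions, not V8's actual definitions.

    #include <cassert>

    // Illustrative stand-ins only; the real V8 types are far richer.
    struct Object {};

    class Factory {
     public:
      Object* undefined_value() { return &undefined_; }  // per-isolate sentinel
     private:
      Object undefined_;
    };

    class Isolate {
     public:
      // The real VM fetches the current isolate from thread-local storage.
      static Isolate* Current() { return current_; }
      Factory* factory() { return &factory_; }
      static Isolate* current_;
     private:
      Factory factory_;
    };
    Isolate* Isolate::current_ = nullptr;

    // Assumed expansion: the FACTORY macro routes a former static call
    // (Factory::undefined_value()) through the current isolate.
    #define FACTORY (Isolate::Current()->factory())

    int main() {
      Isolate isolate;
      Isolate::current_ = &isolate;
      // Before: frame_->Push(Factory::undefined_value());
      // After:  frame_->Push(FACTORY->undefined_value());
      assert(FACTORY->undefined_value() != nullptr);
      return 0;
    }

The same move recurs throughout the diff as HEAP->, COUNTERS->, Isolate::k_handler_address, and info->isolate(), all replacing what used to be globals shared by the whole process.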
// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
(...skipping 162 matching lines...)
RegisterAllocator register_allocator(this);
allocator_ = &register_allocator;
ASSERT(frame_ == NULL);
frame_ = new VirtualFrame();
set_in_spilled_code(false);

// Adjust for function-level loop nesting.
ASSERT_EQ(0, loop_nesting_);
loop_nesting_ = info->is_in_loop() ? 1 : 0;

- JumpTarget::set_compiling_deferred_code(false);
+ Isolate::Current()->set_jump_target_compiling_deferred_code(false);

{
CodeGenState state(this);
// Entry:
// Stack: receiver, arguments, return address.
// rbp: caller's frame pointer
// rsp: stack pointer
// rdi: called JS function
// rsi: callee's context
allocator_->Initialize();
(...skipping 84 matching lines...)

// Store the arguments object. This must happen after context
// initialization because the arguments object may be stored in
// the context.
if (ArgumentsMode() != NO_ARGUMENTS_ALLOCATION) {
StoreArgumentsObject(true);
}

// Initialize ThisFunction reference if present.
if (scope()->is_function_scope() && scope()->function() != NULL) {
- frame_->Push(Factory::the_hole_value());
+ frame_->Push(FACTORY->the_hole_value());
StoreToSlot(scope()->function()->AsSlot(), NOT_CONST_INIT);
}

// Initialize the function return target after the locals are set
// up, because it needs the expected frame height from the frame.
function_return_.set_direction(JumpTarget::BIDIRECTIONAL);
function_return_is_shadowed_ = false;

// Generate code to 'execute' declarations and initialize functions
// (source elements). In case of an illegal redeclaration we need to
(...skipping 14 matching lines...)
// Ignore the return value.
}
CheckStack();

// Compile the body of the function in a vanilla state. Don't
// bother compiling all the code if the scope has an illegal
// redeclaration.
if (!scope()->HasIllegalRedeclaration()) {
Comment cmnt(masm_, "[ function body");
#ifdef DEBUG
- bool is_builtin = Bootstrapper::IsActive();
+ bool is_builtin = Isolate::Current()->bootstrapper()->IsActive();
bool should_trace =
is_builtin ? FLAG_trace_builtin_calls : FLAG_trace_calls;
if (should_trace) {
frame_->CallRuntime(Runtime::kDebugTrace, 0);
// Ignore the return value.
}
#endif
VisitStatements(info->function()->body());

// Handle the return from the function.
if (has_valid_frame()) {
// If there is a valid frame, control flow can fall off the end of
// the body. In that case there is an implicit return statement.
ASSERT(!function_return_is_shadowed_);
CodeForReturnPosition(info->function());
frame_->PrepareForReturn();
- Result undefined(Factory::undefined_value());
+ Result undefined(FACTORY->undefined_value());
if (function_return_.is_bound()) {
function_return_.Jump(&undefined);
} else {
function_return_.Bind(&undefined);
GenerateReturnSequence(&undefined);
}
} else if (function_return_.is_linked()) {
// If the return target has dangling jumps to it, then we have not
// yet generated the return sequence. This can happen when (a)
// control does not flow off the end of the body so we did not
(...skipping 11 matching lines...)
loop_nesting_ = 0;

// Code generation state must be reset.
ASSERT(state_ == NULL);
ASSERT(!function_return_is_shadowed_);
function_return_.Unuse();
DeleteFrame();

// Process any deferred code using the register allocator.
if (!HasStackOverflow()) {
- JumpTarget::set_compiling_deferred_code(true);
+ info->isolate()->set_jump_target_compiling_deferred_code(true);
ProcessDeferred();
- JumpTarget::set_compiling_deferred_code(false);
+ info->isolate()->set_jump_target_compiling_deferred_code(false);
}

// There is no need to delete the register allocator, it is a
// stack-allocated local.
allocator_ = NULL;
}


Operand CodeGenerator::SlotOperand(Slot* slot, Register tmp) {
// Currently, this assertion will fail if we try to assign to
(...skipping 128 matching lines...)
#endif
ASSERT(!in_spilled_code());
JumpTarget true_target;
JumpTarget false_target;
ControlDestination dest(&true_target, &false_target, true);
LoadCondition(expr, &dest, false);

if (dest.false_was_fall_through()) {
// The false target was just bound.
JumpTarget loaded;
- frame_->Push(Factory::false_value());
+ frame_->Push(FACTORY->false_value());
// There may be dangling jumps to the true target.
if (true_target.is_linked()) {
loaded.Jump();
true_target.Bind();
- frame_->Push(Factory::true_value());
+ frame_->Push(FACTORY->true_value());
loaded.Bind();
}

} else if (dest.is_used()) {
// There is true, and possibly false, control flow (with true as
// the fall through).
JumpTarget loaded;
- frame_->Push(Factory::true_value());
+ frame_->Push(FACTORY->true_value());
if (false_target.is_linked()) {
loaded.Jump();
false_target.Bind();
- frame_->Push(Factory::false_value());
+ frame_->Push(FACTORY->false_value());
loaded.Bind();
}

} else {
// We have a valid value on top of the frame, but we still may
// have dangling jumps to the true and false targets from nested
// subexpressions (eg, the left subexpressions of the
// short-circuited boolean operators).
ASSERT(has_valid_frame());
if (true_target.is_linked() || false_target.is_linked()) {
JumpTarget loaded;
loaded.Jump(); // Don't lose the current TOS.
if (true_target.is_linked()) {
true_target.Bind();
- frame_->Push(Factory::true_value());
+ frame_->Push(FACTORY->true_value());
if (false_target.is_linked()) {
loaded.Jump();
}
}
if (false_target.is_linked()) {
false_target.Bind();
- frame_->Push(Factory::false_value());
+ frame_->Push(FACTORY->false_value());
}
loaded.Bind();
}
}

ASSERT(has_valid_frame());
ASSERT(frame_->height() == original_height + 1);
}

(...skipping 56 matching lines...)

Result CodeGenerator::StoreArgumentsObject(bool initial) {
ArgumentsAllocationMode mode = ArgumentsMode();
ASSERT(mode != NO_ARGUMENTS_ALLOCATION);

Comment cmnt(masm_, "[ store arguments object");
if (mode == LAZY_ARGUMENTS_ALLOCATION && initial) {
// When using lazy arguments allocation, we store the arguments marker value
// as a sentinel indicating that the arguments object hasn't been
// allocated yet.
- frame_->Push(Factory::arguments_marker());
+ frame_->Push(FACTORY->arguments_marker());
} else {
ArgumentsAccessStub stub(is_strict_mode()
? ArgumentsAccessStub::NEW_STRICT
: ArgumentsAccessStub::NEW_NON_STRICT);
frame_->PushFunction();
frame_->PushReceiverSlotAddress();
frame_->Push(Smi::FromInt(scope()->num_parameters()));
Result result = frame_->CallStub(&stub, 3);
frame_->Push(&result);
}
(...skipping 409 matching lines...)
answer = GenerateGenericBinaryOpStubCall(&stub, &left, &right);
}
}

answer.set_type_info(result_type);
frame_->Push(&answer);
}


bool CodeGenerator::FoldConstantSmis(Token::Value op, int left, int right) {
- Object* answer_object = Heap::undefined_value();
+ Object* answer_object = HEAP->undefined_value();
switch (op) {
case Token::ADD:
// Use intptr_t to detect overflow of 32-bit int.
if (Smi::IsValid(static_cast<intptr_t>(left) + right)) {
answer_object = Smi::FromInt(left + right);
}
break;
case Token::SUB:
// Use intptr_t to detect overflow of 32-bit int.
if (Smi::IsValid(static_cast<intptr_t>(left) - right)) {
(...skipping 53 matching lines...)
unsigned_left >>= shift_amount;
}
ASSERT(Smi::IsValid(static_cast<int32_t>(unsigned_left)));
answer_object = Smi::FromInt(static_cast<int32_t>(unsigned_left));
break;
}
default:
UNREACHABLE();
break;
}
- if (answer_object == Heap::undefined_value()) {
+ if (answer_object->IsUndefined()) {
return false;
}
frame_->Push(Handle<Object>(answer_object));
return true;
}


void CodeGenerator::JumpIfBothSmiUsingTypeInfo(Result* left,
Result* right,
JumpTarget* both_smi) {
(...skipping 214 matching lines...)
__ AbortIfNotSmi(right->reg());
}
// If left is not known to be a smi, check if it is.
// If left is not known to be a number, and it isn't a smi, check if
// it is a HeapNumber.
if (!left_type_info.IsSmi()) {
__ JumpIfSmi(answer.reg(), &do_op);
if (!left_type_info.IsNumber()) {
// Branch if not a heapnumber.
__ Cmp(FieldOperand(answer.reg(), HeapObject::kMapOffset),
- Factory::heap_number_map());
+ FACTORY->heap_number_map());
deferred->Branch(not_equal);
}
// Load integer value into answer register using truncation.
__ cvttsd2si(answer.reg(),
FieldOperand(answer.reg(), HeapNumber::kValueOffset));
// Branch if we might have overflowed.
// (False negative for Smi::kMinValue)
__ cmpl(answer.reg(), Immediate(0x80000000));
deferred->Branch(equal);
// TODO(lrn): Inline shifts on int32 here instead of first smi-tagging.
(...skipping 941 matching lines...)
}

// Jump or fall through to here if we are comparing a non-smi to a
// constant smi. If the non-smi is a heap number and this is not
// a loop condition, inline the floating point code.
if (!is_loop_condition) {
// Right side is a constant smi and left side has been checked
// not to be a smi.
JumpTarget not_number;
__ Cmp(FieldOperand(left_reg, HeapObject::kMapOffset),
- Factory::heap_number_map());
+ FACTORY->heap_number_map());
not_number.Branch(not_equal, left_side);
__ movsd(xmm1,
FieldOperand(left_reg, HeapNumber::kValueOffset));
int value = constant_smi->value();
if (value == 0) {
__ xorpd(xmm0, xmm0);
} else {
Result temp = allocator()->Allocate();
__ movl(temp.reg(), Immediate(value));
__ cvtlsi2sd(xmm0, temp.reg());
(...skipping 139 matching lines...)
// stack, as receiver and arguments, and calls x.
// In the implementation comments, we call x the applicand
// and y the receiver.
ASSERT(ArgumentsMode() == LAZY_ARGUMENTS_ALLOCATION);
ASSERT(arguments->IsArguments());

// Load applicand.apply onto the stack. This will usually
// give us a megamorphic load site. Not super, but it works.
Load(applicand);
frame()->Dup();
- Handle<String> name = Factory::LookupAsciiSymbol("apply");
+ Handle<String> name = FACTORY->LookupAsciiSymbol("apply");
frame()->Push(name);
Result answer = frame()->CallLoadIC(RelocInfo::CODE_TARGET);
__ nop();
frame()->Push(&answer);

// Load the receiver and the existing arguments object onto the
// expression stack. Avoid allocating the arguments object here.
Load(receiver);
LoadFromSlot(scope()->arguments()->AsSlot(), NOT_INSIDE_TYPEOF);

(...skipping 47 matching lines...)
__ j(below, &build_args);

// Check that applicand.apply is Function.prototype.apply.
__ movq(rax, Operand(rsp, kPointerSize));
is_smi = masm_->CheckSmi(rax);
__ j(is_smi, &build_args);
__ CmpObjectType(rax, JS_FUNCTION_TYPE, rcx);
__ j(not_equal, &build_args);
__ movq(rcx, FieldOperand(rax, JSFunction::kCodeEntryOffset));
__ subq(rcx, Immediate(Code::kHeaderSize - kHeapObjectTag));
- Handle<Code> apply_code(Builtins::builtin(Builtins::FunctionApply));
+ Handle<Code> apply_code(Isolate::Current()->builtins()->builtin(
+     Builtins::FunctionApply));
__ Cmp(rcx, apply_code);
__ j(not_equal, &build_args);

// Check that applicand is a function.
__ movq(rdi, Operand(rsp, 2 * kPointerSize));
is_smi = masm_->CheckSmi(rdi);
__ j(is_smi, &build_args);
__ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
__ j(not_equal, &build_args);

(...skipping 222 matching lines...)
Result ignored = frame_->CallRuntime(Runtime::kDeclareContextSlot, 4);
// Ignore the return value (declarations are statements).
return;
}

ASSERT(!var->is_global());

// If we have a function or a constant, we need to initialize the variable.
Expression* val = NULL;
if (node->mode() == Variable::CONST) {
- val = new Literal(Factory::the_hole_value());
+ val = new Literal(FACTORY->the_hole_value());
} else {
val = node->fun(); // NULL if we don't have a function
}

if (val != NULL) {
{
// Set the initial value.
Reference target(this, node->proxy());
Load(val);
target.SetValue(NOT_CONST_INIT);
(...skipping 1155 matching lines...)
// After shadowing stops, the original targets are unshadowed and the
// ShadowTargets represent the formerly shadowing targets.
bool has_unlinks = false;
for (int i = 0; i < shadows.length(); i++) {
shadows[i]->StopShadowing();
has_unlinks = has_unlinks || shadows[i]->is_linked();
}
function_return_is_shadowed_ = function_return_was_shadowed;

// Get an external reference to the handler address.
- ExternalReference handler_address(Top::k_handler_address);
+ ExternalReference handler_address(Isolate::k_handler_address);

// Make sure that there's nothing left on the stack above the
// handler structure.
if (FLAG_debug_code) {
__ movq(kScratchRegister, handler_address);
__ cmpq(rsp, Operand(kScratchRegister, 0));
__ Assert(equal, "stack pointer should point to top handler");
}

// If we can fall off the end of the try block, unlink from try chain.
(...skipping 108 matching lines...)
// After shadowing stops, the original targets are unshadowed and the
// ShadowTargets represent the formerly shadowing targets.
int nof_unlinks = 0;
for (int i = 0; i < shadows.length(); i++) {
shadows[i]->StopShadowing();
if (shadows[i]->is_linked()) nof_unlinks++;
}
function_return_is_shadowed_ = function_return_was_shadowed;

// Get an external reference to the handler address.
- ExternalReference handler_address(Top::k_handler_address);
+ ExternalReference handler_address(Isolate::k_handler_address);

// If we can fall off the end of the try block, unlink from the try
// chain and set the state on the frame to FALLING.
if (has_valid_frame()) {
// The next handler address is on top of the frame.
STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
__ movq(kScratchRegister, handler_address);
frame_->EmitPop(Operand(kScratchRegister, 0));
frame_->Drop(StackHandlerConstants::kSize / kPointerSize - 1);

(...skipping 146 matching lines...)
function_info->strict_mode() ? kStrictMode : kNonStrictMode);
frame_->Push(function_info);
Result answer = frame_->CallStub(&stub, 1);
frame_->Push(&answer);
} else {
// Call the runtime to instantiate the function based on the
// shared function info.
frame_->EmitPush(rsi);
frame_->EmitPush(function_info);
frame_->EmitPush(pretenure
- ? Factory::true_value()
- : Factory::false_value());
+ ? FACTORY->true_value()
+ : FACTORY->false_value());
Result result = frame_->CallRuntime(Runtime::kNewClosure, 3);
frame_->Push(&result);
}
}


void CodeGenerator::VisitFunctionLiteral(FunctionLiteral* node) {
Comment cmnt(masm_, "[ FunctionLiteral");

// Build the function info and instantiate it.
(...skipping 468 matching lines...)
if (!boilerplate_.is(rax)) __ movq(boilerplate_, rax);
}


class DeferredAllocateInNewSpace: public DeferredCode {
public:
DeferredAllocateInNewSpace(int size,
Register target,
int registers_to_save = 0)
: size_(size), target_(target), registers_to_save_(registers_to_save) {
- ASSERT(size >= kPointerSize && size <= Heap::MaxObjectSizeInNewSpace());
+ ASSERT(size >= kPointerSize && size <= HEAP->MaxObjectSizeInNewSpace());
set_comment("[ DeferredAllocateInNewSpace");
}
void Generate();

private:
int size_;
Register target_;
int registers_to_save_;
};

(...skipping 194 matching lines...)

// Load the literals array of the function.
__ movq(literals.reg(),
FieldOperand(literals.reg(), JSFunction::kLiteralsOffset));

frame_->Push(&literals);
frame_->Push(Smi::FromInt(node->literal_index()));
frame_->Push(node->constant_elements());
int length = node->values()->length();
Result clone;
- if (node->constant_elements()->map() == Heap::fixed_cow_array_map()) {
+ if (node->constant_elements()->map() == HEAP->fixed_cow_array_map()) {
FastCloneShallowArrayStub stub(
FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS, length);
clone = frame_->CallStub(&stub, 3);
- __ IncrementCounter(&Counters::cow_arrays_created_stub, 1);
+ __ IncrementCounter(COUNTERS->cow_arrays_created_stub(), 1);
} else if (node->depth() > 1) {
clone = frame_->CallRuntime(Runtime::kCreateArrayLiteral, 3);
} else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
clone = frame_->CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
} else {
FastCloneShallowArrayStub stub(
FastCloneShallowArrayStub::CLONE_ELEMENTS, length);
clone = frame_->CallStub(&stub, 3);
}
frame_->Push(&clone);
(...skipping 381 matching lines...)

// In a call to eval, we first call %ResolvePossiblyDirectEval to
// resolve the function we need to call and the receiver of the
// call. Then we call the resolved function using the given
// arguments.

// Prepare the stack for the call to the resolved function.
Load(function);

// Allocate a frame slot for the receiver.
- frame_->Push(Factory::undefined_value());
+ frame_->Push(FACTORY->undefined_value());

// Load the arguments.
int arg_count = args->length();
for (int i = 0; i < arg_count; i++) {
Load(args->at(i));
frame_->SpillTop();
}

// Result to hold the result of the function resolution and the
// final result of the eval call.
(...skipping 11 matching lines...)
// ResolvePossiblyDirectEvalNoLookup by pushing the loaded
// function, the first argument to the eval call and the
// receiver.
Result fun = LoadFromGlobalSlotCheckExtensions(var->AsSlot(),
NOT_INSIDE_TYPEOF,
&slow);
frame_->Push(&fun);
if (arg_count > 0) {
frame_->PushElementAt(arg_count);
} else {
- frame_->Push(Factory::undefined_value());
+ frame_->Push(FACTORY->undefined_value());
}
frame_->PushParameterAt(-1);

// Push the strict mode flag.
frame_->Push(Smi::FromInt(strict_mode_flag()));

// Resolve the call.
result =
frame_->CallRuntime(Runtime::kResolvePossiblyDirectEvalNoLookup, 4);

done.Jump(&result);
slow.Bind();
}

// Prepare the stack for the call to ResolvePossiblyDirectEval by
// pushing the loaded function, the first argument to the eval
// call and the receiver.
frame_->PushElementAt(arg_count + 1);
if (arg_count > 0) {
frame_->PushElementAt(arg_count);
} else {
- frame_->Push(Factory::undefined_value());
+ frame_->Push(FACTORY->undefined_value());
}
frame_->PushParameterAt(-1);

// Push the strict mode flag.
frame_->Push(Smi::FromInt(strict_mode_flag()));

// Resolve the call.
result = frame_->CallRuntime(Runtime::kResolvePossiblyDirectEval, 4);

// If we generated fast-case code bind the jump-target where fast
(...skipping 268 matching lines...)
// 2 (array): Arguments to the format string.
ASSERT_EQ(args->length(), 3);
#ifdef ENABLE_LOGGING_AND_PROFILING
if (ShouldGenerateLog(args->at(0))) {
Load(args->at(1));
Load(args->at(2));
frame_->CallRuntime(Runtime::kLog, 2);
}
#endif
// Finally, we're expected to leave a value on the top of the stack.
- frame_->Push(Factory::undefined_value());
+ frame_->Push(FACTORY->undefined_value());
}


void CodeGenerator::GenerateIsNonNegativeSmi(ZoneList<Expression*>* args) {
ASSERT(args->length() == 1);
Load(args->at(0));
Result value = frame_->Pop();
value.ToRegister();
ASSERT(value.is_valid());
Condition non_negative_smi = masm_->CheckNonNegativeSmi(value.reg());
(...skipping 242 matching lines...)
void CodeGenerator::GenerateIsObject(ZoneList<Expression*>* args) {
// This generates a fast version of:
// (typeof(arg) === 'object' || %_ClassOf(arg) == 'RegExp')
ASSERT(args->length() == 1);
Load(args->at(0));
Result obj = frame_->Pop();
obj.ToRegister();
Condition is_smi = masm_->CheckSmi(obj.reg());
destination()->false_target()->Branch(is_smi);

- __ Move(kScratchRegister, Factory::null_value());
+ __ Move(kScratchRegister, FACTORY->null_value());
__ cmpq(obj.reg(), kScratchRegister);
destination()->true_target()->Branch(equal);

__ movq(kScratchRegister, FieldOperand(obj.reg(), HeapObject::kMapOffset));
// Undetectable objects behave like undefined when tested with typeof.
__ testb(FieldOperand(kScratchRegister, Map::kBitFieldOffset),
Immediate(1 << Map::kIsUndetectable));
destination()->false_target()->Branch(not_zero);
__ movzxbq(kScratchRegister,
FieldOperand(kScratchRegister, Map::kInstanceTypeOffset));
(...skipping 71 matching lines...)
// Calculate location of the first key name.
__ addq(map_result_,
Immediate(FixedArray::kHeaderSize +
DescriptorArray::kFirstIndex * kPointerSize));
// Loop through all the keys in the descriptor array. If one of these is the
// symbol valueOf the result is false.
Label entry, loop;
__ jmp(&entry);
__ bind(&loop);
__ movq(scratch2_, FieldOperand(map_result_, 0));
- __ Cmp(scratch2_, Factory::value_of_symbol());
+ __ Cmp(scratch2_, FACTORY->value_of_symbol());
__ j(equal, &false_result);
__ addq(map_result_, Immediate(kPointerSize));
__ bind(&entry);
__ cmpq(map_result_, scratch1_);
__ j(not_equal, &loop);

// Reload map as register map_result_ was used as temporary above.
__ movq(map_result_, FieldOperand(object_, HeapObject::kMapOffset));

// If a valueOf property is not found on the object check that it's
(...skipping 191 matching lines...)
__ movq(obj.reg(),
FieldOperand(obj.reg(), JSFunction::kSharedFunctionInfoOffset));
__ movq(obj.reg(),
FieldOperand(obj.reg(),
SharedFunctionInfo::kInstanceClassNameOffset));
frame_->Push(&obj);
leave.Jump();

// Functions have class 'Function'.
function.Bind();
- frame_->Push(Factory::function_class_symbol());
+ frame_->Push(FACTORY->function_class_symbol());
leave.Jump();

// Objects with a non-function constructor have class 'Object'.
non_function_constructor.Bind();
- frame_->Push(Factory::Object_symbol());
+ frame_->Push(FACTORY->Object_symbol());
leave.Jump();

// Non-JS objects have class null.
null.Bind();
- frame_->Push(Factory::null_value());
+ frame_->Push(FACTORY->null_value());

// All done.
leave.Bind();
}


void CodeGenerator::GenerateValueOf(ZoneList<Expression*>* args) {
ASSERT(args->length() == 1);
JumpTarget leave;
Load(args->at(0)); // Load the object.
(...skipping 351 matching lines...)
}


void CodeGenerator::GenerateGetFromCache(ZoneList<Expression*>* args) {
ASSERT_EQ(2, args->length());

ASSERT_NE(NULL, args->at(0)->AsLiteral());
int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->handle()))->value();

Handle<FixedArray> jsfunction_result_caches(
- Top::global_context()->jsfunction_result_caches());
+ Isolate::Current()->global_context()->jsfunction_result_caches());
if (jsfunction_result_caches->length() <= cache_id) {
__ Abort("Attempt to use undefined cache.");
- frame_->Push(Factory::undefined_value());
+ frame_->Push(FACTORY->undefined_value());
return;
}

Load(args->at(1));
Result key = frame_->Pop();
key.ToRegister();

Result cache = allocator()->Allocate();
ASSERT(cache.is_valid());
__ movq(cache.reg(), ContextOperand(rsi, Context::GLOBAL_INDEX));
(...skipping 145 matching lines...)
__ InNewSpace(tmp1.reg(), tmp2.reg(), equal, &done);
// Possible optimization: do a check that both values are smis
// (or them and test against Smi mask.)

__ movq(tmp2.reg(), tmp1.reg());
__ RecordWriteHelper(tmp1.reg(), index1.reg(), object.reg(), kDontSaveFPRegs);
__ RecordWriteHelper(tmp2.reg(), index2.reg(), object.reg(), kDontSaveFPRegs);
__ bind(&done);

deferred->BindExit();
- frame_->Push(Factory::undefined_value());
+ frame_->Push(FACTORY->undefined_value());
}


void CodeGenerator::GenerateCallFunction(ZoneList<Expression*>* args) {
Comment cmnt(masm_, "[ GenerateCallFunction");

ASSERT(args->length() >= 2);

int n_args = args->length() - 2; // for receiver and function.
Load(args->at(0)); // receiver
(...skipping 323 matching lines...)
Result number = allocator()->Allocate();
ASSERT(number.is_valid());
__ movl(number.reg(), FieldOperand(string.reg(), String::kHashFieldOffset));
__ IndexFromHash(number.reg(), number.reg());
string.Unuse();
frame_->Push(&number);
}


void CodeGenerator::GenerateFastAsciiArrayJoin(ZoneList<Expression*>* args) {
- frame_->Push(Factory::undefined_value());
+ frame_->Push(FACTORY->undefined_value());
}


void CodeGenerator::VisitCallRuntime(CallRuntime* node) {
if (CheckForInlineRuntimeCall(node)) {
return;
}

ZoneList<Expression*>* args = node->arguments();
Comment cmnt(masm_, "[ CallRuntime");
- Runtime::Function* function = node->function();
+ const Runtime::Function* function = node->function();

if (function == NULL) {
// Push the builtins object found in the current global object.
Result temp = allocator()->Allocate();
ASSERT(temp.is_valid());
__ movq(temp.reg(), GlobalObjectOperand());
__ movq(temp.reg(),
FieldOperand(temp.reg(), GlobalObject::kBuiltinsOffset));
frame_->Push(&temp);
}
(...skipping 63 matching lines...)
// variable. Sync the virtual frame eagerly so we can push the
// arguments directly into place.
frame_->SyncRange(0, frame_->element_count() - 1);
frame_->EmitPush(rsi);
frame_->EmitPush(variable->name());
Result answer = frame_->CallRuntime(Runtime::kDeleteContextSlot, 2);
frame_->Push(&answer);
} else {
// Default: Result of deleting non-global, not dynamically
// introduced variables is false.
- frame_->Push(Factory::false_value());
+ frame_->Push(FACTORY->false_value());
}
} else {
// Default: Result of deleting expressions is true.
Load(node->expression()); // may have side-effects
- frame_->SetElementAt(0, Factory::true_value());
+ frame_->SetElementAt(0, FACTORY->true_value());
}

} else if (op == Token::TYPEOF) {
// Special case for loading the typeof expression; see comment on
// LoadTypeofExpression().
LoadTypeofExpression(node->expression());
Result answer = frame_->CallRuntime(Runtime::kTypeof, 1);
frame_->Push(&answer);

} else if (op == Token::VOID) {
Expression* expression = node->expression();
if (expression && expression->AsLiteral() && (
expression->AsLiteral()->IsTrue() ||
expression->AsLiteral()->IsFalse() ||
expression->AsLiteral()->handle()->IsNumber() ||
expression->AsLiteral()->handle()->IsString() ||
expression->AsLiteral()->handle()->IsJSRegExp() ||
expression->AsLiteral()->IsNull())) {
// Omit evaluating the value of the primitive literal.
// It will be discarded anyway, and can have no side effect.
- frame_->Push(Factory::undefined_value());
+ frame_->Push(FACTORY->undefined_value());
} else {
Load(node->expression());
- frame_->SetElementAt(0, Factory::undefined_value());
+ frame_->SetElementAt(0, FACTORY->undefined_value());
}

} else {
bool can_overwrite = node->expression()->ResultOverwriteAllowed();
UnaryOverwriteMode overwrite =
can_overwrite ? UNARY_OVERWRITE : UNARY_NO_OVERWRITE;
bool no_negative_zero = node->expression()->no_negative_zero();
Load(node->expression());
switch (op) {
case Token::NOT:
(...skipping 451 matching lines...)
7785 (operation != NULL && operation->op() == Token::TYPEOF) && 7786 (operation != NULL && operation->op() == Token::TYPEOF) &&
7786 (right->AsLiteral() != NULL && 7787 (right->AsLiteral() != NULL &&
7787 right->AsLiteral()->handle()->IsString())) { 7788 right->AsLiteral()->handle()->IsString())) {
7788 Handle<String> check(Handle<String>::cast(right->AsLiteral()->handle())); 7789 Handle<String> check(Handle<String>::cast(right->AsLiteral()->handle()));
7789 7790
7790 // Load the operand and move it to a register. 7791 // Load the operand and move it to a register.
7791 LoadTypeofExpression(operation->expression()); 7792 LoadTypeofExpression(operation->expression());
7792 Result answer = frame_->Pop(); 7793 Result answer = frame_->Pop();
7793 answer.ToRegister(); 7794 answer.ToRegister();
7794 7795

-   if (check->Equals(Heap::number_symbol())) {
+   if (check->Equals(HEAP->number_symbol())) {
      Condition is_smi = masm_->CheckSmi(answer.reg());
      destination()->true_target()->Branch(is_smi);
      frame_->Spill(answer.reg());
      __ movq(answer.reg(), FieldOperand(answer.reg(), HeapObject::kMapOffset));
      __ CompareRoot(answer.reg(), Heap::kHeapNumberMapRootIndex);
      answer.Unuse();
      destination()->Split(equal);

-   } else if (check->Equals(Heap::string_symbol())) {
+   } else if (check->Equals(HEAP->string_symbol())) {
      Condition is_smi = masm_->CheckSmi(answer.reg());
      destination()->false_target()->Branch(is_smi);

      // It can be an undetectable string object.
      __ movq(kScratchRegister,
              FieldOperand(answer.reg(), HeapObject::kMapOffset));
      __ testb(FieldOperand(kScratchRegister, Map::kBitFieldOffset),
               Immediate(1 << Map::kIsUndetectable));
      destination()->false_target()->Branch(not_zero);
      __ CmpInstanceType(kScratchRegister, FIRST_NONSTRING_TYPE);
      answer.Unuse();
      destination()->Split(below);  // Unsigned byte comparison needed.

-   } else if (check->Equals(Heap::boolean_symbol())) {
+   } else if (check->Equals(HEAP->boolean_symbol())) {
      __ CompareRoot(answer.reg(), Heap::kTrueValueRootIndex);
      destination()->true_target()->Branch(equal);
      __ CompareRoot(answer.reg(), Heap::kFalseValueRootIndex);
      answer.Unuse();
      destination()->Split(equal);

-   } else if (check->Equals(Heap::undefined_symbol())) {
+   } else if (check->Equals(HEAP->undefined_symbol())) {
      __ CompareRoot(answer.reg(), Heap::kUndefinedValueRootIndex);
      destination()->true_target()->Branch(equal);

      Condition is_smi = masm_->CheckSmi(answer.reg());
      destination()->false_target()->Branch(is_smi);

      // It can be an undetectable object.
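      // (Undetectable objects carry the Map::kIsUndetectable bit and are
      // deliberately invisible to typeof; the classic motivating case is
      // the host object used to emulate document.all, which must answer
      // 'undefined' here even though it is a genuine object.)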
      __ movq(kScratchRegister,
              FieldOperand(answer.reg(), HeapObject::kMapOffset));
      __ testb(FieldOperand(kScratchRegister, Map::kBitFieldOffset),
               Immediate(1 << Map::kIsUndetectable));
      answer.Unuse();
      destination()->Split(not_zero);

-   } else if (check->Equals(Heap::function_symbol())) {
+   } else if (check->Equals(HEAP->function_symbol())) {
      Condition is_smi = masm_->CheckSmi(answer.reg());
      destination()->false_target()->Branch(is_smi);
      frame_->Spill(answer.reg());
      __ CmpObjectType(answer.reg(), JS_FUNCTION_TYPE, answer.reg());
      destination()->true_target()->Branch(equal);
      // Regular expressions are callable so typeof == 'function'.
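      // (In this vintage of V8 a regexp object is callable, so e.g.
      // typeof /x/ evaluates to 'function' rather than 'object'; the
      // instance-type check below implements exactly that carve-out.)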
      __ CmpInstanceType(answer.reg(), JS_REGEXP_TYPE);
      answer.Unuse();
      destination()->Split(equal);

-   } else if (check->Equals(Heap::object_symbol())) {
+   } else if (check->Equals(HEAP->object_symbol())) {
      Condition is_smi = masm_->CheckSmi(answer.reg());
      destination()->false_target()->Branch(is_smi);
      __ CompareRoot(answer.reg(), Heap::kNullValueRootIndex);
      destination()->true_target()->Branch(equal);

      // Regular expressions are typeof == 'function', not 'object'.
      __ CmpObjectType(answer.reg(), JS_REGEXP_TYPE, kScratchRegister);
      destination()->false_target()->Branch(equal);

      // It can be an undetectable object.
(...skipping 143 matching lines...)
  Register receiver_;
  Handle<String> name_;
};


void DeferredReferenceGetNamedValue::Generate() {
  if (!receiver_.is(rax)) {
    __ movq(rax, receiver_);
  }
  __ Move(rcx, name_);
- Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
+ Handle<Code> ic(Isolate::Current()->builtins()->builtin(
+     Builtins::LoadIC_Initialize));
  __ Call(ic, RelocInfo::CODE_TARGET);
  // The call must be followed by a test rax instruction to indicate
  // that the inobject property case was inlined.
  //
  // Store the delta to the map check instruction here in the test
  // instruction. Use masm_-> instead of the __ macro since the
  // latter can't return a value.
  int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(patch_site());
  // Here we use masm_-> instead of the __ macro because this is the
  // instruction that gets patched and coverage code gets in the way.
  masm_->testl(rax, Immediate(-delta_to_patch_site));
- __ IncrementCounter(&Counters::named_load_inline_miss, 1);
+ __ IncrementCounter(COUNTERS->named_load_inline_miss(), 1);

  if (!dst_.is(rax)) __ movq(dst_, rax);
}
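
// A sketch of how the IC machinery consumes the testl emitted above (the
// idea only, not the literal patching code): the instruction is never
// needed for its flags, but its 32-bit immediate stores the negated
// distance from the inlined map check back to the test, so the runtime
// can locate the code to patch starting from the call's return address.
//
//   // Hypothetical decoder; names here are illustrative only.
//   Address test_site = ic_call_return_address;
//   int32_t delta = -read_int32(test_site + 1);     // imm32 of the testl
//   Address inlined_map_check = test_site - delta;  // movq to patch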


class DeferredReferenceGetKeyedValue: public DeferredCode {
 public:
  explicit DeferredReferenceGetKeyedValue(Register dst,
                                          Register receiver,
                                          Register key)
(...skipping 32 matching lines...)
  } else {
    __ movq(rax, key_);
    __ movq(rdx, receiver_);
  }
  // Calculate the delta from the IC call instruction to the map check
  // movq instruction in the inlined version. This delta is stored in
  // a test(rax, delta) instruction after the call so that we can find
  // it in the IC initialization code and patch the movq instruction.
  // This means that we cannot allow test instructions after calls to
  // KeyedLoadIC stubs in other places.
- Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
+ Handle<Code> ic(Isolate::Current()->builtins()->builtin(
+     Builtins::KeyedLoadIC_Initialize));
  __ Call(ic, RelocInfo::CODE_TARGET);
  // The delta from the start of the map-compare instruction to the
  // test instruction. We use masm_-> directly here instead of the __
  // macro because the macro sometimes uses macro expansion to turn
  // into something that can't return a value. This is encountered
  // when doing generated code coverage tests.
  int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(patch_site());
  // Here we use masm_-> instead of the __ macro because this is the
  // instruction that gets patched and coverage code gets in the way.
  // TODO(X64): Consider whether it's worth switching the test to a
  // 7-byte NOP with non-zero immediate (0f 1f 80 xxxxxxxx) which won't
  // be generated normally.
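  // (For reference, and assuming the encoding the TODO has in mind:
  // 0f 1f 80 is the long-form x86 NOP `nop dword ptr [rax + disp32]`;
  // its 4-byte displacement could carry the delta exactly as the test's
  // immediate does, and since the code generator never emits that form
  // on its own, the "no stray test after IC calls" invariant above
  // would no longer be needed.)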
  masm_->testl(rax, Immediate(-delta_to_patch_site));
- __ IncrementCounter(&Counters::keyed_load_inline_miss, 1);
+ __ IncrementCounter(COUNTERS->keyed_load_inline_miss(), 1);

  if (!dst_.is(rax)) __ movq(dst_, rax);
}


class DeferredReferenceSetKeyedValue: public DeferredCode {
 public:
  DeferredReferenceSetKeyedValue(Register value,
                                 Register key,
                                 Register receiver,
(...skipping 12 matching lines...)
 private:
  Register value_;
  Register key_;
  Register receiver_;
  Label patch_site_;
  StrictModeFlag strict_mode_;
};


void DeferredReferenceSetKeyedValue::Generate() {
- __ IncrementCounter(&Counters::keyed_store_inline_miss, 1);
+ __ IncrementCounter(COUNTERS->keyed_store_inline_miss(), 1);
  // Move value, receiver, and key to registers rax, rdx, and rcx, as
  // the IC stub expects.
  // Move value to rax, using xchg if the receiver or key is in rax.
  if (!value_.is(rax)) {
    if (!receiver_.is(rax) && !key_.is(rax)) {
      __ movq(rax, value_);
    } else {
      __ xchg(rax, value_);
      // Update receiver_ and key_ if they are affected by the swap.
      if (receiver_.is(rax)) {
(...skipping 26 matching lines...)
      __ movq(rcx, key_);
    }
  } else if (key_.is(rcx)) {
    __ movq(rdx, receiver_);
  } else {
    __ movq(rcx, key_);
    __ movq(rdx, receiver_);
  }
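  // The moves above solve a small register-permutation problem: value,
  // receiver, and key may arrive in any assignment of rax, rcx, and rdx,
  // so a naive movq sequence could clobber a source before reading it.
  // The xchg covers the case where rax is already occupied, and the
  // ordering of the rcx/rdx moves covers the remaining aliasing cases.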

  // Call the IC stub.
- Handle<Code> ic(Builtins::builtin(
+ Handle<Code> ic(Isolate::Current()->builtins()->builtin(
      (strict_mode_ == kStrictMode) ? Builtins::KeyedStoreIC_Initialize_Strict
                                    : Builtins::KeyedStoreIC_Initialize));
  __ Call(ic, RelocInfo::CODE_TARGET);
  // The delta from the start of the map-compare instructions (initial movq)
  // to the test instruction. We use masm_-> directly here instead of the
  // __ macro because the macro sometimes uses macro expansion to turn
  // into something that can't return a value. This is encountered
  // when doing generated code coverage tests.
  int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(patch_site());
  // Here we use masm_-> instead of the __ macro because this is the
(...skipping 48 matching lines...)
    DeferredReferenceGetNamedValue* deferred =
        new DeferredReferenceGetNamedValue(result.reg(), receiver.reg(), name);

    // Check that the receiver is a heap object.
    __ JumpIfSmi(receiver.reg(), deferred->entry_label());

    __ bind(deferred->patch_site());
    // This is the map check instruction that will be patched (so we can't
    // use the double underscore macro that may insert instructions).
    // Initially use an invalid map to force a failure.
-   masm()->movq(kScratchRegister, Factory::null_value(),
+   masm()->movq(kScratchRegister, FACTORY->null_value(),
                 RelocInfo::EMBEDDED_OBJECT);
    masm()->cmpq(FieldOperand(receiver.reg(), HeapObject::kMapOffset),
                 kScratchRegister);
    // This branch is always a forwards branch so it's always a fixed
    // size which allows the assert below to succeed and patching to work.
    // Don't use deferred->Branch(...), since that might add coverage code.
    masm()->j(not_equal, deferred->entry_label());

    // The delta from the patch label to the load offset must be
    // statically known.
    ASSERT(masm()->SizeOfCodeGeneratedSince(deferred->patch_site()) ==
           LoadIC::kOffsetToLoadInstruction);
    // The initial (invalid) offset has to be large enough to force
    // a 32-bit instruction encoding to allow patching with an
    // arbitrary offset. Use kMaxInt (minus kHeapObjectTag).
    int offset = kMaxInt;
    masm()->movq(result.reg(), FieldOperand(receiver.reg(), offset));

-   __ IncrementCounter(&Counters::named_load_inline, 1);
+   __ IncrementCounter(COUNTERS->named_load_inline(), 1);
    deferred->BindExit();
  }
  ASSERT(frame()->height() == original_height - 1);
  return result;
}
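
// After the LoadIC has patched the sequence emitted above, the inline
// fast path has roughly this shape (a sketch of the post-patch code,
// not literal assembler output):
//
//   movq kScratchRegister, <expected map>         ;; was null_value
//   cmpq [receiver + kMapOffset - tag], kScratchRegister
//   jne  <deferred IC call>
//   movq result, [receiver + <real field offset>] ;; was kMaxInt
//
// so a monomorphic in-object named load costs one map compare, one
// branch, and one load.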


Result CodeGenerator::EmitNamedStore(Handle<String> name, bool is_contextual) {
#ifdef DEBUG
  int expected_height = frame()->height() - (is_contextual ? 1 : 2);
(...skipping 37 matching lines...)
    Condition is_smi = masm()->CheckSmi(receiver.reg());
    slow.Branch(is_smi, &value, &receiver);

    // This is the map check instruction that will be patched.
    // Initially use an invalid map to force a failure. The exact
    // instruction sequence is important because we use the
    // kOffsetToStoreInstruction constant for patching. We avoid using
    // the __ macro for the following two instructions because it
    // might introduce extra instructions.
    __ bind(&patch_site);
-   masm()->movq(kScratchRegister, Factory::null_value(),
+   masm()->movq(kScratchRegister, FACTORY->null_value(),
                 RelocInfo::EMBEDDED_OBJECT);
    masm()->cmpq(FieldOperand(receiver.reg(), HeapObject::kMapOffset),
                 kScratchRegister);
    // This branch is always a forwards branch so it's always a fixed size
    // which allows the assert below to succeed and patching to work.
    slow.Branch(not_equal, &value, &receiver);

    // The delta from the patch label to the store offset must be
    // statically known.
    ASSERT(masm()->SizeOfCodeGeneratedSince(&patch_site) ==
(...skipping 96 matching lines...)
    __ JumpIfSmi(receiver.reg(), deferred->entry_label());

    // Check that the receiver has the expected map.
    // Initially, use an invalid map. The map is patched in the IC
    // initialization code.
    __ bind(deferred->patch_site());
    // Use masm-> here instead of the double underscore macro since extra
    // coverage code can interfere with the patching. Do not use a load
    // from the root array to load null_value, since the load must be patched
    // with the expected receiver map, which is not in the root array.
-   masm_->movq(kScratchRegister, Factory::null_value(),
+   masm_->movq(kScratchRegister, FACTORY->null_value(),
                RelocInfo::EMBEDDED_OBJECT);
    masm_->cmpq(FieldOperand(receiver.reg(), HeapObject::kMapOffset),
                kScratchRegister);
    deferred->Branch(not_equal);

    __ JumpUnlessNonNegativeSmi(key.reg(), deferred->entry_label());

    // Get the elements array from the receiver.
    __ movq(elements.reg(),
            FieldOperand(receiver.reg(), JSObject::kElementsOffset));
(...skipping 14 matching lines...)
    SmiIndex index =
        masm_->SmiToIndex(kScratchRegister, key.reg(), kPointerSizeLog2);
    __ movq(elements.reg(),
            FieldOperand(elements.reg(),
                         index.reg,
                         index.scale,
                         FixedArray::kHeaderSize));
    result = elements;
    __ CompareRoot(result.reg(), Heap::kTheHoleValueRootIndex);
    deferred->Branch(equal);
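    // Loading the hole here means the element was deleted or never
    // present; the hole value must not escape to JS code, so this bails
    // out to the deferred IC path just like a failed map check would.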
-   __ IncrementCounter(&Counters::keyed_load_inline, 1);
+   __ IncrementCounter(COUNTERS->keyed_load_inline(), 1);

    deferred->BindExit();
  } else {
    Comment cmnt(masm_, "[ Load from keyed Property");
    result = frame_->CallKeyedLoadIC(RelocInfo::CODE_TARGET);
    // Make sure that we do not have a test instruction after the
    // call. A test instruction after the call is used to
    // indicate that we have generated an inline version of the
    // keyed load. The explicit nop instruction is here because
    // the push that follows might be peep-hole optimized away.
(...skipping 69 matching lines...)

    __ bind(&in_new_space);

    // Bind the deferred code patch site to be able to locate the fixed
    // array map comparison. When debugging, we patch this comparison to
    // always fail so that we will hit the IC call in the deferred code
    // which will allow the debugger to break for fast case stores.
    __ bind(deferred->patch_site());
    // Avoid using __ to ensure the distance from patch_site
    // to the map address is always the same.
-   masm()->movq(kScratchRegister, Factory::fixed_array_map(),
+   masm()->movq(kScratchRegister, FACTORY->fixed_array_map(),
                 RelocInfo::EMBEDDED_OBJECT);
    __ cmpq(FieldOperand(tmp.reg(), HeapObject::kMapOffset),
            kScratchRegister);
    deferred->Branch(not_equal);

    // Check that the key is within bounds. Both the key and the length of
    // the JSArray are smis (because the fixed array check above ensures the
    // elements are in fast case). Use unsigned comparison to handle negative
    // keys.
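    // (A negative key, viewed as an unsigned smi, compares above any
    // valid array length, so the single below_equal branch rejects both
    // out-of-range and negative keys at once.)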
    __ SmiCompare(FieldOperand(receiver.reg(), JSArray::kLengthOffset),
                  key.reg());
    deferred->Branch(below_equal);

    // Store the value.
    SmiIndex index =
        masm()->SmiToIndex(kScratchRegister, key.reg(), kPointerSizeLog2);
    __ movq(FieldOperand(tmp.reg(),
                         index.reg,
                         index.scale,
                         FixedArray::kHeaderSize),
            result.reg());
-   __ IncrementCounter(&Counters::keyed_store_inline, 1);
+   __ IncrementCounter(COUNTERS->keyed_store_inline(), 1);

    deferred->BindExit();
  } else {
    result = frame()->CallKeyedStoreIC(strict_mode_flag());
    // Make sure that we do not have a test instruction after the
    // call. A test instruction after the call is used to
    // indicate that we have generated an inline version of the
    // keyed store.
    __ nop();
  }
(...skipping 262 matching lines...)
}

#endif


#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_X64