OLD | NEW |
1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 130 matching lines...)
141 info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) { | 141 info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) { |
142 __ Debug("stop-at", __LINE__, BREAK); | 142 __ Debug("stop-at", __LINE__, BREAK); |
143 } | 143 } |
144 #endif | 144 #endif |
145 | 145 |
146 // Classic mode functions and builtins need to replace the receiver with the | 146 // Classic mode functions and builtins need to replace the receiver with the |
147 // global proxy when called as functions (without an explicit receiver | 147 // global proxy when called as functions (without an explicit receiver |
148 // object). | 148 // object). |
149 if (info->is_classic_mode() && !info->is_native()) { | 149 if (info->is_classic_mode() && !info->is_native()) { |
150 Label ok; | 150 Label ok; |
151 int receiver_offset = info->scope()->num_parameters() * kXRegSizeInBytes; | 151 int receiver_offset = info->scope()->num_parameters() * kXRegSize; |
152 __ Peek(x10, receiver_offset); | 152 __ Peek(x10, receiver_offset); |
153 __ JumpIfNotRoot(x10, Heap::kUndefinedValueRootIndex, &ok); | 153 __ JumpIfNotRoot(x10, Heap::kUndefinedValueRootIndex, &ok); |
154 | 154 |
155 __ Ldr(x10, GlobalObjectMemOperand()); | 155 __ Ldr(x10, GlobalObjectMemOperand()); |
156 __ Ldr(x10, FieldMemOperand(x10, GlobalObject::kGlobalReceiverOffset)); | 156 __ Ldr(x10, FieldMemOperand(x10, GlobalObject::kGlobalReceiverOffset)); |
157 __ Poke(x10, receiver_offset); | 157 __ Poke(x10, receiver_offset); |
158 | 158 |
159 __ Bind(&ok); | 159 __ Bind(&ok); |
160 } | 160 } |
161 | 161 |
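Note on the rename running through this CL: the A64 register-size constants are being split into an explicit bit-size and byte-size pair, so byte offsets (like receiver_offset above) now read kXRegSize instead of kXRegSizeInBytes. A minimal sketch of the assumed definitions (values as usual for 64-bit ARM; illustrative, not the authoritative constants header):

    const int kXRegSizeInBits = 64;              // X register width in bits (assumed)
    const int kXRegSize = kXRegSizeInBits >> 3;  // X register width in bytes, i.e. 8 (assumed)

With kXRegSize == 8, receiver_offset is num_parameters * 8 bytes, i.e. the receiver sits one X-register-sized slot per parameter down from the stack pointer.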
(...skipping 256 matching lines...)
418 CodeGenerator::RecordPositions(masm_, function()->end_position() - 1); | 418 CodeGenerator::RecordPositions(masm_, function()->end_position() - 1); |
419 __ RecordJSReturn(); | 419 __ RecordJSReturn(); |
420 // This code is generated using Assembler methods rather than Macro | 420 // This code is generated using Assembler methods rather than Macro |
421 // Assembler methods because it will be patched later on, and so the size | 421 // Assembler methods because it will be patched later on, and so the size |
422 // of the generated code must be consistent. | 422 // of the generated code must be consistent. |
423 const Register& current_sp = __ StackPointer(); | 423 const Register& current_sp = __ StackPointer(); |
424 // Nothing ensures 16 bytes alignment here. | 424 // Nothing ensures 16 bytes alignment here. |
425 ASSERT(!current_sp.Is(csp)); | 425 ASSERT(!current_sp.Is(csp)); |
426 __ mov(current_sp, fp); | 426 __ mov(current_sp, fp); |
427 int no_frame_start = masm_->pc_offset(); | 427 int no_frame_start = masm_->pc_offset(); |
428 __ ldp(fp, lr, MemOperand(current_sp, 2 * kXRegSizeInBytes, PostIndex)); | 428 __ ldp(fp, lr, MemOperand(current_sp, 2 * kXRegSize, PostIndex)); |
429 // Drop the arguments and receiver and return. | 429 // Drop the arguments and receiver and return. |
430 // TODO(all): This implementation is overkill as it supports 2**31+1 | 430 // TODO(all): This implementation is overkill as it supports 2**31+1 |
431 // arguments, consider how to improve it without creating a security | 431 // arguments, consider how to improve it without creating a security |
432 // hole. | 432 // hole. |
433 __ LoadLiteral(ip0, 3 * kInstructionSize); | 433 __ LoadLiteral(ip0, 3 * kInstructionSize); |
434 __ add(current_sp, current_sp, ip0); | 434 __ add(current_sp, current_sp, ip0); |
435 __ ret(); | 435 __ ret(); |
436 __ dc64(kXRegSizeInBytes * (info_->scope()->num_parameters() + 1)); | 436 __ dc64(kXRegSize * (info_->scope()->num_parameters() + 1)); |
437 info_->AddNoFrameRange(no_frame_start, masm_->pc_offset()); | 437 info_->AddNoFrameRange(no_frame_start, masm_->pc_offset()); |
438 } | 438 } |
439 } | 439 } |
440 } | 440 } |
441 | 441 |
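A reading of the return sequence above (hedged; it assumes LoadLiteral resolves a pc-relative literal, which is how the macro assembler appears to be used here): LoadLiteral(ip0, 3 * kInstructionSize) loads the 64-bit word emitted by the dc64 three instructions further on, i.e. kXRegSize * (num_parameters + 1), the number of bytes occupied by the arguments plus the receiver. The add then drops exactly that many bytes from the stack before ret, and keeping the count in a patchable data word rather than an immediate is what keeps the sequence a fixed size when it is patched later. Worked example: for a two-parameter function the literal is 8 * (2 + 1) = 24, so the stack pointer is bumped by 24 bytes on return.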
442 | 442 |
443 void FullCodeGenerator::EffectContext::Plug(Variable* var) const { | 443 void FullCodeGenerator::EffectContext::Plug(Variable* var) const { |
444 ASSERT(var->IsStackAllocated() || var->IsContextSlot()); | 444 ASSERT(var->IsStackAllocated() || var->IsContextSlot()); |
445 } | 445 } |
446 | 446 |
(...skipping 238 matching lines...)
685 __ B(InvertCondition(cond), if_false); | 685 __ B(InvertCondition(cond), if_false); |
686 } else { | 686 } else { |
687 __ B(cond, if_true); | 687 __ B(cond, if_true); |
688 __ B(if_false); | 688 __ B(if_false); |
689 } | 689 } |
690 } | 690 } |
691 | 691 |
692 | 692 |
693 MemOperand FullCodeGenerator::StackOperand(Variable* var) { | 693 MemOperand FullCodeGenerator::StackOperand(Variable* var) { |
694 // Offset is negative because higher indexes are at lower addresses. | 694 // Offset is negative because higher indexes are at lower addresses. |
695 int offset = -var->index() * kXRegSizeInBytes; | 695 int offset = -var->index() * kXRegSize; |
696 // Adjust by a (parameter or local) base offset. | 696 // Adjust by a (parameter or local) base offset. |
697 if (var->IsParameter()) { | 697 if (var->IsParameter()) { |
698 offset += (info_->scope()->num_parameters() + 1) * kPointerSize; | 698 offset += (info_->scope()->num_parameters() + 1) * kPointerSize; |
699 } else { | 699 } else { |
700 offset += JavaScriptFrameConstants::kLocal0Offset; | 700 offset += JavaScriptFrameConstants::kLocal0Offset; |
701 } | 701 } |
702 return MemOperand(fp, offset); | 702 return MemOperand(fp, offset); |
703 } | 703 } |
704 | 704 |
705 | 705 |
(...skipping 471 matching lines...)
1177 | 1177 |
1178 // Generate code for doing the condition check. | 1178 // Generate code for doing the condition check. |
1179 PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS); | 1179 PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS); |
1180 __ Bind(&loop); | 1180 __ Bind(&loop); |
1181 // Load the current count to x0, load the length to x1. | 1181 // Load the current count to x0, load the length to x1. |
1182 __ PeekPair(x0, x1, 0); | 1182 __ PeekPair(x0, x1, 0); |
1183 __ Cmp(x0, x1); // Compare to the array length. | 1183 __ Cmp(x0, x1); // Compare to the array length. |
1184 __ B(hs, loop_statement.break_label()); | 1184 __ B(hs, loop_statement.break_label()); |
1185 | 1185 |
1186 // Get the current entry of the array into register r3. | 1186 // Get the current entry of the array into register r3. |
1187 __ Peek(x10, 2 * kXRegSizeInBytes); | 1187 __ Peek(x10, 2 * kXRegSize); |
1188 __ Add(x10, x10, Operand::UntagSmiAndScale(x0, kPointerSizeLog2)); | 1188 __ Add(x10, x10, Operand::UntagSmiAndScale(x0, kPointerSizeLog2)); |
1189 __ Ldr(x3, MemOperand(x10, FixedArray::kHeaderSize - kHeapObjectTag)); | 1189 __ Ldr(x3, MemOperand(x10, FixedArray::kHeaderSize - kHeapObjectTag)); |
1190 | 1190 |
1191 // Get the expected map from the stack or a smi in the | 1191 // Get the expected map from the stack or a smi in the |
1192 // permanent slow case into register x10. | 1192 // permanent slow case into register x10. |
1193 __ Peek(x2, 3 * kXRegSizeInBytes); | 1193 __ Peek(x2, 3 * kXRegSize); |
1194 | 1194 |
1195 // Check if the expected map still matches that of the enumerable. | 1195 // Check if the expected map still matches that of the enumerable. |
1196 // If not, we may have to filter the key. | 1196 // If not, we may have to filter the key. |
1197 Label update_each; | 1197 Label update_each; |
1198 __ Peek(x1, 4 * kXRegSizeInBytes); | 1198 __ Peek(x1, 4 * kXRegSize); |
1199 __ Ldr(x11, FieldMemOperand(x1, HeapObject::kMapOffset)); | 1199 __ Ldr(x11, FieldMemOperand(x1, HeapObject::kMapOffset)); |
1200 __ Cmp(x11, x2); | 1200 __ Cmp(x11, x2); |
1201 __ B(eq, &update_each); | 1201 __ B(eq, &update_each); |
1202 | 1202 |
1203 // For proxies, no filtering is done. | 1203 // For proxies, no filtering is done. |
1204 // TODO(rossberg): What if only a prototype is a proxy? Not specified yet. | 1204 // TODO(rossberg): What if only a prototype is a proxy? Not specified yet. |
1205 STATIC_ASSERT(kSmiTag == 0); | 1205 STATIC_ASSERT(kSmiTag == 0); |
1206 __ Cbz(x2, &update_each); | 1206 __ Cbz(x2, &update_each); |
1207 | 1207 |
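The Peek offsets in this for-in body follow from the loop state laid out on the stack by the loop header (top of stack downwards): the current index and the length in the first two slots (hence PeekPair(x0, x1, 0)), the fixed array of keys at 2 * kXRegSize, the expected map at 3 * kXRegSize, and the enumerable object itself at 4 * kXRegSize. With kXRegSize now meaning 8 bytes, those are byte offsets 16, 24 and 32.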
1208 // Convert the entry to a string or (smi) 0 if it isn't a property | 1208 // Convert the entry to a string or (smi) 0 if it isn't a property |
(...skipping 838 matching lines...)
2047 Label not_minus_zero, done; | 2047 Label not_minus_zero, done; |
2048 __ Smulh(x10, left, right); | 2048 __ Smulh(x10, left, right); |
2049 __ Cbnz(x10, &not_minus_zero); | 2049 __ Cbnz(x10, &not_minus_zero); |
2050 __ Eor(x11, left, right); | 2050 __ Eor(x11, left, right); |
2051 __ Tbnz(x11, kXSignBit, &stub_call); | 2051 __ Tbnz(x11, kXSignBit, &stub_call); |
2052 STATIC_ASSERT(kSmiTag == 0); | 2052 STATIC_ASSERT(kSmiTag == 0); |
2053 __ Mov(result, x10); | 2053 __ Mov(result, x10); |
2054 __ B(&done); | 2054 __ B(&done); |
2055 __ Bind(&not_minus_zero); | 2055 __ Bind(&not_minus_zero); |
2056 __ Cls(x11, x10); | 2056 __ Cls(x11, x10); |
2057 __ Cmp(x11, kXRegSize - kSmiShift); | 2057 __ Cmp(x11, kXRegSizeInBits - kSmiShift); |
2058 __ B(lt, &stub_call); | 2058 __ B(lt, &stub_call); |
2059 __ SmiTag(result, x10); | 2059 __ SmiTag(result, x10); |
2060 __ Bind(&done); | 2060 __ Bind(&done); |
2061 break; | 2061 break; |
2062 } | 2062 } |
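The Cmp in the MUL case is the one line in this hunk renamed in the other direction: the bound wants the register width in bits, so under the new naming it must read kXRegSizeInBits - kSmiShift (64 - 32 = 32 under the usual 64-bit smi layout) rather than kXRegSize - kSmiShift, which would now be 8 - 32. Cls counts the leading sign bits of the untagged product in x10; only if at least 32 of them match the sign does the value fit in the 32-bit smi payload, otherwise the code falls back to the stub.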
2063 case Token::BIT_OR: | 2063 case Token::BIT_OR: |
2064 __ Orr(result, left, right); | 2064 __ Orr(result, left, right); |
2065 break; | 2065 break; |
2066 case Token::BIT_AND: | 2066 case Token::BIT_AND: |
2067 __ And(result, left, right); | 2067 __ And(result, left, right); |
(...skipping 339 matching lines...)
2407 } | 2407 } |
2408 } | 2408 } |
2409 // Record source position for debugger. | 2409 // Record source position for debugger. |
2410 SetSourcePosition(expr->position()); | 2410 SetSourcePosition(expr->position()); |
2411 | 2411 |
2412 __ LoadObject(x2, FeedbackVector()); | 2412 __ LoadObject(x2, FeedbackVector()); |
2413 __ Mov(x3, Operand(Smi::FromInt(expr->CallFeedbackSlot()))); | 2413 __ Mov(x3, Operand(Smi::FromInt(expr->CallFeedbackSlot()))); |
2414 | 2414 |
2415 // Record call targets in unoptimized code. | 2415 // Record call targets in unoptimized code. |
2416 CallFunctionStub stub(arg_count, RECORD_CALL_TARGET); | 2416 CallFunctionStub stub(arg_count, RECORD_CALL_TARGET); |
2417 __ Peek(x1, (arg_count + 1) * kXRegSizeInBytes); | 2417 __ Peek(x1, (arg_count + 1) * kXRegSize); |
2418 __ CallStub(&stub); | 2418 __ CallStub(&stub); |
2419 RecordJSReturnSite(expr); | 2419 RecordJSReturnSite(expr); |
2420 // Restore context register. | 2420 // Restore context register. |
2421 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); | 2421 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
2422 context()->DropAndPlug(1, x0); | 2422 context()->DropAndPlug(1, x0); |
2423 } | 2423 } |
2424 | 2424 |
2425 | 2425 |
2426 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) { | 2426 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) { |
2427 ASM_LOCATION("FullCodeGenerator::EmitResolvePossiblyDirectEval"); | 2427 ASM_LOCATION("FullCodeGenerator::EmitResolvePossiblyDirectEval"); |
2428 // Prepare to push a copy of the first argument or undefined if it doesn't | 2428 // Prepare to push a copy of the first argument or undefined if it doesn't |
2429 // exist. | 2429 // exist. |
2430 if (arg_count > 0) { | 2430 if (arg_count > 0) { |
2431 __ Peek(x10, arg_count * kXRegSizeInBytes); | 2431 __ Peek(x10, arg_count * kXRegSize); |
2432 } else { | 2432 } else { |
2433 __ LoadRoot(x10, Heap::kUndefinedValueRootIndex); | 2433 __ LoadRoot(x10, Heap::kUndefinedValueRootIndex); |
2434 } | 2434 } |
2435 | 2435 |
2436 // Prepare to push the receiver of the enclosing function. | 2436 // Prepare to push the receiver of the enclosing function. |
2437 int receiver_offset = 2 + info_->scope()->num_parameters(); | 2437 int receiver_offset = 2 + info_->scope()->num_parameters(); |
2438 __ Ldr(x11, MemOperand(fp, receiver_offset * kPointerSize)); | 2438 __ Ldr(x11, MemOperand(fp, receiver_offset * kPointerSize)); |
2439 | 2439 |
2440 // Push. | 2440 // Push. |
2441 __ Push(x10, x11); | 2441 __ Push(x10, x11); |
(...skipping 50 matching lines...)
2492 // The runtime call returns a pair of values in x0 (function) and | 2492 // The runtime call returns a pair of values in x0 (function) and |
2493 // x1 (receiver). Touch up the stack with the right values. | 2493 // x1 (receiver). Touch up the stack with the right values. |
2494 __ PokePair(x1, x0, arg_count * kPointerSize); | 2494 __ PokePair(x1, x0, arg_count * kPointerSize); |
2495 } | 2495 } |
2496 | 2496 |
2497 // Record source position for debugger. | 2497 // Record source position for debugger. |
2498 SetSourcePosition(expr->position()); | 2498 SetSourcePosition(expr->position()); |
2499 | 2499 |
2500 // Call the evaluated function. | 2500 // Call the evaluated function. |
2501 CallFunctionStub stub(arg_count, NO_CALL_FUNCTION_FLAGS); | 2501 CallFunctionStub stub(arg_count, NO_CALL_FUNCTION_FLAGS); |
2502 __ Peek(x1, (arg_count + 1) * kXRegSizeInBytes); | 2502 __ Peek(x1, (arg_count + 1) * kXRegSize); |
2503 __ CallStub(&stub); | 2503 __ CallStub(&stub); |
2504 RecordJSReturnSite(expr); | 2504 RecordJSReturnSite(expr); |
2505 // Restore context register. | 2505 // Restore context register. |
2506 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); | 2506 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
2507 context()->DropAndPlug(1, x0); | 2507 context()->DropAndPlug(1, x0); |
2508 | 2508 |
2509 } else if (call_type == Call::GLOBAL_CALL) { | 2509 } else if (call_type == Call::GLOBAL_CALL) { |
2510 EmitCallWithIC(expr); | 2510 EmitCallWithIC(expr); |
2511 | 2511 |
2512 } else if (call_type == Call::LOOKUP_SLOT_CALL) { | 2512 } else if (call_type == Call::LOOKUP_SLOT_CALL) { |
(...skipping 82 matching lines...)
2595 for (int i = 0; i < arg_count; i++) { | 2595 for (int i = 0; i < arg_count; i++) { |
2596 VisitForStackValue(args->at(i)); | 2596 VisitForStackValue(args->at(i)); |
2597 } | 2597 } |
2598 | 2598 |
2599 // Call the construct call builtin that handles allocation and | 2599 // Call the construct call builtin that handles allocation and |
2600 // constructor invocation. | 2600 // constructor invocation. |
2601 SetSourcePosition(expr->position()); | 2601 SetSourcePosition(expr->position()); |
2602 | 2602 |
2603 // Load function and argument count into x1 and x0. | 2603 // Load function and argument count into x1 and x0. |
2604 __ Mov(x0, arg_count); | 2604 __ Mov(x0, arg_count); |
2605 __ Peek(x1, arg_count * kXRegSizeInBytes); | 2605 __ Peek(x1, arg_count * kXRegSize); |
2606 | 2606 |
2607 // Record call targets in unoptimized code. | 2607 // Record call targets in unoptimized code. |
2608 __ LoadObject(x2, FeedbackVector()); | 2608 __ LoadObject(x2, FeedbackVector()); |
2609 __ Mov(x3, Operand(Smi::FromInt(expr->CallNewFeedbackSlot()))); | 2609 __ Mov(x3, Operand(Smi::FromInt(expr->CallNewFeedbackSlot()))); |
2610 | 2610 |
2611 CallConstructStub stub(RECORD_CALL_TARGET); | 2611 CallConstructStub stub(RECORD_CALL_TARGET); |
2612 __ Call(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL); | 2612 __ Call(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL); |
2613 PrepareForBailoutForId(expr->ReturnId(), TOS_REG); | 2613 PrepareForBailoutForId(expr->ReturnId(), TOS_REG); |
2614 context()->Plug(x0); | 2614 context()->Plug(x0); |
2615 } | 2615 } |
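The differing Peek offsets between the call and construct paths fall out of the stack layouts: for a plain call the receiver plus arg_count arguments sit above the callee, so the function is fetched from (arg_count + 1) * kXRegSize; for new there is no receiver slot, so the constructor is found at arg_count * kXRegSize. E.g. with two arguments that is byte offset 24 for a call and 16 for a construct call.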
(...skipping 1463 matching lines...)
4079 if (expr->is_postfix()) { | 4079 if (expr->is_postfix()) { |
4080 if (!context()->IsEffect()) { | 4080 if (!context()->IsEffect()) { |
4081 // Save the result on the stack. If we have a named or keyed property | 4081 // Save the result on the stack. If we have a named or keyed property |
4082 // we store the result under the receiver that is currently on top | 4082 // we store the result under the receiver that is currently on top |
4083 // of the stack. | 4083 // of the stack. |
4084 switch (assign_type) { | 4084 switch (assign_type) { |
4085 case VARIABLE: | 4085 case VARIABLE: |
4086 __ Push(x0); | 4086 __ Push(x0); |
4087 break; | 4087 break; |
4088 case NAMED_PROPERTY: | 4088 case NAMED_PROPERTY: |
4089 __ Poke(x0, kXRegSizeInBytes); | 4089 __ Poke(x0, kXRegSize); |
4090 break; | 4090 break; |
4091 case KEYED_PROPERTY: | 4091 case KEYED_PROPERTY: |
4092 __ Poke(x0, 2 * kXRegSizeInBytes); | 4092 __ Poke(x0, 2 * kXRegSize); |
4093 break; | 4093 break; |
4094 } | 4094 } |
4095 } | 4095 } |
4096 } | 4096 } |
4097 | 4097 |
4098 __ Bind(&stub_call); | 4098 __ Bind(&stub_call); |
4099 __ Mov(x1, x0); | 4099 __ Mov(x1, x0); |
4100 __ Mov(x0, Operand(Smi::FromInt(count_value))); | 4100 __ Mov(x0, Operand(Smi::FromInt(count_value))); |
4101 | 4101 |
4102 // Record position before stub call. | 4102 // Record position before stub call. |
(...skipping 867 matching lines...)
4970 return previous_; | 4970 return previous_; |
4971 } | 4971 } |
4972 | 4972 |
4973 | 4973 |
4974 #undef __ | 4974 #undef __ |
4975 | 4975 |
4976 | 4976 |
4977 } } // namespace v8::internal | 4977 } } // namespace v8::internal |
4978 | 4978 |
4979 #endif // V8_TARGET_ARCH_A64 | 4979 #endif // V8_TARGET_ARCH_A64 |