| OLD | NEW |
| 1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2015 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #if V8_TARGET_ARCH_PPC | 5 #if V8_TARGET_ARCH_S390 |
| 6 | 6 |
| 7 #include "src/ast/scopes.h" | 7 #include "src/ast/scopes.h" |
| 8 #include "src/code-factory.h" | 8 #include "src/code-factory.h" |
| 9 #include "src/code-stubs.h" | 9 #include "src/code-stubs.h" |
| 10 #include "src/codegen.h" | 10 #include "src/codegen.h" |
| 11 #include "src/debug/debug.h" | 11 #include "src/debug/debug.h" |
| 12 #include "src/full-codegen/full-codegen.h" | 12 #include "src/full-codegen/full-codegen.h" |
| 13 #include "src/ic/ic.h" | 13 #include "src/ic/ic.h" |
| 14 #include "src/parsing/parser.h" | 14 #include "src/parsing/parser.h" |
| 15 | 15 |
| 16 #include "src/ppc/code-stubs-ppc.h" | 16 #include "src/s390/code-stubs-s390.h" |
| 17 #include "src/ppc/macro-assembler-ppc.h" | 17 #include "src/s390/macro-assembler-s390.h" |
| 18 | 18 |
| 19 namespace v8 { | 19 namespace v8 { |
| 20 namespace internal { | 20 namespace internal { |
| 21 | 21 |
| 22 #define __ ACCESS_MASM(masm()) | 22 #define __ ACCESS_MASM(masm()) |
| 23 | 23 |
| 24 // A patch site is a location in the code which it is possible to patch. This | 24 // A patch site is a location in the code which it is possible to patch. This |
| 25 // class has a number of methods to emit the code which is patchable and the | 25 // class has a number of methods to emit the code which is patchable and the |
| 26 // method EmitPatchInfo to record a marker back to the patchable code. This | 26 // method EmitPatchInfo to record a marker back to the patchable code. This |
| 27 // marker is a cmpi rx, #yyy instruction, and x * 0x0000ffff + yyy (raw 16 bit | 27 // marker is a chi r0, #yyy instruction, and yyy (raw 16 bit |
| 28 // immediate value is used) is the delta from the pc to the first instruction of | 28 // immediate value is used) is the delta from the pc to the first instruction of |
| 29 // the patchable code. | 29 // the patchable code. |
| 30 // See PatchInlinedSmiCode in ic-ppc.cc for the code that patches it | 30 // See PatchInlinedSmiCode in ic-s390.cc for the code that patches it |
| 31 class JumpPatchSite BASE_EMBEDDED { | 31 class JumpPatchSite BASE_EMBEDDED { |
| 32 public: | 32 public: |
| 33 explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) { | 33 explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) { |
| 34 #ifdef DEBUG | 34 #ifdef DEBUG |
| 35 info_emitted_ = false; | 35 info_emitted_ = false; |
| 36 #endif | 36 #endif |
| 37 } | 37 } |
| 38 | 38 |
| 39 ~JumpPatchSite() { DCHECK(patch_site_.is_bound() == info_emitted_); } | 39 ~JumpPatchSite() { DCHECK(patch_site_.is_bound() == info_emitted_); } |
| 40 | 40 |
| 41 // When initially emitting this ensure that a jump is always generated to skip | 41 // When initially emitting this ensure that a jump is always generated to skip |
| 42 // the inlined smi code. | 42 // the inlined smi code. |
| 43 void EmitJumpIfNotSmi(Register reg, Label* target) { | 43 void EmitJumpIfNotSmi(Register reg, Label* target) { |
| 44 DCHECK(!patch_site_.is_bound() && !info_emitted_); | 44 DCHECK(!patch_site_.is_bound() && !info_emitted_); |
| 45 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_); | |
| 46 __ bind(&patch_site_); | 45 __ bind(&patch_site_); |
| 47 __ cmp(reg, reg, cr0); | 46 __ CmpP(reg, reg); |
| 48 __ beq(target, cr0); // Always taken before patched. | 47 // Emit the Nop to make bigger place for patching on 31-bit |
| | 48 // as the TestIfSmi sequence uses 4-byte TMLL |
| | 49 #ifndef V8_TARGET_ARCH_S390X |
| | 50 __ nop(); |
| | 51 #endif |
| | 52 __ beq(target); // Always taken before patched. |
| 49 } | 53 } |
| 50 | 54 |
| 51 // When initially emitting this ensure that a jump is never generated to skip | 55 // When initially emitting this ensure that a jump is never generated to skip |
| 52 // the inlined smi code. | 56 // the inlined smi code. |
| 53 void EmitJumpIfSmi(Register reg, Label* target) { | 57 void EmitJumpIfSmi(Register reg, Label* target) { |
| 54 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_); | |
| 55 DCHECK(!patch_site_.is_bound() && !info_emitted_); | 58 DCHECK(!patch_site_.is_bound() && !info_emitted_); |
| 56 __ bind(&patch_site_); | 59 __ bind(&patch_site_); |
| 57 __ cmp(reg, reg, cr0); | 60 __ CmpP(reg, reg); |
| 58 __ bne(target, cr0); // Never taken before patched. | 61 // Emit the Nop to make bigger place for patching on 31-bit |
| | 62 // as the TestIfSmi sequence uses 4-byte TMLL |
| | 63 #ifndef V8_TARGET_ARCH_S390X |
| | 64 __ nop(); |
| | 65 #endif |
| | 66 __ bne(target); // Never taken before patched. |
| 59 } | 67 } |
| 60 | 68 |
| 61 void EmitPatchInfo() { | 69 void EmitPatchInfo() { |
| 62 if (patch_site_.is_bound()) { | 70 if (patch_site_.is_bound()) { |
| 63 int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_); | 71 int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site_); |
| 64 Register reg; | 72 DCHECK(is_int16(delta_to_patch_site)); |
| 65 // I believe this is using reg as the high bits of of the offset | 73 __ chi(r0, Operand(delta_to_patch_site)); |
| 66 reg.set_code(delta_to_patch_site / kOff16Mask); | |
| 67 __ cmpi(reg, Operand(delta_to_patch_site % kOff16Mask)); | |
| 68 #ifdef DEBUG | 74 #ifdef DEBUG |
| 69 info_emitted_ = true; | 75 info_emitted_ = true; |
| 70 #endif | 76 #endif |
| 71 } else { | 77 } else { |
| 72 __ nop(); // Signals no inlined code. | 78 __ nop(); |
| | 79 __ nop(); |
| 73 } | 80 } |
| 74 } | 81 } |
| 75 | 82 |
| 76 private: | 83 private: |
| 77 MacroAssembler* masm() { return masm_; } | 84 MacroAssembler* masm() { return masm_; } |
| 78 MacroAssembler* masm_; | 85 MacroAssembler* masm_; |
| 79 Label patch_site_; | 86 Label patch_site_; |
| 80 #ifdef DEBUG | 87 #ifdef DEBUG |
| 81 bool info_emitted_; | 88 bool info_emitted_; |
| 82 #endif | 89 #endif |
| 83 }; | 90 }; |
| 84 | 91 |
| 85 | |
| 86 // Generate code for a JS function. On entry to the function the receiver | 92 // Generate code for a JS function. On entry to the function the receiver |
| 87 // and arguments have been pushed on the stack left to right. The actual | 93 // and arguments have been pushed on the stack left to right. The actual |
| 88 // argument count matches the formal parameter count expected by the | 94 // argument count matches the formal parameter count expected by the |
| 89 // function. | 95 // function. |
| 90 // | 96 // |
| 91 // The live registers are: | 97 // The live registers are: |
| 92 // o r4: the JS function object being called (i.e., ourselves) | 98 // o r3: the JS function object being called (i.e., ourselves) |
| 93 // o r6: the new target value | 99 // o r5: the new target value |
| 94 // o cp: our context | 100 // o cp: our context |
| 95 // o fp: our caller's frame pointer (aka r31) | 101 // o fp: our caller's frame pointer |
| 96 // o sp: stack pointer | 102 // o sp: stack pointer |
| 97 // o lr: return address | 103 // o lr: return address |
| 98 // o ip: our own function entry (required by the prologue) | 104 // o ip: our own function entry (required by the prologue) |
| 99 // | 105 // |
| 100 // The function builds a JS frame. Please see JavaScriptFrameConstants in | 106 // The function builds a JS frame. Please see JavaScriptFrameConstants in |
| 101 // frames-ppc.h for its layout. | 107 // frames-s390.h for its layout. |
| 102 void FullCodeGenerator::Generate() { | 108 void FullCodeGenerator::Generate() { |
| 103 CompilationInfo* info = info_; | 109 CompilationInfo* info = info_; |
| 104 profiling_counter_ = isolate()->factory()->NewCell( | 110 profiling_counter_ = isolate()->factory()->NewCell( |
| 105 Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate())); | 111 Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate())); |
| 106 SetFunctionPosition(literal()); | 112 SetFunctionPosition(literal()); |
| 107 Comment cmnt(masm_, "[ function compiled by full code generator"); | 113 Comment cmnt(masm_, "[ function compiled by full code generator"); |
| 108 | 114 |
| 109 ProfileEntryHookStub::MaybeCallEntryHook(masm_); | 115 ProfileEntryHookStub::MaybeCallEntryHook(masm_); |
| 110 | 116 |
| 111 if (FLAG_debug_code && info->ExpectsJSReceiverAsReceiver()) { | 117 if (FLAG_debug_code && info->ExpectsJSReceiverAsReceiver()) { |
| 112 int receiver_offset = info->scope()->num_parameters() * kPointerSize; | 118 int receiver_offset = info->scope()->num_parameters() * kPointerSize; |
| 113 __ LoadP(r5, MemOperand(sp, receiver_offset), r0); | 119 __ LoadP(r4, MemOperand(sp, receiver_offset), r0); |
| 114 __ AssertNotSmi(r5); | 120 __ AssertNotSmi(r4); |
| 115 __ CompareObjectType(r5, r5, no_reg, FIRST_JS_RECEIVER_TYPE); | 121 __ CompareObjectType(r4, r4, no_reg, FIRST_JS_RECEIVER_TYPE); |
| 116 __ Assert(ge, kSloppyFunctionExpectsJSReceiverReceiver); | 122 __ Assert(ge, kSloppyFunctionExpectsJSReceiverReceiver); |
| 117 } | 123 } |
| 118 | 124 |
| 119 // Open a frame scope to indicate that there is a frame on the stack. The | 125 // Open a frame scope to indicate that there is a frame on the stack. The |
| 120 // MANUAL indicates that the scope shouldn't actually generate code to set up | 126 // MANUAL indicates that the scope shouldn't actually generate code to set up |
| 121 // the frame (that is done below). | 127 // the frame (that is done below). |
| 122 FrameScope frame_scope(masm_, StackFrame::MANUAL); | 128 FrameScope frame_scope(masm_, StackFrame::MANUAL); |
| 123 int prologue_offset = masm_->pc_offset(); | 129 int prologue_offset = masm_->pc_offset(); |
| 124 | 130 |
| 125 if (prologue_offset) { | |
| 126 // Prologue logic requires it's starting address in ip and the | |
| 127 // corresponding offset from the function entry. | |
| 128 prologue_offset += Instruction::kInstrSize; | |
| 129 __ addi(ip, ip, Operand(prologue_offset)); | |
| 130 } | |
| 131 info->set_prologue_offset(prologue_offset); | 131 info->set_prologue_offset(prologue_offset); |
| 132 __ Prologue(info->GeneratePreagedPrologue(), ip, prologue_offset); | 132 __ Prologue(info->GeneratePreagedPrologue(), ip, prologue_offset); |
| 133 | 133 |
| 134 { | 134 { |
| 135 Comment cmnt(masm_, "[ Allocate locals"); | 135 Comment cmnt(masm_, "[ Allocate locals"); |
| 136 int locals_count = info->scope()->num_stack_slots(); | 136 int locals_count = info->scope()->num_stack_slots(); |
| 137 // Generators allocate locals, if any, in context slots. | 137 // Generators allocate locals, if any, in context slots. |
| 138 DCHECK(!IsGeneratorFunction(info->literal()->kind()) || locals_count == 0); | 138 DCHECK(!IsGeneratorFunction(info->literal()->kind()) || locals_count == 0); |
| 139 OperandStackDepthIncrement(locals_count); | 139 OperandStackDepthIncrement(locals_count); |
| 140 if (locals_count > 0) { | 140 if (locals_count > 0) { |
| 141 if (locals_count >= 128) { | 141 if (locals_count >= 128) { |
| 142 Label ok; | 142 Label ok; |
| 143 __ Add(ip, sp, -(locals_count * kPointerSize), r0); | 143 __ AddP(ip, sp, Operand(-(locals_count * kPointerSize))); |
| 144 __ LoadRoot(r5, Heap::kRealStackLimitRootIndex); | 144 __ LoadRoot(r5, Heap::kRealStackLimitRootIndex); |
| 145 __ cmpl(ip, r5); | 145 __ CmpLogicalP(ip, r5); |
| 146 __ bc_short(ge, &ok); | 146 __ bge(&ok, Label::kNear); |
| 147 __ CallRuntime(Runtime::kThrowStackOverflow); | 147 __ CallRuntime(Runtime::kThrowStackOverflow); |
| 148 __ bind(&ok); | 148 __ bind(&ok); |
| 149 } | 149 } |
| 150 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex); | 150 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex); |
| 151 int kMaxPushes = FLAG_optimize_for_size ? 4 : 32; | 151 int kMaxPushes = FLAG_optimize_for_size ? 4 : 32; |
| 152 if (locals_count >= kMaxPushes) { | 152 if (locals_count >= kMaxPushes) { |
| 153 int loop_iterations = locals_count / kMaxPushes; | 153 int loop_iterations = locals_count / kMaxPushes; |
| 154 __ mov(r5, Operand(loop_iterations)); | 154 __ mov(r4, Operand(loop_iterations)); |
| 155 __ mtctr(r5); | |
| 156 Label loop_header; | 155 Label loop_header; |
| 157 __ bind(&loop_header); | 156 __ bind(&loop_header); |
| 158 // Do pushes. | 157 // Do pushes. |
| | 158 // TODO(joransiu): Use MVC for better performance |
| | 159 __ lay(sp, MemOperand(sp, -kMaxPushes * kPointerSize)); |
| 159 for (int i = 0; i < kMaxPushes; i++) { | 160 for (int i = 0; i < kMaxPushes; i++) { |
| 160 __ push(ip); | 161 __ StoreP(ip, MemOperand(sp, i * kPointerSize)); |
| 161 } | 162 } |
| 162 // Continue loop if not done. | 163 // Continue loop if not done. |
| 163 __ bdnz(&loop_header); | 164 __ BranchOnCount(r4, &loop_header); |
| 164 } | 165 } |
| 165 int remaining = locals_count % kMaxPushes; | 166 int remaining = locals_count % kMaxPushes; |
| 166 // Emit the remaining pushes. | 167 // Emit the remaining pushes. |
| 167 for (int i = 0; i < remaining; i++) { | 168 // TODO(joransiu): Use MVC for better performance |
| 168 __ push(ip); | 169 if (remaining > 0) { |
| | 170 __ lay(sp, MemOperand(sp, -remaining * kPointerSize)); |
| | 171 for (int i = 0; i < remaining; i++) { |
| | 172 __ StoreP(ip, MemOperand(sp, i * kPointerSize)); |
| | 173 } |
| 169 } | 174 } |
| 170 } | 175 } |
| 171 } | 176 } |
| 172 | 177 |
| 173 bool function_in_register_r4 = true; | 178 bool function_in_register_r3 = true; |
| 174 | 179 |
| 175 // Possibly allocate a local context. | 180 // Possibly allocate a local context. |
| 176 if (info->scope()->num_heap_slots() > 0) { | 181 if (info->scope()->num_heap_slots() > 0) { |
| 177 // Argument to NewContext is the function, which is still in r4. | 182 // Argument to NewContext is the function, which is still in r3. |
| 178 Comment cmnt(masm_, "[ Allocate context"); | 183 Comment cmnt(masm_, "[ Allocate context"); |
| 179 bool need_write_barrier = true; | 184 bool need_write_barrier = true; |
| 180 int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS; | 185 int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS; |
| 181 if (info->scope()->is_script_scope()) { | 186 if (info->scope()->is_script_scope()) { |
| 182 __ push(r4); | 187 __ push(r3); |
| 183 __ Push(info->scope()->GetScopeInfo(info->isolate())); | 188 __ Push(info->scope()->GetScopeInfo(info->isolate())); |
| 184 __ CallRuntime(Runtime::kNewScriptContext); | 189 __ CallRuntime(Runtime::kNewScriptContext); |
| 185 PrepareForBailoutForId(BailoutId::ScriptContext(), TOS_REG); | 190 PrepareForBailoutForId(BailoutId::ScriptContext(), TOS_REG); |
| 186 // The new target value is not used, clobbering is safe. | 191 // The new target value is not used, clobbering is safe. |
| 187 DCHECK_NULL(info->scope()->new_target_var()); | 192 DCHECK_NULL(info->scope()->new_target_var()); |
| 188 } else { | 193 } else { |
| 189 if (info->scope()->new_target_var() != nullptr) { | 194 if (info->scope()->new_target_var() != nullptr) { |
| 190 __ push(r6); // Preserve new target. | 195 __ push(r5); // Preserve new target. |
| 191 } | 196 } |
| 192 if (slots <= FastNewContextStub::kMaximumSlots) { | 197 if (slots <= FastNewContextStub::kMaximumSlots) { |
| 193 FastNewContextStub stub(isolate(), slots); | 198 FastNewContextStub stub(isolate(), slots); |
| 194 __ CallStub(&stub); | 199 __ CallStub(&stub); |
| 195 // Result of FastNewContextStub is always in new space. | 200 // Result of FastNewContextStub is always in new space. |
| 196 need_write_barrier = false; | 201 need_write_barrier = false; |
| 197 } else { | 202 } else { |
| 198 __ push(r4); | 203 __ push(r3); |
| 199 __ CallRuntime(Runtime::kNewFunctionContext); | 204 __ CallRuntime(Runtime::kNewFunctionContext); |
| 200 } | 205 } |
| 201 if (info->scope()->new_target_var() != nullptr) { | 206 if (info->scope()->new_target_var() != nullptr) { |
| 202 __ pop(r6); // Restore new target. | 207 __ pop(r5); // Restore new target. |
| 203 } | 208 } |
| 204 } | 209 } |
| 205 function_in_register_r4 = false; | 210 function_in_register_r3 = false; |
| 206 // Context is returned in r3. It replaces the context passed to us. | 211 // Context is returned in r2. It replaces the context passed to us. |
| 207 // It's saved in the stack and kept live in cp. | 212 // It's saved in the stack and kept live in cp. |
| 208 __ mr(cp, r3); | 213 __ LoadRR(cp, r2); |
| 209 __ StoreP(r3, MemOperand(fp, StandardFrameConstants::kContextOffset)); | 214 __ StoreP(r2, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
| 210 // Copy any necessary parameters into the context. | 215 // Copy any necessary parameters into the context. |
| 211 int num_parameters = info->scope()->num_parameters(); | 216 int num_parameters = info->scope()->num_parameters(); |
| 212 int first_parameter = info->scope()->has_this_declaration() ? -1 : 0; | 217 int first_parameter = info->scope()->has_this_declaration() ? -1 : 0; |
| 213 for (int i = first_parameter; i < num_parameters; i++) { | 218 for (int i = first_parameter; i < num_parameters; i++) { |
| 214 Variable* var = (i == -1) ? scope()->receiver() : scope()->parameter(i); | 219 Variable* var = (i == -1) ? scope()->receiver() : scope()->parameter(i); |
| 215 if (var->IsContextSlot()) { | 220 if (var->IsContextSlot()) { |
| 216 int parameter_offset = StandardFrameConstants::kCallerSPOffset + | 221 int parameter_offset = StandardFrameConstants::kCallerSPOffset + |
| 217 (num_parameters - 1 - i) * kPointerSize; | 222 (num_parameters - 1 - i) * kPointerSize; |
| 218 // Load parameter from stack. | 223 // Load parameter from stack. |
| 219 __ LoadP(r3, MemOperand(fp, parameter_offset), r0); | 224 __ LoadP(r2, MemOperand(fp, parameter_offset), r0); |
| 220 // Store it in the context. | 225 // Store it in the context. |
| 221 MemOperand target = ContextMemOperand(cp, var->index()); | 226 MemOperand target = ContextMemOperand(cp, var->index()); |
| 222 __ StoreP(r3, target, r0); | 227 __ StoreP(r2, target); |
| 223 | 228 |
| 224 // Update the write barrier. | 229 // Update the write barrier. |
| 225 if (need_write_barrier) { | 230 if (need_write_barrier) { |
| 226 __ RecordWriteContextSlot(cp, target.offset(), r3, r5, | 231 __ RecordWriteContextSlot(cp, target.offset(), r2, r4, |
| 227 kLRHasBeenSaved, kDontSaveFPRegs); | 232 kLRHasBeenSaved, kDontSaveFPRegs); |
| 228 } else if (FLAG_debug_code) { | 233 } else if (FLAG_debug_code) { |
| 229 Label done; | 234 Label done; |
| 230 __ JumpIfInNewSpace(cp, r3, &done); | 235 __ JumpIfInNewSpace(cp, r2, &done); |
| 231 __ Abort(kExpectedNewSpaceObject); | 236 __ Abort(kExpectedNewSpaceObject); |
| 232 __ bind(&done); | 237 __ bind(&done); |
| 233 } | 238 } |
| 234 } | 239 } |
| 235 } | 240 } |
| 236 } | 241 } |
| 237 | 242 |
| 238 // Register holding this function and new target are both trashed in case we | 243 // Register holding this function and new target are both trashed in case we |
| 239 // bailout here. But since that can happen only when new target is not used | 244 // bailout here. But since that can happen only when new target is not used |
| 240 // and we allocate a context, the value of |function_in_register| is correct. | 245 // and we allocate a context, the value of |function_in_register| is correct. |
| 241 PrepareForBailoutForId(BailoutId::FunctionContext(), NO_REGISTERS); | 246 PrepareForBailoutForId(BailoutId::FunctionContext(), NO_REGISTERS); |
| 242 | 247 |
| 243 // Possibly set up a local binding to the this function which is used in | 248 // Possibly set up a local binding to the this function which is used in |
| 244 // derived constructors with super calls. | 249 // derived constructors with super calls. |
| 245 Variable* this_function_var = scope()->this_function_var(); | 250 Variable* this_function_var = scope()->this_function_var(); |
| 246 if (this_function_var != nullptr) { | 251 if (this_function_var != nullptr) { |
| 247 Comment cmnt(masm_, "[ This function"); | 252 Comment cmnt(masm_, "[ This function"); |
| 248 if (!function_in_register_r4) { | 253 if (!function_in_register_r3) { |
| 249 __ LoadP(r4, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); | 254 __ LoadP(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); |
| 250 // The write barrier clobbers register again, keep it marked as such. | 255 // The write barrier clobbers register again, keep it marked as such. |
| 251 } | 256 } |
| 252 SetVar(this_function_var, r4, r3, r5); | 257 SetVar(this_function_var, r3, r2, r4); |
| 253 } | 258 } |
| 254 | 259 |
| 255 // Possibly set up a local binding to the new target value. | 260 // Possibly set up a local binding to the new target value. |
| 256 Variable* new_target_var = scope()->new_target_var(); | 261 Variable* new_target_var = scope()->new_target_var(); |
| 257 if (new_target_var != nullptr) { | 262 if (new_target_var != nullptr) { |
| 258 Comment cmnt(masm_, "[ new.target"); | 263 Comment cmnt(masm_, "[ new.target"); |
| 259 SetVar(new_target_var, r6, r3, r5); | 264 SetVar(new_target_var, r5, r2, r4); |
| 260 } | 265 } |
| 261 | 266 |
| 262 // Possibly allocate RestParameters | 267 // Possibly allocate RestParameters |
| 263 int rest_index; | 268 int rest_index; |
| 264 Variable* rest_param = scope()->rest_parameter(&rest_index); | 269 Variable* rest_param = scope()->rest_parameter(&rest_index); |
| 265 if (rest_param) { | 270 if (rest_param) { |
| 266 Comment cmnt(masm_, "[ Allocate rest parameter array"); | 271 Comment cmnt(masm_, "[ Allocate rest parameter array"); |
| 267 if (!function_in_register_r4) { | 272 |
| 268 __ LoadP(r4, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); | 273 if (!function_in_register_r3) { |
| | 274 __ LoadP(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); |
| 269 } | 275 } |
| 270 FastNewRestParameterStub stub(isolate()); | 276 FastNewRestParameterStub stub(isolate()); |
| 271 __ CallStub(&stub); | 277 __ CallStub(&stub); |
| 272 function_in_register_r4 = false; | 278 |
| 273 SetVar(rest_param, r3, r4, r5); | 279 function_in_register_r3 = false; |
| | 280 SetVar(rest_param, r2, r3, r4); |
| 274 } | 281 } |
| 275 | 282 |
| 276 Variable* arguments = scope()->arguments(); | 283 Variable* arguments = scope()->arguments(); |
| 277 if (arguments != NULL) { | 284 if (arguments != NULL) { |
| 278 // Function uses arguments object. | 285 // Function uses arguments object. |
| 279 Comment cmnt(masm_, "[ Allocate arguments object"); | 286 Comment cmnt(masm_, "[ Allocate arguments object"); |
| 280 if (!function_in_register_r4) { | 287 if (!function_in_register_r3) { |
| 281 // Load this again, if it's used by the local context below. | 288 // Load this again, if it's used by the local context below. |
| 282 __ LoadP(r4, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); | 289 __ LoadP(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); |
| 283 } | 290 } |
| 284 if (is_strict(language_mode()) || !has_simple_parameters()) { | 291 if (is_strict(language_mode()) || !has_simple_parameters()) { |
| 285 FastNewStrictArgumentsStub stub(isolate()); | 292 FastNewStrictArgumentsStub stub(isolate()); |
| 286 __ CallStub(&stub); | 293 __ CallStub(&stub); |
| 287 } else if (literal()->has_duplicate_parameters()) { | 294 } else if (literal()->has_duplicate_parameters()) { |
| 288 __ Push(r4); | 295 __ Push(r3); |
| 289 __ CallRuntime(Runtime::kNewSloppyArguments_Generic); | 296 __ CallRuntime(Runtime::kNewSloppyArguments_Generic); |
| 290 } else { | 297 } else { |
| 291 FastNewSloppyArgumentsStub stub(isolate()); | 298 FastNewSloppyArgumentsStub stub(isolate()); |
| 292 __ CallStub(&stub); | 299 __ CallStub(&stub); |
| 293 } | 300 } |
| 294 | 301 |
| 295 SetVar(arguments, r3, r4, r5); | 302 SetVar(arguments, r2, r3, r4); |
| 296 } | 303 } |
| 297 | 304 |
| 298 if (FLAG_trace) { | 305 if (FLAG_trace) { |
| 299 __ CallRuntime(Runtime::kTraceEnter); | 306 __ CallRuntime(Runtime::kTraceEnter); |
| 300 } | 307 } |
| 301 | 308 |
| 302 // Visit the declarations and body unless there is an illegal | 309 // Visit the declarations and body unless there is an illegal |
| 303 // redeclaration. | 310 // redeclaration. |
| 304 if (scope()->HasIllegalRedeclaration()) { | 311 if (scope()->HasIllegalRedeclaration()) { |
| 305 EmitIllegalRedeclaration(); | 312 Comment cmnt(masm_, "[ Declarations"); |
| | 313 VisitForEffect(scope()->GetIllegalRedeclaration()); |
| | 314 |
| 306 } else { | 315 } else { |
| 307 PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS); | 316 PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS); |
| 308 { | 317 { |
| 309 Comment cmnt(masm_, "[ Declarations"); | 318 Comment cmnt(masm_, "[ Declarations"); |
| 310 VisitDeclarations(scope()->declarations()); | 319 VisitDeclarations(scope()->declarations()); |
| 311 } | 320 } |
| 312 | 321 |
| 313 // Assert that the declarations do not use ICs. Otherwise the debugger | 322 // Assert that the declarations do not use ICs. Otherwise the debugger |
| 314 // won't be able to redirect a PC at an IC to the correct IC in newly | 323 // won't be able to redirect a PC at an IC to the correct IC in newly |
| 315 // recompiled code. | 324 // recompiled code. |
| 316 DCHECK_EQ(0, ic_total_count_); | 325 DCHECK_EQ(0, ic_total_count_); |
| 317 | 326 |
| 318 { | 327 { |
| 319 Comment cmnt(masm_, "[ Stack check"); | 328 Comment cmnt(masm_, "[ Stack check"); |
| 320 PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS); | 329 PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS); |
| 321 Label ok; | 330 Label ok; |
| 322 __ LoadRoot(ip, Heap::kStackLimitRootIndex); | 331 __ LoadRoot(ip, Heap::kStackLimitRootIndex); |
| 323 __ cmpl(sp, ip); | 332 __ CmpLogicalP(sp, ip); |
| 324 __ bc_short(ge, &ok); | 333 __ bge(&ok, Label::kNear); |
| 325 __ Call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET); | 334 __ Call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET); |
| 326 __ bind(&ok); | 335 __ bind(&ok); |
| 327 } | 336 } |
| 328 | 337 |
| 329 { | 338 { |
| 330 Comment cmnt(masm_, "[ Body"); | 339 Comment cmnt(masm_, "[ Body"); |
| 331 DCHECK(loop_depth() == 0); | 340 DCHECK(loop_depth() == 0); |
| 332 VisitStatements(literal()->body()); | 341 VisitStatements(literal()->body()); |
| 333 DCHECK(loop_depth() == 0); | 342 DCHECK(loop_depth() == 0); |
| 334 } | 343 } |
| 335 } | 344 } |
| 336 | 345 |
| 337 // Always emit a 'return undefined' in case control fell off the end of | 346 // Always emit a 'return undefined' in case control fell off the end of |
| 338 // the body. | 347 // the body. |
| 339 { | 348 { |
| 340 Comment cmnt(masm_, "[ return <undefined>;"); | 349 Comment cmnt(masm_, "[ return <undefined>;"); |
| 341 __ LoadRoot(r3, Heap::kUndefinedValueRootIndex); | 350 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex); |
| 342 } | 351 } |
| 343 EmitReturnSequence(); | 352 EmitReturnSequence(); |
| | 353 } |
| 344 | 354 |
| 345 if (HasStackOverflow()) { | 355 void FullCodeGenerator::ClearAccumulator() { |
| 346 masm_->AbortConstantPoolBuilding(); | 356 __ LoadSmiLiteral(r2, Smi::FromInt(0)); |
| | 357 } |
| | 358 |
| | 359 void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) { |
| | 360 __ mov(r4, Operand(profiling_counter_)); |
| | 361 intptr_t smi_delta = reinterpret_cast<intptr_t>(Smi::FromInt(delta)); |
| | 362 if (CpuFeatures::IsSupported(GENERAL_INSTR_EXT) && is_int8(-smi_delta)) { |
| | 363 __ AddP(FieldMemOperand(r4, Cell::kValueOffset), Operand(-smi_delta)); |
| | 364 __ LoadP(r5, FieldMemOperand(r4, Cell::kValueOffset)); |
| | 365 } else { |
| | 366 __ LoadP(r5, FieldMemOperand(r4, Cell::kValueOffset)); |
| | 367 __ SubSmiLiteral(r5, r5, Smi::FromInt(delta), r0); |
| | 368 __ StoreP(r5, FieldMemOperand(r4, Cell::kValueOffset)); |
| 347 } | 369 } |
| 348 } | 370 } |
| 349 | 371 |
| 350 | |
| 351 void FullCodeGenerator::ClearAccumulator() { | |
| 352 __ LoadSmiLiteral(r3, Smi::FromInt(0)); | |
| 353 } | |
| 354 | |
| 355 | |
| 356 void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) { | |
| 357 __ mov(r5, Operand(profiling_counter_)); | |
| 358 __ LoadP(r6, FieldMemOperand(r5, Cell::kValueOffset)); | |
| 359 __ SubSmiLiteral(r6, r6, Smi::FromInt(delta), r0); | |
| 360 __ StoreP(r6, FieldMemOperand(r5, Cell::kValueOffset), r0); | |
| 361 } | |
| 362 | |
| 363 | |
| 364 void FullCodeGenerator::EmitProfilingCounterReset() { | 372 void FullCodeGenerator::EmitProfilingCounterReset() { |
| 365 int reset_value = FLAG_interrupt_budget; | 373 int reset_value = FLAG_interrupt_budget; |
| 366 __ mov(r5, Operand(profiling_counter_)); | 374 __ mov(r4, Operand(profiling_counter_)); |
| 367 __ LoadSmiLiteral(r6, Smi::FromInt(reset_value)); | 375 __ LoadSmiLiteral(r5, Smi::FromInt(reset_value)); |
| 368 __ StoreP(r6, FieldMemOperand(r5, Cell::kValueOffset), r0); | 376 __ StoreP(r5, FieldMemOperand(r4, Cell::kValueOffset)); |
| 369 } | 377 } |
| 370 | 378 |
| 371 | |
| 372 void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt, | 379 void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt, |
| 373 Label* back_edge_target) { | 380 Label* back_edge_target) { |
| 374 Comment cmnt(masm_, "[ Back edge bookkeeping"); | 381 Comment cmnt(masm_, "[ Back edge bookkeeping"); |
| 375 Label ok; | 382 Label ok; |
| 376 | 383 |
| 377 DCHECK(back_edge_target->is_bound()); | 384 DCHECK(back_edge_target->is_bound()); |
| 378 int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target) + | 385 int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target) + |
| 379 kCodeSizeMultiplier / 2; | 386 kCodeSizeMultiplier / 2; |
| 380 int weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier)); | 387 int weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier)); |
| 381 EmitProfilingCounterDecrement(weight); | 388 EmitProfilingCounterDecrement(weight); |
| 382 { | 389 { |
| 383 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_); | |
| 384 Assembler::BlockConstantPoolEntrySharingScope prevent_entry_sharing(masm_); | |
| 385 // BackEdgeTable::PatchAt manipulates this sequence. | 390 // BackEdgeTable::PatchAt manipulates this sequence. |
| 386 __ cmpi(r6, Operand::Zero()); | 391 __ bge(&ok, Label::kNear); |
| 387 __ bc_short(ge, &ok); | |
| 388 __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET); | 392 __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET); |
| 389 | 393 |
| 390 // Record a mapping of this PC offset to the OSR id. This is used to find | 394 // Record a mapping of this PC offset to the OSR id. This is used to find |
| 391 // the AST id from the unoptimized code in order to use it as a key into | 395 // the AST id from the unoptimized code in order to use it as a key into |
| 392 // the deoptimization input data found in the optimized code. | 396 // the deoptimization input data found in the optimized code. |
| 393 RecordBackEdge(stmt->OsrEntryId()); | 397 RecordBackEdge(stmt->OsrEntryId()); |
| 394 } | 398 } |
| 395 EmitProfilingCounterReset(); | 399 EmitProfilingCounterReset(); |
| 396 | 400 |
| 397 __ bind(&ok); | 401 __ bind(&ok); |
| 398 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS); | 402 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS); |
| 399 // Record a mapping of the OSR id to this PC. This is used if the OSR | 403 // Record a mapping of the OSR id to this PC. This is used if the OSR |
| 400 // entry becomes the target of a bailout. We don't expect it to be, but | 404 // entry becomes the target of a bailout. We don't expect it to be, but |
| 401 // we want it to work if it is. | 405 // we want it to work if it is. |
| 402 PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS); | 406 PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS); |
| 403 } | 407 } |
| 404 | 408 |
| 405 void FullCodeGenerator::EmitProfilingCounterHandlingForReturnSequence( | 409 void FullCodeGenerator::EmitProfilingCounterHandlingForReturnSequence( |
| 406 bool is_tail_call) { | 410 bool is_tail_call) { |
| 407 // Pretend that the exit is a backwards jump to the entry. | 411 // Pretend that the exit is a backwards jump to the entry. |
| 408 int weight = 1; | 412 int weight = 1; |
| 409 if (info_->ShouldSelfOptimize()) { | 413 if (info_->ShouldSelfOptimize()) { |
| 410 weight = FLAG_interrupt_budget / FLAG_self_opt_count; | 414 weight = FLAG_interrupt_budget / FLAG_self_opt_count; |
| 411 } else { | 415 } else { |
| 412 int distance = masm_->pc_offset() + kCodeSizeMultiplier / 2; | 416 int distance = masm_->pc_offset() + kCodeSizeMultiplier / 2; |
| 413 weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier)); | 417 weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier)); |
| 414 } | 418 } |
| 415 EmitProfilingCounterDecrement(weight); | 419 EmitProfilingCounterDecrement(weight); |
| 416 Label ok; | 420 Label ok; |
| 417 __ cmpi(r6, Operand::Zero()); | 421 __ CmpP(r5, Operand::Zero()); |
| 418 __ bge(&ok); | 422 __ bge(&ok); |
| 419 // Don't need to save result register if we are going to do a tail call. | 423 // Don't need to save result register if we are going to do a tail call. |
| 420 if (!is_tail_call) { | 424 if (!is_tail_call) { |
| 421 __ push(r3); | 425 __ push(r2); |
| 422 } | 426 } |
| 423 __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET); | 427 __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET); |
| 424 if (!is_tail_call) { | 428 if (!is_tail_call) { |
| 425 __ pop(r3); | 429 __ pop(r2); |
| 426 } | 430 } |
| 427 EmitProfilingCounterReset(); | 431 EmitProfilingCounterReset(); |
| 428 __ bind(&ok); | 432 __ bind(&ok); |
| 429 } | 433 } |
| 430 | 434 |
| 431 void FullCodeGenerator::EmitReturnSequence() { | 435 void FullCodeGenerator::EmitReturnSequence() { |
| 432 Comment cmnt(masm_, "[ Return sequence"); | 436 Comment cmnt(masm_, "[ Return sequence"); |
| 433 if (return_label_.is_bound()) { | 437 if (return_label_.is_bound()) { |
| 434 __ b(&return_label_); | 438 __ b(&return_label_); |
| 435 } else { | 439 } else { |
| 436 __ bind(&return_label_); | 440 __ bind(&return_label_); |
| 437 if (FLAG_trace) { | 441 if (FLAG_trace) { |
| 438 // Push the return value on the stack as the parameter. | 442 // Push the return value on the stack as the parameter. |
| 439 // Runtime::TraceExit returns its parameter in r3 | 443 // Runtime::TraceExit returns its parameter in r2 |
| 440 __ push(r3); | 444 __ push(r2); |
| 441 __ CallRuntime(Runtime::kTraceExit); | 445 __ CallRuntime(Runtime::kTraceExit); |
| 442 } | 446 } |
| 443 EmitProfilingCounterHandlingForReturnSequence(false); | 447 EmitProfilingCounterHandlingForReturnSequence(false); |
| 444 | 448 |
| 445 // Make sure that the constant pool is not emitted inside of the return | 449 // Make sure that the constant pool is not emitted inside of the return |
| 446 // sequence. | 450 // sequence. |
| 447 { | 451 { |
| 448 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_); | 452 // Here we use masm_-> instead of the __ macro to avoid the code coverage |
| 453 // tool from instrumenting as we rely on the code size here. |
| 449 int32_t arg_count = info_->scope()->num_parameters() + 1; | 454 int32_t arg_count = info_->scope()->num_parameters() + 1; |
| 450 int32_t sp_delta = arg_count * kPointerSize; | 455 int32_t sp_delta = arg_count * kPointerSize; |
| 451 SetReturnPosition(literal()); | 456 SetReturnPosition(literal()); |
| 452 __ LeaveFrame(StackFrame::JAVA_SCRIPT, sp_delta); | 457 __ LeaveFrame(StackFrame::JAVA_SCRIPT, sp_delta); |
| 453 __ blr(); | 458 |
| 459 __ Ret(); |
| 454 } | 460 } |
| 455 } | 461 } |
| 456 } | 462 } |
| 457 | 463 |
| 458 | |
| 459 void FullCodeGenerator::StackValueContext::Plug(Variable* var) const { | 464 void FullCodeGenerator::StackValueContext::Plug(Variable* var) const { |
| 460 DCHECK(var->IsStackAllocated() || var->IsContextSlot()); | 465 DCHECK(var->IsStackAllocated() || var->IsContextSlot()); |
| 461 codegen()->GetVar(result_register(), var); | 466 codegen()->GetVar(result_register(), var); |
| 462 codegen()->PushOperand(result_register()); | 467 codegen()->PushOperand(result_register()); |
| 463 } | 468 } |
| 464 | 469 |
| 465 | |
| 466 void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {} | 470 void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {} |
| 467 | 471 |
| 468 | |
| 469 void FullCodeGenerator::AccumulatorValueContext::Plug( | 472 void FullCodeGenerator::AccumulatorValueContext::Plug( |
| 470 Heap::RootListIndex index) const { | 473 Heap::RootListIndex index) const { |
| 471 __ LoadRoot(result_register(), index); | 474 __ LoadRoot(result_register(), index); |
| 472 } | 475 } |
| 473 | 476 |
| 474 | |
| 475 void FullCodeGenerator::StackValueContext::Plug( | 477 void FullCodeGenerator::StackValueContext::Plug( |
| 476 Heap::RootListIndex index) const { | 478 Heap::RootListIndex index) const { |
| 477 __ LoadRoot(result_register(), index); | 479 __ LoadRoot(result_register(), index); |
| 478 codegen()->PushOperand(result_register()); | 480 codegen()->PushOperand(result_register()); |
| 479 } | 481 } |
| 480 | 482 |
| 481 | |
| 482 void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const { | 483 void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const { |
| 483 codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_, | 484 codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_, |
| 484 false_label_); | 485 false_label_); |
| 485 if (index == Heap::kUndefinedValueRootIndex || | 486 if (index == Heap::kUndefinedValueRootIndex || |
| 486 index == Heap::kNullValueRootIndex || | 487 index == Heap::kNullValueRootIndex || |
| 487 index == Heap::kFalseValueRootIndex) { | 488 index == Heap::kFalseValueRootIndex) { |
| 488 if (false_label_ != fall_through_) __ b(false_label_); | 489 if (false_label_ != fall_through_) __ b(false_label_); |
| 489 } else if (index == Heap::kTrueValueRootIndex) { | 490 } else if (index == Heap::kTrueValueRootIndex) { |
| 490 if (true_label_ != fall_through_) __ b(true_label_); | 491 if (true_label_ != fall_through_) __ b(true_label_); |
| 491 } else { | 492 } else { |
| 492 __ LoadRoot(result_register(), index); | 493 __ LoadRoot(result_register(), index); |
| 493 codegen()->DoTest(this); | 494 codegen()->DoTest(this); |
| 494 } | 495 } |
| 495 } | 496 } |
| 496 | 497 |
| 497 | |
| 498 void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {} | 498 void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {} |
| 499 | 499 |
| 500 | |
| 501 void FullCodeGenerator::AccumulatorValueContext::Plug( | 500 void FullCodeGenerator::AccumulatorValueContext::Plug( |
| 502 Handle<Object> lit) const { | 501 Handle<Object> lit) const { |
| 503 __ mov(result_register(), Operand(lit)); | 502 __ mov(result_register(), Operand(lit)); |
| 504 } | 503 } |
| 505 | 504 |
| 506 | |
| 507 void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const { | 505 void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const { |
| 508 // Immediates cannot be pushed directly. | 506 // Immediates cannot be pushed directly. |
| 509 __ mov(result_register(), Operand(lit)); | 507 __ mov(result_register(), Operand(lit)); |
| 510 codegen()->PushOperand(result_register()); | 508 codegen()->PushOperand(result_register()); |
| 511 } | 509 } |
| 512 | 510 |
| 513 | |
| 514 void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const { | 511 void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const { |
| 515 codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_, | 512 codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_, |
| 516 false_label_); | 513 false_label_); |
| 517 DCHECK(lit->IsNull() || lit->IsUndefined() || !lit->IsUndetectable()); | 514 DCHECK(lit->IsNull() || lit->IsUndefined() || !lit->IsUndetectableObject()); |
| 518 if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) { | 515 if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) { |
| 519 if (false_label_ != fall_through_) __ b(false_label_); | 516 if (false_label_ != fall_through_) __ b(false_label_); |
| 520 } else if (lit->IsTrue() || lit->IsJSObject()) { | 517 } else if (lit->IsTrue() || lit->IsJSObject()) { |
| 521 if (true_label_ != fall_through_) __ b(true_label_); | 518 if (true_label_ != fall_through_) __ b(true_label_); |
| 522 } else if (lit->IsString()) { | 519 } else if (lit->IsString()) { |
| 523 if (String::cast(*lit)->length() == 0) { | 520 if (String::cast(*lit)->length() == 0) { |
| 524 if (false_label_ != fall_through_) __ b(false_label_); | 521 if (false_label_ != fall_through_) __ b(false_label_); |
| 525 } else { | 522 } else { |
| 526 if (true_label_ != fall_through_) __ b(true_label_); | 523 if (true_label_ != fall_through_) __ b(true_label_); |
| 527 } | 524 } |
| 528 } else if (lit->IsSmi()) { | 525 } else if (lit->IsSmi()) { |
| 529 if (Smi::cast(*lit)->value() == 0) { | 526 if (Smi::cast(*lit)->value() == 0) { |
| 530 if (false_label_ != fall_through_) __ b(false_label_); | 527 if (false_label_ != fall_through_) __ b(false_label_); |
| 531 } else { | 528 } else { |
| 532 if (true_label_ != fall_through_) __ b(true_label_); | 529 if (true_label_ != fall_through_) __ b(true_label_); |
| 533 } | 530 } |
| 534 } else { | 531 } else { |
| 535 // For simplicity we always test the accumulator register. | 532 // For simplicity we always test the accumulator register. |
| 536 __ mov(result_register(), Operand(lit)); | 533 __ mov(result_register(), Operand(lit)); |
| 537 codegen()->DoTest(this); | 534 codegen()->DoTest(this); |
| 538 } | 535 } |
| 539 } | 536 } |
| 540 | 537 |
| 541 | |
| 542 void FullCodeGenerator::StackValueContext::DropAndPlug(int count, | 538 void FullCodeGenerator::StackValueContext::DropAndPlug(int count, |
| 543 Register reg) const { | 539 Register reg) const { |
| 544 DCHECK(count > 0); | 540 DCHECK(count > 0); |
| 545 if (count > 1) codegen()->DropOperands(count - 1); | 541 if (count > 1) codegen()->DropOperands(count - 1); |
| 546 __ StoreP(reg, MemOperand(sp, 0)); | 542 __ StoreP(reg, MemOperand(sp, 0)); |
| 547 } | 543 } |
| 548 | 544 |
| 549 | |
| 550 void FullCodeGenerator::EffectContext::Plug(Label* materialize_true, | 545 void FullCodeGenerator::EffectContext::Plug(Label* materialize_true, |
| 551 Label* materialize_false) const { | 546 Label* materialize_false) const { |
| 552 DCHECK(materialize_true == materialize_false); | 547 DCHECK(materialize_true == materialize_false); |
| 553 __ bind(materialize_true); | 548 __ bind(materialize_true); |
| 554 } | 549 } |
| 555 | 550 |
| 556 | |
| 557 void FullCodeGenerator::AccumulatorValueContext::Plug( | 551 void FullCodeGenerator::AccumulatorValueContext::Plug( |
| 558 Label* materialize_true, Label* materialize_false) const { | 552 Label* materialize_true, Label* materialize_false) const { |
| 559 Label done; | 553 Label done; |
| 560 __ bind(materialize_true); | 554 __ bind(materialize_true); |
| 561 __ LoadRoot(result_register(), Heap::kTrueValueRootIndex); | 555 __ LoadRoot(result_register(), Heap::kTrueValueRootIndex); |
| 562 __ b(&done); | 556 __ b(&done, Label::kNear); |
| 563 __ bind(materialize_false); | 557 __ bind(materialize_false); |
| 564 __ LoadRoot(result_register(), Heap::kFalseValueRootIndex); | 558 __ LoadRoot(result_register(), Heap::kFalseValueRootIndex); |
| 565 __ bind(&done); | 559 __ bind(&done); |
| 566 } | 560 } |
| 567 | 561 |
| 568 | |
| 569 void FullCodeGenerator::StackValueContext::Plug( | 562 void FullCodeGenerator::StackValueContext::Plug( |
| 570 Label* materialize_true, Label* materialize_false) const { | 563 Label* materialize_true, Label* materialize_false) const { |
| 571 Label done; | 564 Label done; |
| 572 __ bind(materialize_true); | 565 __ bind(materialize_true); |
| 573 __ LoadRoot(ip, Heap::kTrueValueRootIndex); | 566 __ LoadRoot(ip, Heap::kTrueValueRootIndex); |
| 574 __ b(&done); | 567 __ b(&done, Label::kNear); |
| 575 __ bind(materialize_false); | 568 __ bind(materialize_false); |
| 576 __ LoadRoot(ip, Heap::kFalseValueRootIndex); | 569 __ LoadRoot(ip, Heap::kFalseValueRootIndex); |
| 577 __ bind(&done); | 570 __ bind(&done); |
| 578 codegen()->PushOperand(ip); | 571 codegen()->PushOperand(ip); |
| 579 } | 572 } |
| 580 | 573 |
| 581 | |
| 582 void FullCodeGenerator::TestContext::Plug(Label* materialize_true, | 574 void FullCodeGenerator::TestContext::Plug(Label* materialize_true, |
| 583 Label* materialize_false) const { | 575 Label* materialize_false) const { |
| 584 DCHECK(materialize_true == true_label_); | 576 DCHECK(materialize_true == true_label_); |
| 585 DCHECK(materialize_false == false_label_); | 577 DCHECK(materialize_false == false_label_); |
| 586 } | 578 } |
| 587 | 579 |
| 588 | |
| 589 void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const { | 580 void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const { |
| 590 Heap::RootListIndex value_root_index = | 581 Heap::RootListIndex value_root_index = |
| 591 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex; | 582 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex; |
| 592 __ LoadRoot(result_register(), value_root_index); | 583 __ LoadRoot(result_register(), value_root_index); |
| 593 } | 584 } |
| 594 | 585 |
| 595 | |
| 596 void FullCodeGenerator::StackValueContext::Plug(bool flag) const { | 586 void FullCodeGenerator::StackValueContext::Plug(bool flag) const { |
| 597 Heap::RootListIndex value_root_index = | 587 Heap::RootListIndex value_root_index = |
| 598 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex; | 588 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex; |
| 599 __ LoadRoot(ip, value_root_index); | 589 __ LoadRoot(ip, value_root_index); |
| 600 codegen()->PushOperand(ip); | 590 codegen()->PushOperand(ip); |
| 601 } | 591 } |
| 602 | 592 |
| 603 | |
| 604 void FullCodeGenerator::TestContext::Plug(bool flag) const { | 593 void FullCodeGenerator::TestContext::Plug(bool flag) const { |
| 605 codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_, | 594 codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_, |
| 606 false_label_); | 595 false_label_); |
| 607 if (flag) { | 596 if (flag) { |
| 608 if (true_label_ != fall_through_) __ b(true_label_); | 597 if (true_label_ != fall_through_) __ b(true_label_); |
| 609 } else { | 598 } else { |
| 610 if (false_label_ != fall_through_) __ b(false_label_); | 599 if (false_label_ != fall_through_) __ b(false_label_); |
| 611 } | 600 } |
| 612 } | 601 } |
| 613 | 602 |
| 614 | |
| 615 void FullCodeGenerator::DoTest(Expression* condition, Label* if_true, | 603 void FullCodeGenerator::DoTest(Expression* condition, Label* if_true, |
| 616 Label* if_false, Label* fall_through) { | 604 Label* if_false, Label* fall_through) { |
| 617 Handle<Code> ic = ToBooleanICStub::GetUninitialized(isolate()); | 605 Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate()); |
| 618 CallIC(ic, condition->test_id()); | 606 CallIC(ic, condition->test_id()); |
| 619 __ CompareRoot(result_register(), Heap::kTrueValueRootIndex); | 607 __ CompareRoot(result_register(), Heap::kTrueValueRootIndex); |
| 620 Split(eq, if_true, if_false, fall_through); | 608 Split(eq, if_true, if_false, fall_through); |
| 621 } | 609 } |
| 622 | 610 |
| 623 | |
| 624 void FullCodeGenerator::Split(Condition cond, Label* if_true, Label* if_false, | 611 void FullCodeGenerator::Split(Condition cond, Label* if_true, Label* if_false, |
| 625 Label* fall_through, CRegister cr) { | 612 Label* fall_through) { |
| 626 if (if_false == fall_through) { | 613 if (if_false == fall_through) { |
| 627 __ b(cond, if_true, cr); | 614 __ b(cond, if_true); |
| 628 } else if (if_true == fall_through) { | 615 } else if (if_true == fall_through) { |
| 629 __ b(NegateCondition(cond), if_false, cr); | 616 __ b(NegateCondition(cond), if_false); |
| 630 } else { | 617 } else { |
| 631 __ b(cond, if_true, cr); | 618 __ b(cond, if_true); |
| 632 __ b(if_false); | 619 __ b(if_false); |
| 633 } | 620 } |
| 634 } | 621 } |
| 635 | 622 |
| 636 | |
| 637 MemOperand FullCodeGenerator::StackOperand(Variable* var) { | 623 MemOperand FullCodeGenerator::StackOperand(Variable* var) { |
| 638 DCHECK(var->IsStackAllocated()); | 624 DCHECK(var->IsStackAllocated()); |
| 639 // Offset is negative because higher indexes are at lower addresses. | 625 // Offset is negative because higher indexes are at lower addresses. |
| 640 int offset = -var->index() * kPointerSize; | 626 int offset = -var->index() * kPointerSize; |
| 641 // Adjust by a (parameter or local) base offset. | 627 // Adjust by a (parameter or local) base offset. |
| 642 if (var->IsParameter()) { | 628 if (var->IsParameter()) { |
| 643 offset += (info_->scope()->num_parameters() + 1) * kPointerSize; | 629 offset += (info_->scope()->num_parameters() + 1) * kPointerSize; |
| 644 } else { | 630 } else { |
| 645 offset += JavaScriptFrameConstants::kLocal0Offset; | 631 offset += JavaScriptFrameConstants::kLocal0Offset; |
| 646 } | 632 } |
| 647 return MemOperand(fp, offset); | 633 return MemOperand(fp, offset); |
| 648 } | 634 } |
| 649 | 635 |
| 650 | |
| 651 MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) { | 636 MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) { |
| 652 DCHECK(var->IsContextSlot() || var->IsStackAllocated()); | 637 DCHECK(var->IsContextSlot() || var->IsStackAllocated()); |
| 653 if (var->IsContextSlot()) { | 638 if (var->IsContextSlot()) { |
| 654 int context_chain_length = scope()->ContextChainLength(var->scope()); | 639 int context_chain_length = scope()->ContextChainLength(var->scope()); |
| 655 __ LoadContext(scratch, context_chain_length); | 640 __ LoadContext(scratch, context_chain_length); |
| 656 return ContextMemOperand(scratch, var->index()); | 641 return ContextMemOperand(scratch, var->index()); |
| 657 } else { | 642 } else { |
| 658 return StackOperand(var); | 643 return StackOperand(var); |
| 659 } | 644 } |
| 660 } | 645 } |
| 661 | 646 |
| 662 | |
| 663 void FullCodeGenerator::GetVar(Register dest, Variable* var) { | 647 void FullCodeGenerator::GetVar(Register dest, Variable* var) { |
| 664 // Use destination as scratch. | 648 // Use destination as scratch. |
| 665 MemOperand location = VarOperand(var, dest); | 649 MemOperand location = VarOperand(var, dest); |
| 666 __ LoadP(dest, location, r0); | 650 __ LoadP(dest, location, r0); |
| 667 } | 651 } |
| 668 | 652 |
| 669 | |
| 670 void FullCodeGenerator::SetVar(Variable* var, Register src, Register scratch0, | 653 void FullCodeGenerator::SetVar(Variable* var, Register src, Register scratch0, |
| 671 Register scratch1) { | 654 Register scratch1) { |
| 672 DCHECK(var->IsContextSlot() || var->IsStackAllocated()); | 655 DCHECK(var->IsContextSlot() || var->IsStackAllocated()); |
| 673 DCHECK(!scratch0.is(src)); | 656 DCHECK(!scratch0.is(src)); |
| 674 DCHECK(!scratch0.is(scratch1)); | 657 DCHECK(!scratch0.is(scratch1)); |
| 675 DCHECK(!scratch1.is(src)); | 658 DCHECK(!scratch1.is(src)); |
| 676 MemOperand location = VarOperand(var, scratch0); | 659 MemOperand location = VarOperand(var, scratch0); |
| 677 __ StoreP(src, location, r0); | 660 __ StoreP(src, location); |
| 678 | 661 |
| 679 // Emit the write barrier code if the location is in the heap. | 662 // Emit the write barrier code if the location is in the heap. |
| 680 if (var->IsContextSlot()) { | 663 if (var->IsContextSlot()) { |
| 681 __ RecordWriteContextSlot(scratch0, location.offset(), src, scratch1, | 664 __ RecordWriteContextSlot(scratch0, location.offset(), src, scratch1, |
| 682 kLRHasBeenSaved, kDontSaveFPRegs); | 665 kLRHasBeenSaved, kDontSaveFPRegs); |
| 683 } | 666 } |
| 684 } | 667 } |
| 685 | 668 |
| 686 | |
| 687 void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr, | 669 void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr, |
| 688 bool should_normalize, | 670 bool should_normalize, |
| 689 Label* if_true, | 671 Label* if_true, |
| 690 Label* if_false) { | 672 Label* if_false) { |
| 691 // Only prepare for bailouts before splits if we're in a test | 673 // Only prepare for bailouts before splits if we're in a test |
| 692 // context. Otherwise, we let the Visit function deal with the | 674 // context. Otherwise, we let the Visit function deal with the |
| 693 // preparation to avoid preparing with the same AST id twice. | 675 // preparation to avoid preparing with the same AST id twice. |
| 694 if (!context()->IsTest()) return; | 676 if (!context()->IsTest()) return; |
| 695 | 677 |
| 696 Label skip; | 678 Label skip; |
| 697 if (should_normalize) __ b(&skip); | 679 if (should_normalize) __ b(&skip); |
| 698 PrepareForBailout(expr, TOS_REG); | 680 PrepareForBailout(expr, TOS_REG); |
| 699 if (should_normalize) { | 681 if (should_normalize) { |
| 700 __ LoadRoot(ip, Heap::kTrueValueRootIndex); | 682 __ CompareRoot(r2, Heap::kTrueValueRootIndex); |
| 701 __ cmp(r3, ip); | |
| 702 Split(eq, if_true, if_false, NULL); | 683 Split(eq, if_true, if_false, NULL); |
| 703 __ bind(&skip); | 684 __ bind(&skip); |
| 704 } | 685 } |
| 705 } | 686 } |
| 706 | 687 |
| 707 | |
| 708 void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) { | 688 void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) { |
| 709 // The variable in the declaration always resides in the current function | 689 // The variable in the declaration always resides in the current function |
| 710 // context. | 690 // context. |
| 711 DCHECK_EQ(0, scope()->ContextChainLength(variable->scope())); | 691 DCHECK_EQ(0, scope()->ContextChainLength(variable->scope())); |
| 712 if (FLAG_debug_code) { | 692 if (FLAG_debug_code) { |
| 713 // Check that we're not inside a with or catch context. | 693 // Check that we're not inside a with or catch context. |
| 714 __ LoadP(r4, FieldMemOperand(cp, HeapObject::kMapOffset)); | 694 __ LoadP(r3, FieldMemOperand(cp, HeapObject::kMapOffset)); |
| 715 __ CompareRoot(r4, Heap::kWithContextMapRootIndex); | 695 __ CompareRoot(r3, Heap::kWithContextMapRootIndex); |
| 716 __ Check(ne, kDeclarationInWithContext); | 696 __ Check(ne, kDeclarationInWithContext); |
| 717 __ CompareRoot(r4, Heap::kCatchContextMapRootIndex); | 697 __ CompareRoot(r3, Heap::kCatchContextMapRootIndex); |
| 718 __ Check(ne, kDeclarationInCatchContext); | 698 __ Check(ne, kDeclarationInCatchContext); |
| 719 } | 699 } |
| 720 } | 700 } |
| 721 | 701 |
| 722 | |
| 723 void FullCodeGenerator::VisitVariableDeclaration( | 702 void FullCodeGenerator::VisitVariableDeclaration( |
| 724 VariableDeclaration* declaration) { | 703 VariableDeclaration* declaration) { |
| 725 // If it was not possible to allocate the variable at compile time, we | 704 // If it was not possible to allocate the variable at compile time, we |
| 726 // need to "declare" it at runtime to make sure it actually exists in the | 705 // need to "declare" it at runtime to make sure it actually exists in the |
| 727 // local context. | 706 // local context. |
| 728 VariableProxy* proxy = declaration->proxy(); | 707 VariableProxy* proxy = declaration->proxy(); |
| 729 VariableMode mode = declaration->mode(); | 708 VariableMode mode = declaration->mode(); |
| 730 Variable* variable = proxy->var(); | 709 Variable* variable = proxy->var(); |
| 731 bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY; | 710 bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY; |
| 732 switch (variable->location()) { | 711 switch (variable->location()) { |
| (...skipping 13 matching lines...) Expand all Loading... |
| 746 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); | 725 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); |
| 747 __ StoreP(ip, StackOperand(variable)); | 726 __ StoreP(ip, StackOperand(variable)); |
| 748 } | 727 } |
| 749 break; | 728 break; |
| 750 | 729 |
| 751 case VariableLocation::CONTEXT: | 730 case VariableLocation::CONTEXT: |
| 752 if (hole_init) { | 731 if (hole_init) { |
| 753 Comment cmnt(masm_, "[ VariableDeclaration"); | 732 Comment cmnt(masm_, "[ VariableDeclaration"); |
| 754 EmitDebugCheckDeclarationContext(variable); | 733 EmitDebugCheckDeclarationContext(variable); |
| 755 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); | 734 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); |
| 756 __ StoreP(ip, ContextMemOperand(cp, variable->index()), r0); | 735 __ StoreP(ip, ContextMemOperand(cp, variable->index())); |
| 757 // No write barrier since the_hole_value is in old space. | 736 // No write barrier since the_hole_value is in old space. |
| 758 PrepareForBailoutForId(proxy->id(), NO_REGISTERS); | 737 PrepareForBailoutForId(proxy->id(), NO_REGISTERS); |
| 759 } | 738 } |
| 760 break; | 739 break; |
| 761 | 740 |
| 762 case VariableLocation::LOOKUP: { | 741 case VariableLocation::LOOKUP: { |
| 763 Comment cmnt(masm_, "[ VariableDeclaration"); | 742 Comment cmnt(masm_, "[ VariableDeclaration"); |
| 764 __ mov(r5, Operand(variable->name())); | 743 __ mov(r4, Operand(variable->name())); |
| 765 // Declaration nodes are always introduced in one of four modes. | 744 // Declaration nodes are always introduced in one of four modes. |
| 766 DCHECK(IsDeclaredVariableMode(mode)); | 745 DCHECK(IsDeclaredVariableMode(mode)); |
| 767 // Push initial value, if any. | 746 // Push initial value, if any. |
| 768 // Note: For variables we must not push an initial value (such as | 747 // Note: For variables we must not push an initial value (such as |
| 769 // 'undefined') because we may have a (legal) redeclaration and we | 748 // 'undefined') because we may have a (legal) redeclaration and we |
| 770 // must not destroy the current value. | 749 // must not destroy the current value. |
| 771 if (hole_init) { | 750 if (hole_init) { |
| 772 __ LoadRoot(r3, Heap::kTheHoleValueRootIndex); | 751 __ LoadRoot(r2, Heap::kTheHoleValueRootIndex); |
| 773 } else { | 752 } else { |
| 774 __ LoadSmiLiteral(r3, Smi::FromInt(0)); // Indicates no initial value. | 753 __ LoadSmiLiteral(r2, Smi::FromInt(0)); // Indicates no initial value. |
| 775 } | 754 } |
| 776 __ Push(r5, r3); | 755 __ Push(r4, r2); |
| 777 __ Push(Smi::FromInt(variable->DeclarationPropertyAttributes())); | 756 __ Push(Smi::FromInt(variable->DeclarationPropertyAttributes())); |
| 778 __ CallRuntime(Runtime::kDeclareLookupSlot); | 757 __ CallRuntime(Runtime::kDeclareLookupSlot); |
| 779 break; | 758 break; |
| 780 } | 759 } |
| 781 } | 760 } |
| 782 } | 761 } |
| 783 | 762 |
| 784 | |
| 785 void FullCodeGenerator::VisitFunctionDeclaration( | 763 void FullCodeGenerator::VisitFunctionDeclaration( |
| 786 FunctionDeclaration* declaration) { | 764 FunctionDeclaration* declaration) { |
| 787 VariableProxy* proxy = declaration->proxy(); | 765 VariableProxy* proxy = declaration->proxy(); |
| 788 Variable* variable = proxy->var(); | 766 Variable* variable = proxy->var(); |
| 789 switch (variable->location()) { | 767 switch (variable->location()) { |
| 790 case VariableLocation::GLOBAL: | 768 case VariableLocation::GLOBAL: |
| 791 case VariableLocation::UNALLOCATED: { | 769 case VariableLocation::UNALLOCATED: { |
| 792 globals_->Add(variable->name(), zone()); | 770 globals_->Add(variable->name(), zone()); |
| 793 Handle<SharedFunctionInfo> function = | 771 Handle<SharedFunctionInfo> function = |
| 794 Compiler::GetSharedFunctionInfo(declaration->fun(), script(), info_); | 772 Compiler::GetSharedFunctionInfo(declaration->fun(), script(), info_); |
| 795 // Check for stack-overflow exception. | 773 // Check for stack-overflow exception. |
| 796 if (function.is_null()) return SetStackOverflow(); | 774 if (function.is_null()) return SetStackOverflow(); |
| 797 globals_->Add(function, zone()); | 775 globals_->Add(function, zone()); |
| 798 break; | 776 break; |
| 799 } | 777 } |
| 800 | 778 |
| 801 case VariableLocation::PARAMETER: | 779 case VariableLocation::PARAMETER: |
| 802 case VariableLocation::LOCAL: { | 780 case VariableLocation::LOCAL: { |
| 803 Comment cmnt(masm_, "[ FunctionDeclaration"); | 781 Comment cmnt(masm_, "[ FunctionDeclaration"); |
| 804 VisitForAccumulatorValue(declaration->fun()); | 782 VisitForAccumulatorValue(declaration->fun()); |
| 805 __ StoreP(result_register(), StackOperand(variable)); | 783 __ StoreP(result_register(), StackOperand(variable)); |
| 806 break; | 784 break; |
| 807 } | 785 } |
| 808 | 786 |
| 809 case VariableLocation::CONTEXT: { | 787 case VariableLocation::CONTEXT: { |
| 810 Comment cmnt(masm_, "[ FunctionDeclaration"); | 788 Comment cmnt(masm_, "[ FunctionDeclaration"); |
| 811 EmitDebugCheckDeclarationContext(variable); | 789 EmitDebugCheckDeclarationContext(variable); |
| 812 VisitForAccumulatorValue(declaration->fun()); | 790 VisitForAccumulatorValue(declaration->fun()); |
| 813 __ StoreP(result_register(), ContextMemOperand(cp, variable->index()), | 791 __ StoreP(result_register(), ContextMemOperand(cp, variable->index())); |
| 814 r0); | |
| 815 int offset = Context::SlotOffset(variable->index()); | 792 int offset = Context::SlotOffset(variable->index()); |
| 816 // We know that we have written a function, which is not a smi. | 793 // We know that we have written a function, which is not a smi. |
| 817 __ RecordWriteContextSlot(cp, offset, result_register(), r5, | 794 __ RecordWriteContextSlot(cp, offset, result_register(), r4, |
| 818 kLRHasBeenSaved, kDontSaveFPRegs, | 795 kLRHasBeenSaved, kDontSaveFPRegs, |
| 819 EMIT_REMEMBERED_SET, OMIT_SMI_CHECK); | 796 EMIT_REMEMBERED_SET, OMIT_SMI_CHECK); |
| 820 PrepareForBailoutForId(proxy->id(), NO_REGISTERS); | 797 PrepareForBailoutForId(proxy->id(), NO_REGISTERS); |
| 821 break; | 798 break; |
| 822 } | 799 } |
| 823 | 800 |
| 824 case VariableLocation::LOOKUP: { | 801 case VariableLocation::LOOKUP: { |
| 825 Comment cmnt(masm_, "[ FunctionDeclaration"); | 802 Comment cmnt(masm_, "[ FunctionDeclaration"); |
| 826 __ mov(r5, Operand(variable->name())); | 803 __ mov(r4, Operand(variable->name())); |
| 827 PushOperand(r5); | 804 PushOperand(r4); |
| 828 // Push initial value for function declaration. | 805 // Push initial value for function declaration. |
| 829 VisitForStackValue(declaration->fun()); | 806 VisitForStackValue(declaration->fun()); |
| 830 PushOperand(Smi::FromInt(variable->DeclarationPropertyAttributes())); | 807 PushOperand(Smi::FromInt(variable->DeclarationPropertyAttributes())); |
| 831 CallRuntimeWithOperands(Runtime::kDeclareLookupSlot); | 808 CallRuntimeWithOperands(Runtime::kDeclareLookupSlot); |
| 832 break; | 809 break; |
| 833 } | 810 } |
| 834 } | 811 } |
| 835 } | 812 } |
| 836 | 813 |
| 837 | |
| 838 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) { | 814 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) { |
| 839 // Call the runtime to declare the globals. | 815 // Call the runtime to declare the globals. |
| 840 __ mov(r4, Operand(pairs)); | 816 __ mov(r3, Operand(pairs)); |
| 841 __ LoadSmiLiteral(r3, Smi::FromInt(DeclareGlobalsFlags())); | 817 __ LoadSmiLiteral(r2, Smi::FromInt(DeclareGlobalsFlags())); |
| 842 __ Push(r4, r3); | 818 __ Push(r3, r2); |
| 843 __ CallRuntime(Runtime::kDeclareGlobals); | 819 __ CallRuntime(Runtime::kDeclareGlobals); |
| 844 // Return value is ignored. | 820 // Return value is ignored. |
| 845 } | 821 } |
| 846 | 822 |
| 847 | |
| 848 void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) { | 823 void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) { |
| 849 // Call the runtime to declare the modules. | 824 // Call the runtime to declare the modules. |
| 850 __ Push(descriptions); | 825 __ Push(descriptions); |
| 851 __ CallRuntime(Runtime::kDeclareModules); | 826 __ CallRuntime(Runtime::kDeclareModules); |
| 852 // Return value is ignored. | 827 // Return value is ignored. |
| 853 } | 828 } |
| 854 | 829 |
| 855 | |
| 856 void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) { | 830 void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) { |
| 857 Comment cmnt(masm_, "[ SwitchStatement"); | 831 Comment cmnt(masm_, "[ SwitchStatement"); |
| 858 Breakable nested_statement(this, stmt); | 832 Breakable nested_statement(this, stmt); |
| 859 SetStatementPosition(stmt); | 833 SetStatementPosition(stmt); |
| 860 | 834 |
| 861 // Keep the switch value on the stack until a case matches. | 835 // Keep the switch value on the stack until a case matches. |
| 862 VisitForStackValue(stmt->tag()); | 836 VisitForStackValue(stmt->tag()); |
| 863 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS); | 837 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS); |
| 864 | 838 |
| 865 ZoneList<CaseClause*>* clauses = stmt->cases(); | 839 ZoneList<CaseClause*>* clauses = stmt->cases(); |
| (...skipping 12 matching lines...) Expand all Loading... |
| 878 } | 852 } |
| 879 | 853 |
| 880 Comment cmnt(masm_, "[ Case comparison"); | 854 Comment cmnt(masm_, "[ Case comparison"); |
| 881 __ bind(&next_test); | 855 __ bind(&next_test); |
| 882 next_test.Unuse(); | 856 next_test.Unuse(); |
| 883 | 857 |
| 884 // Compile the label expression. | 858 // Compile the label expression. |
| 885 VisitForAccumulatorValue(clause->label()); | 859 VisitForAccumulatorValue(clause->label()); |
| 886 | 860 |
| 887 // Perform the comparison as if via '==='. | 861 // Perform the comparison as if via '==='. |
| 888 __ LoadP(r4, MemOperand(sp, 0)); // Switch value. | 862 __ LoadP(r3, MemOperand(sp, 0)); // Switch value. |
| 889 bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT); | 863 bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT); |
| 890 JumpPatchSite patch_site(masm_); | 864 JumpPatchSite patch_site(masm_); |
| 891 if (inline_smi_code) { | 865 if (inline_smi_code) { |
| 892 Label slow_case; | 866 Label slow_case; |
| 893 __ orx(r5, r4, r3); | 867 __ LoadRR(r4, r2); |
| 894 patch_site.EmitJumpIfNotSmi(r5, &slow_case); | 868 __ OrP(r4, r3); |
| 869 patch_site.EmitJumpIfNotSmi(r4, &slow_case); |
| 895 | 870 |
| 896 __ cmp(r4, r3); | 871 __ CmpP(r3, r2); |
| 897 __ bne(&next_test); | 872 __ bne(&next_test); |
| 898 __ Drop(1); // Switch value is no longer needed. | 873 __ Drop(1); // Switch value is no longer needed. |
| 899 __ b(clause->body_target()); | 874 __ b(clause->body_target()); |
| 900 __ bind(&slow_case); | 875 __ bind(&slow_case); |
| 901 } | 876 } |
| 902 | 877 |
| 903 // Record position before stub call for type feedback. | 878 // Record position before stub call for type feedback. |
| 904 SetExpressionPosition(clause); | 879 SetExpressionPosition(clause); |
| 905 Handle<Code> ic = | 880 Handle<Code> ic = |
| 906 CodeFactory::CompareIC(isolate(), Token::EQ_STRICT).code(); | 881 CodeFactory::CompareIC(isolate(), Token::EQ_STRICT).code(); |
| 907 CallIC(ic, clause->CompareId()); | 882 CallIC(ic, clause->CompareId()); |
| 908 patch_site.EmitPatchInfo(); | 883 patch_site.EmitPatchInfo(); |
| 909 | 884 |
| 910 Label skip; | 885 Label skip; |
| 911 __ b(&skip); | 886 __ b(&skip); |
| 912 PrepareForBailout(clause, TOS_REG); | 887 PrepareForBailout(clause, TOS_REG); |
| 913 __ LoadRoot(ip, Heap::kTrueValueRootIndex); | 888 __ CompareRoot(r2, Heap::kTrueValueRootIndex); |
| 914 __ cmp(r3, ip); | |
| 915 __ bne(&next_test); | 889 __ bne(&next_test); |
| 916 __ Drop(1); | 890 __ Drop(1); |
| 917 __ b(clause->body_target()); | 891 __ b(clause->body_target()); |
| 918 __ bind(&skip); | 892 __ bind(&skip); |
| 919 | 893 |
| 920 __ cmpi(r3, Operand::Zero()); | 894 __ CmpP(r2, Operand::Zero()); |
| 921 __ bne(&next_test); | 895 __ bne(&next_test); |
| 922 __ Drop(1); // Switch value is no longer needed. | 896 __ Drop(1); // Switch value is no longer needed. |
| 923 __ b(clause->body_target()); | 897 __ b(clause->body_target()); |
| 924 } | 898 } |
| 925 | 899 |
| 926 // Discard the test value and jump to the default if present, otherwise to | 900 // Discard the test value and jump to the default if present, otherwise to |
| 927 // the end of the statement. | 901 // the end of the statement. |
| 928 __ bind(&next_test); | 902 __ bind(&next_test); |
| 929 DropOperands(1); // Switch value is no longer needed. | 903 DropOperands(1); // Switch value is no longer needed. |
| 930 if (default_clause == NULL) { | 904 if (default_clause == NULL) { |
| 931 __ b(nested_statement.break_label()); | 905 __ b(nested_statement.break_label()); |
| 932 } else { | 906 } else { |
| 933 __ b(default_clause->body_target()); | 907 __ b(default_clause->body_target()); |
| 934 } | 908 } |
| 935 | 909 |
| 936 // Compile all the case bodies. | 910 // Compile all the case bodies. |
| 937 for (int i = 0; i < clauses->length(); i++) { | 911 for (int i = 0; i < clauses->length(); i++) { |
| 938 Comment cmnt(masm_, "[ Case body"); | 912 Comment cmnt(masm_, "[ Case body"); |
| 939 CaseClause* clause = clauses->at(i); | 913 CaseClause* clause = clauses->at(i); |
| 940 __ bind(clause->body_target()); | 914 __ bind(clause->body_target()); |
| 941 PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS); | 915 PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS); |
| 942 VisitStatements(clause->statements()); | 916 VisitStatements(clause->statements()); |
| 943 } | 917 } |
| 944 | 918 |
| 945 __ bind(nested_statement.break_label()); | 919 __ bind(nested_statement.break_label()); |
| 946 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS); | 920 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS); |
| 947 } | 921 } |
| 948 | 922 |
| 949 | |
| 950 void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) { | 923 void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) { |
| 951 Comment cmnt(masm_, "[ ForInStatement"); | 924 Comment cmnt(masm_, "[ ForInStatement"); |
| 952 SetStatementPosition(stmt, SKIP_BREAK); | 925 SetStatementPosition(stmt, SKIP_BREAK); |
| 953 | 926 |
| 954 FeedbackVectorSlot slot = stmt->ForInFeedbackSlot(); | 927 FeedbackVectorSlot slot = stmt->ForInFeedbackSlot(); |
| 955 | 928 |
| 956 // Get the object to enumerate over. | 929 // Get the object to enumerate over. |
| 957 SetExpressionAsStatementPosition(stmt->enumerable()); | 930 SetExpressionAsStatementPosition(stmt->enumerable()); |
| 958 VisitForAccumulatorValue(stmt->enumerable()); | 931 VisitForAccumulatorValue(stmt->enumerable()); |
| 959 OperandStackDepthIncrement(5); | 932 OperandStackDepthIncrement(5); |
| 960 | 933 |
| 961 Label loop, exit; | 934 Label loop, exit; |
| 962 Iteration loop_statement(this, stmt); | 935 Iteration loop_statement(this, stmt); |
| 963 increment_loop_depth(); | 936 increment_loop_depth(); |
| 964 | 937 |
| 965 // If the object is null or undefined, skip over the loop, otherwise convert | 938 // If the object is null or undefined, skip over the loop, otherwise convert |
| 966 // it to a JS receiver. See ECMA-262 version 5, section 12.6.4. | 939 // it to a JS receiver. See ECMA-262 version 5, section 12.6.4. |
| 967 Label convert, done_convert; | 940 Label convert, done_convert; |
| 968 __ JumpIfSmi(r3, &convert); | 941 __ JumpIfSmi(r2, &convert); |
| 969 __ CompareObjectType(r3, r4, r4, FIRST_JS_RECEIVER_TYPE); | 942 __ CompareObjectType(r2, r3, r3, FIRST_JS_RECEIVER_TYPE); |
| 970 __ bge(&done_convert); | 943 __ bge(&done_convert); |
| 971 __ CompareRoot(r3, Heap::kNullValueRootIndex); | 944 __ CompareRoot(r2, Heap::kNullValueRootIndex); |
| 972 __ beq(&exit); | 945 __ beq(&exit); |
| 973 __ CompareRoot(r3, Heap::kUndefinedValueRootIndex); | 946 __ CompareRoot(r2, Heap::kUndefinedValueRootIndex); |
| 974 __ beq(&exit); | 947 __ beq(&exit); |
| 975 __ bind(&convert); | 948 __ bind(&convert); |
| 976 ToObjectStub stub(isolate()); | 949 ToObjectStub stub(isolate()); |
| 977 __ CallStub(&stub); | 950 __ CallStub(&stub); |
| 978 __ bind(&done_convert); | 951 __ bind(&done_convert); |
| 979 PrepareForBailoutForId(stmt->ToObjectId(), TOS_REG); | 952 PrepareForBailoutForId(stmt->ToObjectId(), TOS_REG); |
| 980 __ push(r3); | 953 __ push(r2); |
| 981 | 954 |
| 982 // Check cache validity in generated code. This is a fast case for | 955 // Check cache validity in generated code. This is a fast case for |
| 983 // the JSObject::IsSimpleEnum cache validity checks. If we cannot | 956 // the JSObject::IsSimpleEnum cache validity checks. If we cannot |
| 984 // guarantee cache validity, call the runtime system to check cache | 957 // guarantee cache validity, call the runtime system to check cache |
| 985 // validity or get the property names in a fixed array. | 958 // validity or get the property names in a fixed array. |
| 986 // Note: Proxies never have an enum cache, so will always take the | 959 // Note: Proxies never have an enum cache, so will always take the |
| 987 // slow path. | 960 // slow path. |
| 988 Label call_runtime; | 961 Label call_runtime; |
| 989 __ CheckEnumCache(&call_runtime); | 962 __ CheckEnumCache(&call_runtime); |
| 990 | 963 |
| 991 // The enum cache is valid. Load the map of the object being | 964 // The enum cache is valid. Load the map of the object being |
| 992 // iterated over and use the cache for the iteration. | 965 // iterated over and use the cache for the iteration. |
| 993 Label use_cache; | 966 Label use_cache; |
| 994 __ LoadP(r3, FieldMemOperand(r3, HeapObject::kMapOffset)); | 967 __ LoadP(r2, FieldMemOperand(r2, HeapObject::kMapOffset)); |
| 995 __ b(&use_cache); | 968 __ b(&use_cache); |
| 996 | 969 |
| 997 // Get the set of properties to enumerate. | 970 // Get the set of properties to enumerate. |
| 998 __ bind(&call_runtime); | 971 __ bind(&call_runtime); |
| 999 __ push(r3); // Duplicate the enumerable object on the stack. | 972 __ push(r2); // Duplicate the enumerable object on the stack. |
| 1000 __ CallRuntime(Runtime::kForInEnumerate); | 973 __ CallRuntime(Runtime::kForInEnumerate); |
| 1001 PrepareForBailoutForId(stmt->EnumId(), TOS_REG); | 974 PrepareForBailoutForId(stmt->EnumId(), TOS_REG); |
| 1002 | 975 |
| 1003 // If we got a map from the runtime call, we can do a fast | 976 // If we got a map from the runtime call, we can do a fast |
| 1004 // modification check. Otherwise, we got a fixed array, and we have | 977 // modification check. Otherwise, we got a fixed array, and we have |
| 1005 // to do a slow check. | 978 // to do a slow check. |
| 1006 Label fixed_array; | 979 Label fixed_array; |
| 1007 __ LoadP(r5, FieldMemOperand(r3, HeapObject::kMapOffset)); | 980 __ LoadP(r4, FieldMemOperand(r2, HeapObject::kMapOffset)); |
| 1008 __ LoadRoot(ip, Heap::kMetaMapRootIndex); | 981 __ CompareRoot(r4, Heap::kMetaMapRootIndex); |
| 1009 __ cmp(r5, ip); | |
| 1010 __ bne(&fixed_array); | 982 __ bne(&fixed_array); |
| 1011 | 983 |
| 1012 // We got a map in register r3. Get the enumeration cache from it. | 984 // We got a map in register r2. Get the enumeration cache from it. |
| 1013 Label no_descriptors; | 985 Label no_descriptors; |
| 1014 __ bind(&use_cache); | 986 __ bind(&use_cache); |
| 1015 | 987 |
| 1016 __ EnumLength(r4, r3); | 988 __ EnumLength(r3, r2); |
| 1017 __ CmpSmiLiteral(r4, Smi::FromInt(0), r0); | 989 __ CmpSmiLiteral(r3, Smi::FromInt(0), r0); |
| 1018 __ beq(&no_descriptors); | 990 __ beq(&no_descriptors, Label::kNear); |
| 1019 | 991 |
| 1020 __ LoadInstanceDescriptors(r3, r5); | 992 __ LoadInstanceDescriptors(r2, r4); |
| 1021 __ LoadP(r5, FieldMemOperand(r5, DescriptorArray::kEnumCacheOffset)); | 993 __ LoadP(r4, FieldMemOperand(r4, DescriptorArray::kEnumCacheOffset)); |
| 1022 __ LoadP(r5, | 994 __ LoadP(r4, |
| 1023 FieldMemOperand(r5, DescriptorArray::kEnumCacheBridgeCacheOffset)); | 995 FieldMemOperand(r4, DescriptorArray::kEnumCacheBridgeCacheOffset)); |
| 1024 | 996 |
| 1025 // Set up the four remaining stack slots. | 997 // Set up the four remaining stack slots. |
| 1026 __ push(r3); // Map. | 998 __ push(r2); // Map. |
| 1027 __ LoadSmiLiteral(r3, Smi::FromInt(0)); | 999 __ LoadSmiLiteral(r2, Smi::FromInt(0)); |
| 1028 // Push enumeration cache, enumeration cache length (as smi) and zero. | 1000 // Push enumeration cache, enumeration cache length (as smi) and zero. |
| 1029 __ Push(r5, r4, r3); | 1001 __ Push(r4, r3, r2); |
| 1030 __ b(&loop); | 1002 __ b(&loop); |
| 1031 | 1003 |
| 1032 __ bind(&no_descriptors); | 1004 __ bind(&no_descriptors); |
| 1033 __ Drop(1); | 1005 __ Drop(1); |
| 1034 __ b(&exit); | 1006 __ b(&exit); |
| 1035 | 1007 |
| 1036 // We got a fixed array in register r3. Iterate through that. | 1008 // We got a fixed array in register r2. Iterate through that. |
| 1037 __ bind(&fixed_array); | 1009 __ bind(&fixed_array); |
| 1038 | 1010 |
| 1039 __ LoadSmiLiteral(r4, Smi::FromInt(1)); // Smi(1) indicates slow check | 1011 int const vector_index = SmiFromSlot(slot)->value(); |
| 1040 __ Push(r4, r3); // Smi and array | 1012 __ EmitLoadTypeFeedbackVector(r3); |
| 1041 __ LoadP(r4, FieldMemOperand(r3, FixedArray::kLengthOffset)); | 1013 __ mov(r4, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate()))); |
| 1042 __ Push(r4); // Fixed array length (as smi). | 1014 __ StoreP( |
| 1015 r4, FieldMemOperand(r3, FixedArray::OffsetOfElementAt(vector_index)), r0); |
| 1016 __ LoadSmiLiteral(r3, Smi::FromInt(1)); // Smi(1) indicates slow check |
| 1017 __ Push(r3, r2); // Smi and array |
| 1018 __ LoadP(r3, FieldMemOperand(r2, FixedArray::kLengthOffset)); |
| 1019 __ Push(r3); // Fixed array length (as smi). |
| 1043 PrepareForBailoutForId(stmt->PrepareId(), NO_REGISTERS); | 1020 PrepareForBailoutForId(stmt->PrepareId(), NO_REGISTERS); |
| 1044 __ LoadSmiLiteral(r3, Smi::FromInt(0)); | 1021 __ LoadSmiLiteral(r2, Smi::FromInt(0)); |
| 1045 __ Push(r3); // Initial index. | 1022 __ Push(r2); // Initial index. |
| 1046 | 1023 |
| 1047 // Generate code for doing the condition check. | 1024 // Generate code for doing the condition check. |
| 1048 __ bind(&loop); | 1025 __ bind(&loop); |
| 1049 SetExpressionAsStatementPosition(stmt->each()); | 1026 SetExpressionAsStatementPosition(stmt->each()); |
| 1050 | 1027 |
| 1051 // Load the current count to r3, load the length to r4. | 1028 // Load the current count to r2, load the length to r3. |
| 1052 __ LoadP(r3, MemOperand(sp, 0 * kPointerSize)); | 1029 __ LoadP(r2, MemOperand(sp, 0 * kPointerSize)); |
| 1053 __ LoadP(r4, MemOperand(sp, 1 * kPointerSize)); | 1030 __ LoadP(r3, MemOperand(sp, 1 * kPointerSize)); |
| 1054 __ cmpl(r3, r4); // Compare to the array length. | 1031 __ CmpLogicalP(r2, r3); // Compare to the array length. |
| 1055 __ bge(loop_statement.break_label()); | 1032 __ bge(loop_statement.break_label()); |
| 1056 | 1033 |
| 1057 // Get the current entry of the array into register r6. | 1034 // Get the current entry of the array into register r5. |
| 1058 __ LoadP(r5, MemOperand(sp, 2 * kPointerSize)); | 1035 __ LoadP(r4, MemOperand(sp, 2 * kPointerSize)); |
| 1059 __ addi(r5, r5, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); | 1036 __ AddP(r4, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); |
| 1060 __ SmiToPtrArrayOffset(r6, r3); | 1037 __ SmiToPtrArrayOffset(r5, r2); |
| 1061 __ LoadPX(r6, MemOperand(r6, r5)); | 1038 __ LoadP(r5, MemOperand(r5, r4)); |
| 1062 | 1039 |
| 1063 // Get the expected map from the stack or a smi in the | 1040 // Get the expected map from the stack or a smi in the |
| 1064 // permanent slow case into register r5. | 1041 // permanent slow case into register r4. |
| 1065 __ LoadP(r5, MemOperand(sp, 3 * kPointerSize)); | 1042 __ LoadP(r4, MemOperand(sp, 3 * kPointerSize)); |
| 1066 | 1043 |
| 1067 // Check if the expected map still matches that of the enumerable. | 1044 // Check if the expected map still matches that of the enumerable. |
| 1068 // If not, we may have to filter the key. | 1045 // If not, we may have to filter the key. |
| 1069 Label update_each; | 1046 Label update_each; |
| 1070 __ LoadP(r4, MemOperand(sp, 4 * kPointerSize)); | 1047 __ LoadP(r3, MemOperand(sp, 4 * kPointerSize)); |
| 1071 __ LoadP(r7, FieldMemOperand(r4, HeapObject::kMapOffset)); | 1048 __ LoadP(r6, FieldMemOperand(r3, HeapObject::kMapOffset)); |
| 1072 __ cmp(r7, r5); | 1049 __ CmpP(r6, r4); |
| 1073 __ beq(&update_each); | 1050 __ beq(&update_each); |
| 1074 | 1051 |
| 1075 // We need to filter the key, record slow-path here. | 1052 // We might get here from TurboFan or Crankshaft when something in the |
| 1076 int const vector_index = SmiFromSlot(slot)->value(); | 1053 // for-in loop body deopts and only now notice in fullcodegen, that we |
| 1077 __ EmitLoadTypeFeedbackVector(r3); | 1054 // can now longer use the enum cache, i.e. left fast mode. So better record |
| 1078 __ mov(r5, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate()))); | 1055 // this information here, in case we later OSR back into this loop or |
| 1056 // reoptimize the whole function w/o rerunning the loop with the slow |
| 1057 // mode object in fullcodegen (which would result in a deopt loop). |
| 1058 __ EmitLoadTypeFeedbackVector(r2); |
| 1059 __ mov(r4, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate()))); |
| 1079 __ StoreP( | 1060 __ StoreP( |
| 1080 r5, FieldMemOperand(r3, FixedArray::OffsetOfElementAt(vector_index)), r0); | 1061 r4, FieldMemOperand(r2, FixedArray::OffsetOfElementAt(vector_index)), r0); |
| 1081 | 1062 |
| 1082 // Convert the entry to a string or (smi) 0 if it isn't a property | 1063 // Convert the entry to a string or (smi) 0 if it isn't a property |
| 1083 // any more. If the property has been removed while iterating, we | 1064 // any more. If the property has been removed while iterating, we |
| 1084 // just skip it. | 1065 // just skip it. |
| 1085 __ Push(r4, r6); // Enumerable and current entry. | 1066 __ Push(r3, r5); // Enumerable and current entry. |
| 1086 __ CallRuntime(Runtime::kForInFilter); | 1067 __ CallRuntime(Runtime::kForInFilter); |
| 1087 PrepareForBailoutForId(stmt->FilterId(), TOS_REG); | 1068 PrepareForBailoutForId(stmt->FilterId(), TOS_REG); |
| 1088 __ mr(r6, r3); | 1069 __ LoadRR(r5, r2); |
| 1089 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex); | 1070 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex); |
| 1090 __ cmp(r3, r0); | 1071 __ CmpP(r2, r0); |
| 1091 __ beq(loop_statement.continue_label()); | 1072 __ beq(loop_statement.continue_label()); |
| 1092 | 1073 |
| 1093 // Update the 'each' property or variable from the possibly filtered | 1074 // Update the 'each' property or variable from the possibly filtered |
| 1094 // entry in register r6. | 1075 // entry in register r5. |
| 1095 __ bind(&update_each); | 1076 __ bind(&update_each); |
| 1096 __ mr(result_register(), r6); | 1077 __ LoadRR(result_register(), r5); |
| 1097 // Perform the assignment as if via '='. | 1078 // Perform the assignment as if via '='. |
| 1098 { | 1079 { |
| 1099 EffectContext context(this); | 1080 EffectContext context(this); |
| 1100 EmitAssignment(stmt->each(), stmt->EachFeedbackSlot()); | 1081 EmitAssignment(stmt->each(), stmt->EachFeedbackSlot()); |
| 1101 PrepareForBailoutForId(stmt->AssignmentId(), NO_REGISTERS); | 1082 PrepareForBailoutForId(stmt->AssignmentId(), NO_REGISTERS); |
| 1102 } | 1083 } |
| 1103 | 1084 |
| 1104 // Both Crankshaft and Turbofan expect BodyId to be right before stmt->body(). | 1085 // Both Crankshaft and Turbofan expect BodyId to be right before stmt->body(). |
| 1105 PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS); | 1086 PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS); |
| 1106 // Generate code for the body of the loop. | 1087 // Generate code for the body of the loop. |
| 1107 Visit(stmt->body()); | 1088 Visit(stmt->body()); |
| 1108 | 1089 |
| 1109 // Generate code for the going to the next element by incrementing | 1090 // Generate code for the going to the next element by incrementing |
| 1110 // the index (smi) stored on top of the stack. | 1091 // the index (smi) stored on top of the stack. |
| 1111 __ bind(loop_statement.continue_label()); | 1092 __ bind(loop_statement.continue_label()); |
| 1112 __ pop(r3); | 1093 __ pop(r2); |
| 1113 __ AddSmiLiteral(r3, r3, Smi::FromInt(1), r0); | 1094 __ AddSmiLiteral(r2, r2, Smi::FromInt(1), r0); |
| 1114 __ push(r3); | 1095 __ push(r2); |
| 1115 | 1096 |
| 1116 EmitBackEdgeBookkeeping(stmt, &loop); | 1097 EmitBackEdgeBookkeeping(stmt, &loop); |
| 1117 __ b(&loop); | 1098 __ b(&loop); |
| 1118 | 1099 |
| 1119 // Remove the pointers stored on the stack. | 1100 // Remove the pointers stored on the stack. |
| 1120 __ bind(loop_statement.break_label()); | 1101 __ bind(loop_statement.break_label()); |
| 1121 DropOperands(5); | 1102 DropOperands(5); |
| 1122 | 1103 |
| 1123 // Exit and decrement the loop depth. | 1104 // Exit and decrement the loop depth. |
| 1124 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS); | 1105 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS); |
| 1125 __ bind(&exit); | 1106 __ bind(&exit); |
| 1126 decrement_loop_depth(); | 1107 decrement_loop_depth(); |
| 1127 } | 1108 } |
| 1128 | 1109 |
| 1129 | |
| 1130 void FullCodeGenerator::EmitSetHomeObject(Expression* initializer, int offset, | 1110 void FullCodeGenerator::EmitSetHomeObject(Expression* initializer, int offset, |
| 1131 FeedbackVectorSlot slot) { | 1111 FeedbackVectorSlot slot) { |
| 1132 DCHECK(NeedsHomeObject(initializer)); | 1112 DCHECK(NeedsHomeObject(initializer)); |
| 1133 __ LoadP(StoreDescriptor::ReceiverRegister(), MemOperand(sp)); | 1113 __ LoadP(StoreDescriptor::ReceiverRegister(), MemOperand(sp)); |
| 1134 __ mov(StoreDescriptor::NameRegister(), | 1114 __ mov(StoreDescriptor::NameRegister(), |
| 1135 Operand(isolate()->factory()->home_object_symbol())); | 1115 Operand(isolate()->factory()->home_object_symbol())); |
| 1136 __ LoadP(StoreDescriptor::ValueRegister(), | 1116 __ LoadP(StoreDescriptor::ValueRegister(), |
| 1137 MemOperand(sp, offset * kPointerSize)); | 1117 MemOperand(sp, offset * kPointerSize)); |
| 1138 EmitLoadStoreICSlot(slot); | 1118 EmitLoadStoreICSlot(slot); |
| 1139 CallStoreIC(); | 1119 CallStoreIC(); |
| 1140 } | 1120 } |
| 1141 | 1121 |
| 1142 | |
| 1143 void FullCodeGenerator::EmitSetHomeObjectAccumulator(Expression* initializer, | 1122 void FullCodeGenerator::EmitSetHomeObjectAccumulator(Expression* initializer, |
| 1144 int offset, | 1123 int offset, |
| 1145 FeedbackVectorSlot slot) { | 1124 FeedbackVectorSlot slot) { |
| 1146 DCHECK(NeedsHomeObject(initializer)); | 1125 DCHECK(NeedsHomeObject(initializer)); |
| 1147 __ Move(StoreDescriptor::ReceiverRegister(), r3); | 1126 __ Move(StoreDescriptor::ReceiverRegister(), r2); |
| 1148 __ mov(StoreDescriptor::NameRegister(), | 1127 __ mov(StoreDescriptor::NameRegister(), |
| 1149 Operand(isolate()->factory()->home_object_symbol())); | 1128 Operand(isolate()->factory()->home_object_symbol())); |
| 1150 __ LoadP(StoreDescriptor::ValueRegister(), | 1129 __ LoadP(StoreDescriptor::ValueRegister(), |
| 1151 MemOperand(sp, offset * kPointerSize)); | 1130 MemOperand(sp, offset * kPointerSize)); |
| 1152 EmitLoadStoreICSlot(slot); | 1131 EmitLoadStoreICSlot(slot); |
| 1153 CallStoreIC(); | 1132 CallStoreIC(); |
| 1154 } | 1133 } |
| 1155 | 1134 |
| 1156 | |
| 1157 void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy, | 1135 void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy, |
| 1158 TypeofMode typeof_mode, | 1136 TypeofMode typeof_mode, |
| 1159 Label* slow) { | 1137 Label* slow) { |
| 1160 Register current = cp; | 1138 Register current = cp; |
| 1161 Register next = r4; | 1139 Register next = r3; |
| 1162 Register temp = r5; | 1140 Register temp = r4; |
| 1163 | 1141 |
| 1164 Scope* s = scope(); | 1142 Scope* s = scope(); |
| 1165 while (s != NULL) { | 1143 while (s != NULL) { |
| 1166 if (s->num_heap_slots() > 0) { | 1144 if (s->num_heap_slots() > 0) { |
| 1167 if (s->calls_sloppy_eval()) { | 1145 if (s->calls_sloppy_eval()) { |
| 1168 // Check that extension is "the hole". | 1146 // Check that extension is "the hole". |
| 1169 __ LoadP(temp, ContextMemOperand(current, Context::EXTENSION_INDEX)); | 1147 __ LoadP(temp, ContextMemOperand(current, Context::EXTENSION_INDEX)); |
| 1170 __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow); | 1148 __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow); |
| 1171 } | 1149 } |
| 1172 // Load next context in chain. | 1150 // Load next context in chain. |
| 1173 __ LoadP(next, ContextMemOperand(current, Context::PREVIOUS_INDEX)); | 1151 __ LoadP(next, ContextMemOperand(current, Context::PREVIOUS_INDEX)); |
| 1174 // Walk the rest of the chain without clobbering cp. | 1152 // Walk the rest of the chain without clobbering cp. |
| 1175 current = next; | 1153 current = next; |
| 1176 } | 1154 } |
| 1177 // If no outer scope calls eval, we do not need to check more | 1155 // If no outer scope calls eval, we do not need to check more |
| 1178 // context extensions. | 1156 // context extensions. |
| 1179 if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break; | 1157 if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break; |
| 1180 s = s->outer_scope(); | 1158 s = s->outer_scope(); |
| 1181 } | 1159 } |
| 1182 | 1160 |
| 1183 if (s->is_eval_scope()) { | 1161 if (s->is_eval_scope()) { |
| 1184 Label loop, fast; | 1162 Label loop, fast; |
| 1185 if (!current.is(next)) { | 1163 if (!current.is(next)) { |
| 1186 __ Move(next, current); | 1164 __ Move(next, current); |
| 1187 } | 1165 } |
| 1188 __ bind(&loop); | 1166 __ bind(&loop); |
| 1189 // Terminate at native context. | 1167 // Terminate at native context. |
| 1190 __ LoadP(temp, FieldMemOperand(next, HeapObject::kMapOffset)); | 1168 __ LoadP(temp, FieldMemOperand(next, HeapObject::kMapOffset)); |
| 1191 __ LoadRoot(ip, Heap::kNativeContextMapRootIndex); | 1169 __ CompareRoot(temp, Heap::kNativeContextMapRootIndex); |
| 1192 __ cmp(temp, ip); | 1170 __ beq(&fast, Label::kNear); |
| 1193 __ beq(&fast); | |
| 1194 // Check that extension is "the hole". | 1171 // Check that extension is "the hole". |
| 1195 __ LoadP(temp, ContextMemOperand(next, Context::EXTENSION_INDEX)); | 1172 __ LoadP(temp, ContextMemOperand(next, Context::EXTENSION_INDEX)); |
| 1196 __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow); | 1173 __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow); |
| 1197 // Load next context in chain. | 1174 // Load next context in chain. |
| 1198 __ LoadP(next, ContextMemOperand(next, Context::PREVIOUS_INDEX)); | 1175 __ LoadP(next, ContextMemOperand(next, Context::PREVIOUS_INDEX)); |
| 1199 __ b(&loop); | 1176 __ b(&loop); |
| 1200 __ bind(&fast); | 1177 __ bind(&fast); |
| 1201 } | 1178 } |
| 1202 | 1179 |
| 1203 // All extension objects were empty and it is safe to use a normal global | 1180 // All extension objects were empty and it is safe to use a normal global |
| 1204 // load machinery. | 1181 // load machinery. |
| 1205 EmitGlobalVariableLoad(proxy, typeof_mode); | 1182 EmitGlobalVariableLoad(proxy, typeof_mode); |
| 1206 } | 1183 } |
| 1207 | 1184 |
| 1208 | |
| 1209 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var, | 1185 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var, |
| 1210 Label* slow) { | 1186 Label* slow) { |
| 1211 DCHECK(var->IsContextSlot()); | 1187 DCHECK(var->IsContextSlot()); |
| 1212 Register context = cp; | 1188 Register context = cp; |
| 1213 Register next = r6; | 1189 Register next = r5; |
| 1214 Register temp = r7; | 1190 Register temp = r6; |
| 1215 | 1191 |
| 1216 for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) { | 1192 for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) { |
| 1217 if (s->num_heap_slots() > 0) { | 1193 if (s->num_heap_slots() > 0) { |
| 1218 if (s->calls_sloppy_eval()) { | 1194 if (s->calls_sloppy_eval()) { |
| 1219 // Check that extension is "the hole". | 1195 // Check that extension is "the hole". |
| 1220 __ LoadP(temp, ContextMemOperand(context, Context::EXTENSION_INDEX)); | 1196 __ LoadP(temp, ContextMemOperand(context, Context::EXTENSION_INDEX)); |
| 1221 __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow); | 1197 __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow); |
| 1222 } | 1198 } |
| 1223 __ LoadP(next, ContextMemOperand(context, Context::PREVIOUS_INDEX)); | 1199 __ LoadP(next, ContextMemOperand(context, Context::PREVIOUS_INDEX)); |
| 1224 // Walk the rest of the chain without clobbering cp. | 1200 // Walk the rest of the chain without clobbering cp. |
| 1225 context = next; | 1201 context = next; |
| 1226 } | 1202 } |
| 1227 } | 1203 } |
| 1228 // Check that last extension is "the hole". | 1204 // Check that last extension is "the hole". |
| 1229 __ LoadP(temp, ContextMemOperand(context, Context::EXTENSION_INDEX)); | 1205 __ LoadP(temp, ContextMemOperand(context, Context::EXTENSION_INDEX)); |
| 1230 __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow); | 1206 __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow); |
| 1231 | 1207 |
| 1232 // This function is used only for loads, not stores, so it's safe to | 1208 // This function is used only for loads, not stores, so it's safe to |
| 1233 // return an cp-based operand (the write barrier cannot be allowed to | 1209 // return an cp-based operand (the write barrier cannot be allowed to |
| 1234 // destroy the cp register). | 1210 // destroy the cp register). |
| 1235 return ContextMemOperand(context, var->index()); | 1211 return ContextMemOperand(context, var->index()); |
| 1236 } | 1212 } |
| 1237 | 1213 |
| 1238 | |
| 1239 void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy, | 1214 void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy, |
| 1240 TypeofMode typeof_mode, | 1215 TypeofMode typeof_mode, |
| 1241 Label* slow, Label* done) { | 1216 Label* slow, Label* done) { |
| 1242 // Generate fast-case code for variables that might be shadowed by | 1217 // Generate fast-case code for variables that might be shadowed by |
| 1243 // eval-introduced variables. Eval is used a lot without | 1218 // eval-introduced variables. Eval is used a lot without |
| 1244 // introducing variables. In those cases, we do not want to | 1219 // introducing variables. In those cases, we do not want to |
| 1245 // perform a runtime call for all variables in the scope | 1220 // perform a runtime call for all variables in the scope |
| 1246 // containing the eval. | 1221 // containing the eval. |
| 1247 Variable* var = proxy->var(); | 1222 Variable* var = proxy->var(); |
| 1248 if (var->mode() == DYNAMIC_GLOBAL) { | 1223 if (var->mode() == DYNAMIC_GLOBAL) { |
| 1249 EmitLoadGlobalCheckExtensions(proxy, typeof_mode, slow); | 1224 EmitLoadGlobalCheckExtensions(proxy, typeof_mode, slow); |
| 1250 __ b(done); | 1225 __ b(done); |
| 1251 } else if (var->mode() == DYNAMIC_LOCAL) { | 1226 } else if (var->mode() == DYNAMIC_LOCAL) { |
| 1252 Variable* local = var->local_if_not_shadowed(); | 1227 Variable* local = var->local_if_not_shadowed(); |
| 1253 __ LoadP(r3, ContextSlotOperandCheckExtensions(local, slow)); | 1228 __ LoadP(r2, ContextSlotOperandCheckExtensions(local, slow)); |
| 1254 if (local->mode() == LET || local->mode() == CONST || | 1229 if (local->mode() == LET || local->mode() == CONST || |
| 1255 local->mode() == CONST_LEGACY) { | 1230 local->mode() == CONST_LEGACY) { |
| 1256 __ CompareRoot(r3, Heap::kTheHoleValueRootIndex); | 1231 __ CompareRoot(r2, Heap::kTheHoleValueRootIndex); |
| 1257 __ bne(done); | 1232 __ bne(done); |
| 1258 if (local->mode() == CONST_LEGACY) { | 1233 if (local->mode() == CONST_LEGACY) { |
| 1259 __ LoadRoot(r3, Heap::kUndefinedValueRootIndex); | 1234 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex); |
| 1260 } else { // LET || CONST | 1235 } else { // LET || CONST |
| 1261 __ mov(r3, Operand(var->name())); | 1236 __ mov(r2, Operand(var->name())); |
| 1262 __ push(r3); | 1237 __ push(r2); |
| 1263 __ CallRuntime(Runtime::kThrowReferenceError); | 1238 __ CallRuntime(Runtime::kThrowReferenceError); |
| 1264 } | 1239 } |
| 1265 } | 1240 } |
| 1266 __ b(done); | 1241 __ b(done); |
| 1267 } | 1242 } |
| 1268 } | 1243 } |
| 1269 | 1244 |
| 1270 | |
| 1271 void FullCodeGenerator::EmitGlobalVariableLoad(VariableProxy* proxy, | 1245 void FullCodeGenerator::EmitGlobalVariableLoad(VariableProxy* proxy, |
| 1272 TypeofMode typeof_mode) { | 1246 TypeofMode typeof_mode) { |
| 1273 Variable* var = proxy->var(); | 1247 Variable* var = proxy->var(); |
| 1274 DCHECK(var->IsUnallocatedOrGlobalSlot() || | 1248 DCHECK(var->IsUnallocatedOrGlobalSlot() || |
| 1275 (var->IsLookupSlot() && var->mode() == DYNAMIC_GLOBAL)); | 1249 (var->IsLookupSlot() && var->mode() == DYNAMIC_GLOBAL)); |
| 1276 __ LoadGlobalObject(LoadDescriptor::ReceiverRegister()); | 1250 __ LoadGlobalObject(LoadDescriptor::ReceiverRegister()); |
| 1277 __ mov(LoadDescriptor::NameRegister(), Operand(var->name())); | 1251 __ mov(LoadDescriptor::NameRegister(), Operand(var->name())); |
| 1278 __ mov(LoadDescriptor::SlotRegister(), | 1252 __ mov(LoadDescriptor::SlotRegister(), |
| 1279 Operand(SmiFromSlot(proxy->VariableFeedbackSlot()))); | 1253 Operand(SmiFromSlot(proxy->VariableFeedbackSlot()))); |
| 1280 CallLoadIC(typeof_mode); | 1254 CallLoadIC(typeof_mode); |
| 1281 } | 1255 } |
| 1282 | 1256 |
| 1283 | |
| 1284 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy, | 1257 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy, |
| 1285 TypeofMode typeof_mode) { | 1258 TypeofMode typeof_mode) { |
| 1286 // Record position before possible IC call. | 1259 // Record position before possible IC call. |
| 1287 SetExpressionPosition(proxy); | 1260 SetExpressionPosition(proxy); |
| 1288 PrepareForBailoutForId(proxy->BeforeId(), NO_REGISTERS); | 1261 PrepareForBailoutForId(proxy->BeforeId(), NO_REGISTERS); |
| 1289 Variable* var = proxy->var(); | 1262 Variable* var = proxy->var(); |
| 1290 | 1263 |
| 1291 // Three cases: global variables, lookup variables, and all other types of | 1264 // Three cases: global variables, lookup variables, and all other types of |
| 1292 // variables. | 1265 // variables. |
| 1293 switch (var->location()) { | 1266 switch (var->location()) { |
| 1294 case VariableLocation::GLOBAL: | 1267 case VariableLocation::GLOBAL: |
| 1295 case VariableLocation::UNALLOCATED: { | 1268 case VariableLocation::UNALLOCATED: { |
| 1296 Comment cmnt(masm_, "[ Global variable"); | 1269 Comment cmnt(masm_, "[ Global variable"); |
| 1297 EmitGlobalVariableLoad(proxy, typeof_mode); | 1270 EmitGlobalVariableLoad(proxy, typeof_mode); |
| 1298 context()->Plug(r3); | 1271 context()->Plug(r2); |
| 1299 break; | 1272 break; |
| 1300 } | 1273 } |
| 1301 | 1274 |
| 1302 case VariableLocation::PARAMETER: | 1275 case VariableLocation::PARAMETER: |
| 1303 case VariableLocation::LOCAL: | 1276 case VariableLocation::LOCAL: |
| 1304 case VariableLocation::CONTEXT: { | 1277 case VariableLocation::CONTEXT: { |
| 1305 DCHECK_EQ(NOT_INSIDE_TYPEOF, typeof_mode); | 1278 DCHECK_EQ(NOT_INSIDE_TYPEOF, typeof_mode); |
| 1306 Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable" | 1279 Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable" |
| 1307 : "[ Stack variable"); | 1280 : "[ Stack variable"); |
| 1308 if (NeedsHoleCheckForLoad(proxy)) { | 1281 if (NeedsHoleCheckForLoad(proxy)) { |
| 1309 Label done; | 1282 Label done; |
| 1310 // Let and const need a read barrier. | 1283 // Let and const need a read barrier. |
| 1311 GetVar(r3, var); | 1284 GetVar(r2, var); |
| 1312 __ CompareRoot(r3, Heap::kTheHoleValueRootIndex); | 1285 __ CompareRoot(r2, Heap::kTheHoleValueRootIndex); |
| 1313 __ bne(&done); | 1286 __ bne(&done); |
| 1314 if (var->mode() == LET || var->mode() == CONST) { | 1287 if (var->mode() == LET || var->mode() == CONST) { |
| 1315 // Throw a reference error when using an uninitialized let/const | 1288 // Throw a reference error when using an uninitialized let/const |
| 1316 // binding in harmony mode. | 1289 // binding in harmony mode. |
| 1317 __ mov(r3, Operand(var->name())); | 1290 __ mov(r2, Operand(var->name())); |
| 1318 __ push(r3); | 1291 __ push(r2); |
| 1319 __ CallRuntime(Runtime::kThrowReferenceError); | 1292 __ CallRuntime(Runtime::kThrowReferenceError); |
| 1320 } else { | 1293 } else { |
| 1321 // Uninitialized legacy const bindings are unholed. | 1294 // Uninitialized legacy const bindings are unholed. |
| 1322 DCHECK(var->mode() == CONST_LEGACY); | 1295 DCHECK(var->mode() == CONST_LEGACY); |
| 1323 __ LoadRoot(r3, Heap::kUndefinedValueRootIndex); | 1296 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex); |
| 1324 } | 1297 } |
| 1325 __ bind(&done); | 1298 __ bind(&done); |
| 1326 context()->Plug(r3); | 1299 context()->Plug(r2); |
| 1327 break; | 1300 break; |
| 1328 } | 1301 } |
| 1329 context()->Plug(var); | 1302 context()->Plug(var); |
| 1330 break; | 1303 break; |
| 1331 } | 1304 } |
| 1332 | 1305 |
| 1333 case VariableLocation::LOOKUP: { | 1306 case VariableLocation::LOOKUP: { |
| 1334 Comment cmnt(masm_, "[ Lookup variable"); | 1307 Comment cmnt(masm_, "[ Lookup variable"); |
| 1335 Label done, slow; | 1308 Label done, slow; |
| 1336 // Generate code for loading from variables potentially shadowed | 1309 // Generate code for loading from variables potentially shadowed |
| 1337 // by eval-introduced variables. | 1310 // by eval-introduced variables. |
| 1338 EmitDynamicLookupFastCase(proxy, typeof_mode, &slow, &done); | 1311 EmitDynamicLookupFastCase(proxy, typeof_mode, &slow, &done); |
| 1339 __ bind(&slow); | 1312 __ bind(&slow); |
| 1340 __ Push(var->name()); | 1313 __ Push(var->name()); |
| 1341 Runtime::FunctionId function_id = | 1314 Runtime::FunctionId function_id = |
| 1342 typeof_mode == NOT_INSIDE_TYPEOF | 1315 typeof_mode == NOT_INSIDE_TYPEOF |
| 1343 ? Runtime::kLoadLookupSlot | 1316 ? Runtime::kLoadLookupSlot |
| 1344 : Runtime::kLoadLookupSlotInsideTypeof; | 1317 : Runtime::kLoadLookupSlotInsideTypeof; |
| 1345 __ CallRuntime(function_id); | 1318 __ CallRuntime(function_id); |
| 1346 __ bind(&done); | 1319 __ bind(&done); |
| 1347 context()->Plug(r3); | 1320 context()->Plug(r2); |
| 1348 } | 1321 } |
| 1349 } | 1322 } |
| 1350 } | 1323 } |
| 1351 | 1324 |
| 1352 | |
| 1353 void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) { | 1325 void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) { |
| 1354 Comment cmnt(masm_, "[ RegExpLiteral"); | 1326 Comment cmnt(masm_, "[ RegExpLiteral"); |
| 1355 __ LoadP(r6, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); | 1327 __ LoadP(r5, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); |
| 1356 __ LoadSmiLiteral(r5, Smi::FromInt(expr->literal_index())); | 1328 __ LoadSmiLiteral(r4, Smi::FromInt(expr->literal_index())); |
| 1357 __ mov(r4, Operand(expr->pattern())); | 1329 __ mov(r3, Operand(expr->pattern())); |
| 1358 __ LoadSmiLiteral(r3, Smi::FromInt(expr->flags())); | 1330 __ LoadSmiLiteral(r2, Smi::FromInt(expr->flags())); |
| 1359 FastCloneRegExpStub stub(isolate()); | 1331 FastCloneRegExpStub stub(isolate()); |
| 1360 __ CallStub(&stub); | 1332 __ CallStub(&stub); |
| 1361 context()->Plug(r3); | 1333 context()->Plug(r2); |
| 1362 } | 1334 } |
| 1363 | 1335 |
| 1364 | |
| 1365 void FullCodeGenerator::EmitAccessor(ObjectLiteralProperty* property) { | 1336 void FullCodeGenerator::EmitAccessor(ObjectLiteralProperty* property) { |
| 1366 Expression* expression = (property == NULL) ? NULL : property->value(); | 1337 Expression* expression = (property == NULL) ? NULL : property->value(); |
| 1367 if (expression == NULL) { | 1338 if (expression == NULL) { |
| 1368 __ LoadRoot(r4, Heap::kNullValueRootIndex); | 1339 __ LoadRoot(r3, Heap::kNullValueRootIndex); |
| 1369 PushOperand(r4); | 1340 PushOperand(r3); |
| 1370 } else { | 1341 } else { |
| 1371 VisitForStackValue(expression); | 1342 VisitForStackValue(expression); |
| 1372 if (NeedsHomeObject(expression)) { | 1343 if (NeedsHomeObject(expression)) { |
| 1373 DCHECK(property->kind() == ObjectLiteral::Property::GETTER || | 1344 DCHECK(property->kind() == ObjectLiteral::Property::GETTER || |
| 1374 property->kind() == ObjectLiteral::Property::SETTER); | 1345 property->kind() == ObjectLiteral::Property::SETTER); |
| 1375 int offset = property->kind() == ObjectLiteral::Property::GETTER ? 2 : 3; | 1346 int offset = property->kind() == ObjectLiteral::Property::GETTER ? 2 : 3; |
| 1376 EmitSetHomeObject(expression, offset, property->GetSlot()); | 1347 EmitSetHomeObject(expression, offset, property->GetSlot()); |
| 1377 } | 1348 } |
| 1378 } | 1349 } |
| 1379 } | 1350 } |
| 1380 | 1351 |
| 1381 | |
| 1382 void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) { | 1352 void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) { |
| 1383 Comment cmnt(masm_, "[ ObjectLiteral"); | 1353 Comment cmnt(masm_, "[ ObjectLiteral"); |
| 1384 | 1354 |
| 1385 Handle<FixedArray> constant_properties = expr->constant_properties(); | 1355 Handle<FixedArray> constant_properties = expr->constant_properties(); |
| 1386 __ LoadP(r6, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); | 1356 __ LoadP(r5, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); |
| 1387 __ LoadSmiLiteral(r5, Smi::FromInt(expr->literal_index())); | 1357 __ LoadSmiLiteral(r4, Smi::FromInt(expr->literal_index())); |
| 1388 __ mov(r4, Operand(constant_properties)); | 1358 __ mov(r3, Operand(constant_properties)); |
| 1389 int flags = expr->ComputeFlags(); | 1359 int flags = expr->ComputeFlags(); |
| 1390 __ LoadSmiLiteral(r3, Smi::FromInt(flags)); | 1360 __ LoadSmiLiteral(r2, Smi::FromInt(flags)); |
| 1391 if (MustCreateObjectLiteralWithRuntime(expr)) { | 1361 if (MustCreateObjectLiteralWithRuntime(expr)) { |
| 1392 __ Push(r6, r5, r4, r3); | 1362 __ Push(r5, r4, r3, r2); |
| 1393 __ CallRuntime(Runtime::kCreateObjectLiteral); | 1363 __ CallRuntime(Runtime::kCreateObjectLiteral); |
| 1394 } else { | 1364 } else { |
| 1395 FastCloneShallowObjectStub stub(isolate(), expr->properties_count()); | 1365 FastCloneShallowObjectStub stub(isolate(), expr->properties_count()); |
| 1396 __ CallStub(&stub); | 1366 __ CallStub(&stub); |
| 1397 } | 1367 } |
| 1398 PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG); | 1368 PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG); |
| 1399 | 1369 |
| 1400 // If result_saved is true the result is on top of the stack. If | 1370 // If result_saved is true the result is on top of the stack. If |
| 1401 // result_saved is false the result is in r3. | 1371 // result_saved is false the result is in r2. |
| 1402 bool result_saved = false; | 1372 bool result_saved = false; |
| 1403 | 1373 |
| 1404 AccessorTable accessor_table(zone()); | 1374 AccessorTable accessor_table(zone()); |
| 1405 int property_index = 0; | 1375 int property_index = 0; |
| 1406 for (; property_index < expr->properties()->length(); property_index++) { | 1376 for (; property_index < expr->properties()->length(); property_index++) { |
| 1407 ObjectLiteral::Property* property = expr->properties()->at(property_index); | 1377 ObjectLiteral::Property* property = expr->properties()->at(property_index); |
| 1408 if (property->is_computed_name()) break; | 1378 if (property->is_computed_name()) break; |
| 1409 if (property->IsCompileTimeValue()) continue; | 1379 if (property->IsCompileTimeValue()) continue; |
| 1410 | 1380 |
| 1411 Literal* key = property->key()->AsLiteral(); | 1381 Literal* key = property->key()->AsLiteral(); |
| 1412 Expression* value = property->value(); | 1382 Expression* value = property->value(); |
| 1413 if (!result_saved) { | 1383 if (!result_saved) { |
| 1414 PushOperand(r3); // Save result on stack | 1384 PushOperand(r2); // Save result on stack |
| 1415 result_saved = true; | 1385 result_saved = true; |
| 1416 } | 1386 } |
| 1417 switch (property->kind()) { | 1387 switch (property->kind()) { |
| 1418 case ObjectLiteral::Property::CONSTANT: | 1388 case ObjectLiteral::Property::CONSTANT: |
| 1419 UNREACHABLE(); | 1389 UNREACHABLE(); |
| 1420 case ObjectLiteral::Property::MATERIALIZED_LITERAL: | 1390 case ObjectLiteral::Property::MATERIALIZED_LITERAL: |
| 1421 DCHECK(!CompileTimeValue::IsCompileTimeValue(property->value())); | 1391 DCHECK(!CompileTimeValue::IsCompileTimeValue(property->value())); |
| 1422 // Fall through. | 1392 // Fall through. |
| 1423 case ObjectLiteral::Property::COMPUTED: | 1393 case ObjectLiteral::Property::COMPUTED: |
| 1424 // It is safe to use [[Put]] here because the boilerplate already | 1394 // It is safe to use [[Put]] here because the boilerplate already |
| 1425 // contains computed properties with an uninitialized value. | 1395 // contains computed properties with an uninitialized value. |
| 1426 if (key->value()->IsInternalizedString()) { | 1396 if (key->value()->IsInternalizedString()) { |
| 1427 if (property->emit_store()) { | 1397 if (property->emit_store()) { |
| 1428 VisitForAccumulatorValue(value); | 1398 VisitForAccumulatorValue(value); |
| 1429 DCHECK(StoreDescriptor::ValueRegister().is(r3)); | 1399 DCHECK(StoreDescriptor::ValueRegister().is(r2)); |
| 1430 __ mov(StoreDescriptor::NameRegister(), Operand(key->value())); | 1400 __ mov(StoreDescriptor::NameRegister(), Operand(key->value())); |
| 1431 __ LoadP(StoreDescriptor::ReceiverRegister(), MemOperand(sp)); | 1401 __ LoadP(StoreDescriptor::ReceiverRegister(), MemOperand(sp)); |
| 1432 EmitLoadStoreICSlot(property->GetSlot(0)); | 1402 EmitLoadStoreICSlot(property->GetSlot(0)); |
| 1433 CallStoreIC(); | 1403 CallStoreIC(); |
| 1434 PrepareForBailoutForId(key->id(), NO_REGISTERS); | 1404 PrepareForBailoutForId(key->id(), NO_REGISTERS); |
| 1435 | 1405 |
| 1436 if (NeedsHomeObject(value)) { | 1406 if (NeedsHomeObject(value)) { |
| 1437 EmitSetHomeObjectAccumulator(value, 0, property->GetSlot(1)); | 1407 EmitSetHomeObjectAccumulator(value, 0, property->GetSlot(1)); |
| 1438 } | 1408 } |
| 1439 } else { | 1409 } else { |
| 1440 VisitForEffect(value); | 1410 VisitForEffect(value); |
| 1441 } | 1411 } |
| 1442 break; | 1412 break; |
| 1443 } | 1413 } |
| 1444 // Duplicate receiver on stack. | 1414 // Duplicate receiver on stack. |
| 1445 __ LoadP(r3, MemOperand(sp)); | 1415 __ LoadP(r2, MemOperand(sp)); |
| 1446 PushOperand(r3); | 1416 PushOperand(r2); |
| 1447 VisitForStackValue(key); | 1417 VisitForStackValue(key); |
| 1448 VisitForStackValue(value); | 1418 VisitForStackValue(value); |
| 1449 if (property->emit_store()) { | 1419 if (property->emit_store()) { |
| 1450 if (NeedsHomeObject(value)) { | 1420 if (NeedsHomeObject(value)) { |
| 1451 EmitSetHomeObject(value, 2, property->GetSlot()); | 1421 EmitSetHomeObject(value, 2, property->GetSlot()); |
| 1452 } | 1422 } |
| 1453 __ LoadSmiLiteral(r3, Smi::FromInt(SLOPPY)); // PropertyAttributes | 1423 __ LoadSmiLiteral(r2, Smi::FromInt(SLOPPY)); // PropertyAttributes |
| 1454 PushOperand(r3); | 1424 PushOperand(r2); |
| 1455 CallRuntimeWithOperands(Runtime::kSetProperty); | 1425 CallRuntimeWithOperands(Runtime::kSetProperty); |
| 1456 } else { | 1426 } else { |
| 1457 DropOperands(3); | 1427 DropOperands(3); |
| 1458 } | 1428 } |
| 1459 break; | 1429 break; |
| 1460 case ObjectLiteral::Property::PROTOTYPE: | 1430 case ObjectLiteral::Property::PROTOTYPE: |
| 1461 // Duplicate receiver on stack. | 1431 // Duplicate receiver on stack. |
| 1462 __ LoadP(r3, MemOperand(sp)); | 1432 __ LoadP(r2, MemOperand(sp)); |
| 1463 PushOperand(r3); | 1433 PushOperand(r2); |
| 1464 VisitForStackValue(value); | 1434 VisitForStackValue(value); |
| 1465 DCHECK(property->emit_store()); | 1435 DCHECK(property->emit_store()); |
| 1466 CallRuntimeWithOperands(Runtime::kInternalSetPrototype); | 1436 CallRuntimeWithOperands(Runtime::kInternalSetPrototype); |
| 1467 PrepareForBailoutForId(expr->GetIdForPropertySet(property_index), | 1437 PrepareForBailoutForId(expr->GetIdForPropertySet(property_index), |
| 1468 NO_REGISTERS); | 1438 NO_REGISTERS); |
| 1469 break; | 1439 break; |
| 1470 case ObjectLiteral::Property::GETTER: | 1440 case ObjectLiteral::Property::GETTER: |
| 1471 if (property->emit_store()) { | 1441 if (property->emit_store()) { |
| 1472 accessor_table.lookup(key)->second->getter = property; | 1442 accessor_table.lookup(key)->second->getter = property; |
| 1473 } | 1443 } |
| 1474 break; | 1444 break; |
| 1475 case ObjectLiteral::Property::SETTER: | 1445 case ObjectLiteral::Property::SETTER: |
| 1476 if (property->emit_store()) { | 1446 if (property->emit_store()) { |
| 1477 accessor_table.lookup(key)->second->setter = property; | 1447 accessor_table.lookup(key)->second->setter = property; |
| 1478 } | 1448 } |
| 1479 break; | 1449 break; |
| 1480 } | 1450 } |
| 1481 } | 1451 } |
| 1482 | 1452 |
| 1483 // Emit code to define accessors, using only a single call to the runtime for | 1453 // Emit code to define accessors, using only a single call to the runtime for |
| 1484 // each pair of corresponding getters and setters. | 1454 // each pair of corresponding getters and setters. |
| 1485 for (AccessorTable::Iterator it = accessor_table.begin(); | 1455 for (AccessorTable::Iterator it = accessor_table.begin(); |
| 1486 it != accessor_table.end(); ++it) { | 1456 it != accessor_table.end(); ++it) { |
| 1487 __ LoadP(r3, MemOperand(sp)); // Duplicate receiver. | 1457 __ LoadP(r2, MemOperand(sp)); // Duplicate receiver. |
| 1488 PushOperand(r3); | 1458 PushOperand(r2); |
| 1489 VisitForStackValue(it->first); | 1459 VisitForStackValue(it->first); |
| 1490 EmitAccessor(it->second->getter); | 1460 EmitAccessor(it->second->getter); |
| 1491 EmitAccessor(it->second->setter); | 1461 EmitAccessor(it->second->setter); |
| 1492 __ LoadSmiLiteral(r3, Smi::FromInt(NONE)); | 1462 __ LoadSmiLiteral(r2, Smi::FromInt(NONE)); |
| 1493 PushOperand(r3); | 1463 PushOperand(r2); |
| 1494 CallRuntimeWithOperands(Runtime::kDefineAccessorPropertyUnchecked); | 1464 CallRuntimeWithOperands(Runtime::kDefineAccessorPropertyUnchecked); |
| 1495 } | 1465 } |
| 1496 | 1466 |
| 1497 // Object literals have two parts. The "static" part on the left contains no | 1467 // Object literals have two parts. The "static" part on the left contains no |
| 1498 // computed property names, and so we can compute its map ahead of time; see | 1468 // computed property names, and so we can compute its map ahead of time; see |
| 1499 // runtime.cc::CreateObjectLiteralBoilerplate. The second "dynamic" part | 1469 // runtime.cc::CreateObjectLiteralBoilerplate. The second "dynamic" part |
| 1500 // starts with the first computed property name, and continues with all | 1470 // starts with the first computed property name, and continues with all |
| 1501 // properties to its right. All the code from above initializes the static | 1471 // properties to its right. All the code from above initializes the static |
| 1502 // component of the object literal, and arranges for the map of the result to | 1472 // component of the object literal, and arranges for the map of the result to |
| 1503 // reflect the static order in which the keys appear. For the dynamic | 1473 // reflect the static order in which the keys appear. For the dynamic |
| 1504 // properties, we compile them into a series of "SetOwnProperty" runtime | 1474 // properties, we compile them into a series of "SetOwnProperty" runtime |
| 1505 // calls. This will preserve insertion order. | 1475 // calls. This will preserve insertion order. |
| 1506 for (; property_index < expr->properties()->length(); property_index++) { | 1476 for (; property_index < expr->properties()->length(); property_index++) { |
| 1507 ObjectLiteral::Property* property = expr->properties()->at(property_index); | 1477 ObjectLiteral::Property* property = expr->properties()->at(property_index); |
| 1508 | 1478 |
| 1509 Expression* value = property->value(); | 1479 Expression* value = property->value(); |
| 1510 if (!result_saved) { | 1480 if (!result_saved) { |
| 1511 PushOperand(r3); // Save result on the stack | 1481 PushOperand(r2); // Save result on the stack |
| 1512 result_saved = true; | 1482 result_saved = true; |
| 1513 } | 1483 } |
| 1514 | 1484 |
| 1515 __ LoadP(r3, MemOperand(sp)); // Duplicate receiver. | 1485 __ LoadP(r2, MemOperand(sp)); // Duplicate receiver. |
| 1516 PushOperand(r3); | 1486 PushOperand(r2); |
| 1517 | 1487 |
| 1518 if (property->kind() == ObjectLiteral::Property::PROTOTYPE) { | 1488 if (property->kind() == ObjectLiteral::Property::PROTOTYPE) { |
| 1519 DCHECK(!property->is_computed_name()); | 1489 DCHECK(!property->is_computed_name()); |
| 1520 VisitForStackValue(value); | 1490 VisitForStackValue(value); |
| 1521 DCHECK(property->emit_store()); | 1491 DCHECK(property->emit_store()); |
| 1522 CallRuntimeWithOperands(Runtime::kInternalSetPrototype); | 1492 CallRuntimeWithOperands(Runtime::kInternalSetPrototype); |
| 1523 PrepareForBailoutForId(expr->GetIdForPropertySet(property_index), | 1493 PrepareForBailoutForId(expr->GetIdForPropertySet(property_index), |
| 1524 NO_REGISTERS); | 1494 NO_REGISTERS); |
| 1525 } else { | 1495 } else { |
| 1526 EmitPropertyKey(property, expr->GetIdForPropertyName(property_index)); | 1496 EmitPropertyKey(property, expr->GetIdForPropertyName(property_index)); |
| (...skipping 27 matching lines...) Expand all Loading... |
| 1554 case ObjectLiteral::Property::SETTER: | 1524 case ObjectLiteral::Property::SETTER: |
| 1555 PushOperand(Smi::FromInt(NONE)); | 1525 PushOperand(Smi::FromInt(NONE)); |
| 1556 CallRuntimeWithOperands(Runtime::kDefineSetterPropertyUnchecked); | 1526 CallRuntimeWithOperands(Runtime::kDefineSetterPropertyUnchecked); |
| 1557 break; | 1527 break; |
| 1558 } | 1528 } |
| 1559 } | 1529 } |
| 1560 } | 1530 } |
| 1561 | 1531 |
| 1562 if (expr->has_function()) { | 1532 if (expr->has_function()) { |
| 1563 DCHECK(result_saved); | 1533 DCHECK(result_saved); |
| 1564 __ LoadP(r3, MemOperand(sp)); | 1534 __ LoadP(r2, MemOperand(sp)); |
| 1565 __ push(r3); | 1535 __ push(r2); |
| 1566 __ CallRuntime(Runtime::kToFastProperties); | 1536 __ CallRuntime(Runtime::kToFastProperties); |
| 1567 } | 1537 } |
| 1568 | 1538 |
| 1569 if (result_saved) { | 1539 if (result_saved) { |
| 1570 context()->PlugTOS(); | 1540 context()->PlugTOS(); |
| 1571 } else { | 1541 } else { |
| 1572 context()->Plug(r3); | 1542 context()->Plug(r2); |
| 1573 } | 1543 } |
| 1574 } | 1544 } |
| 1575 | 1545 |
| 1576 | |
| 1577 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) { | 1546 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) { |
| 1578 Comment cmnt(masm_, "[ ArrayLiteral"); | 1547 Comment cmnt(masm_, "[ ArrayLiteral"); |
| 1579 | 1548 |
| 1580 Handle<FixedArray> constant_elements = expr->constant_elements(); | 1549 Handle<FixedArray> constant_elements = expr->constant_elements(); |
| 1581 bool has_fast_elements = | 1550 bool has_fast_elements = |
| 1582 IsFastObjectElementsKind(expr->constant_elements_kind()); | 1551 IsFastObjectElementsKind(expr->constant_elements_kind()); |
| 1583 Handle<FixedArrayBase> constant_elements_values( | 1552 Handle<FixedArrayBase> constant_elements_values( |
| 1584 FixedArrayBase::cast(constant_elements->get(1))); | 1553 FixedArrayBase::cast(constant_elements->get(1))); |
| 1585 | 1554 |
| 1586 AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE; | 1555 AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE; |
| 1587 if (has_fast_elements && !FLAG_allocation_site_pretenuring) { | 1556 if (has_fast_elements && !FLAG_allocation_site_pretenuring) { |
| 1588 // If the only customer of allocation sites is transitioning, then | 1557 // If the only customer of allocation sites is transitioning, then |
| 1589 // we can turn it off if we don't have anywhere else to transition to. | 1558 // we can turn it off if we don't have anywhere else to transition to. |
| 1590 allocation_site_mode = DONT_TRACK_ALLOCATION_SITE; | 1559 allocation_site_mode = DONT_TRACK_ALLOCATION_SITE; |
| 1591 } | 1560 } |
| 1592 | 1561 |
| 1593 __ LoadP(r6, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); | 1562 __ LoadP(r5, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); |
| 1594 __ LoadSmiLiteral(r5, Smi::FromInt(expr->literal_index())); | 1563 __ LoadSmiLiteral(r4, Smi::FromInt(expr->literal_index())); |
| 1595 __ mov(r4, Operand(constant_elements)); | 1564 __ mov(r3, Operand(constant_elements)); |
| 1596 if (MustCreateArrayLiteralWithRuntime(expr)) { | 1565 if (MustCreateArrayLiteralWithRuntime(expr)) { |
| 1597 __ LoadSmiLiteral(r3, Smi::FromInt(expr->ComputeFlags())); | 1566 __ LoadSmiLiteral(r2, Smi::FromInt(expr->ComputeFlags())); |
| 1598 __ Push(r6, r5, r4, r3); | 1567 __ Push(r5, r4, r3, r2); |
| 1599 __ CallRuntime(Runtime::kCreateArrayLiteral); | 1568 __ CallRuntime(Runtime::kCreateArrayLiteral); |
| 1600 } else { | 1569 } else { |
| 1601 FastCloneShallowArrayStub stub(isolate(), allocation_site_mode); | 1570 FastCloneShallowArrayStub stub(isolate(), allocation_site_mode); |
| 1602 __ CallStub(&stub); | 1571 __ CallStub(&stub); |
| 1603 } | 1572 } |
| 1604 PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG); | 1573 PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG); |
| 1605 | 1574 |
| 1606 bool result_saved = false; // Is the result saved to the stack? | 1575 bool result_saved = false; // Is the result saved to the stack? |
| 1607 ZoneList<Expression*>* subexprs = expr->values(); | 1576 ZoneList<Expression*>* subexprs = expr->values(); |
| 1608 int length = subexprs->length(); | 1577 int length = subexprs->length(); |
| 1609 | 1578 |
| 1610 // Emit code to evaluate all the non-constant subexpressions and to store | 1579 // Emit code to evaluate all the non-constant subexpressions and to store |
| 1611 // them into the newly cloned array. | 1580 // them into the newly cloned array. |
| 1612 int array_index = 0; | 1581 int array_index = 0; |
| 1613 for (; array_index < length; array_index++) { | 1582 for (; array_index < length; array_index++) { |
| 1614 Expression* subexpr = subexprs->at(array_index); | 1583 Expression* subexpr = subexprs->at(array_index); |
| 1615 DCHECK(!subexpr->IsSpread()); | 1584 DCHECK(!subexpr->IsSpread()); |
| 1616 // If the subexpression is a literal or a simple materialized literal it | 1585 // If the subexpression is a literal or a simple materialized literal it |
| 1617 // is already set in the cloned array. | 1586 // is already set in the cloned array. |
| 1618 if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue; | 1587 if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue; |
| 1619 | 1588 |
| 1620 if (!result_saved) { | 1589 if (!result_saved) { |
| 1621 PushOperand(r3); | 1590 PushOperand(r2); |
| 1622 result_saved = true; | 1591 result_saved = true; |
| 1623 } | 1592 } |
| 1624 VisitForAccumulatorValue(subexpr); | 1593 VisitForAccumulatorValue(subexpr); |
| 1625 | 1594 |
| 1626 __ LoadSmiLiteral(StoreDescriptor::NameRegister(), | 1595 __ LoadSmiLiteral(StoreDescriptor::NameRegister(), |
| 1627 Smi::FromInt(array_index)); | 1596 Smi::FromInt(array_index)); |
| 1628 __ LoadP(StoreDescriptor::ReceiverRegister(), MemOperand(sp, 0)); | 1597 __ LoadP(StoreDescriptor::ReceiverRegister(), MemOperand(sp, 0)); |
| 1629 EmitLoadStoreICSlot(expr->LiteralFeedbackSlot()); | 1598 EmitLoadStoreICSlot(expr->LiteralFeedbackSlot()); |
| 1630 Handle<Code> ic = | 1599 Handle<Code> ic = |
| 1631 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code(); | 1600 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code(); |
| 1632 CallIC(ic); | 1601 CallIC(ic); |
| 1633 | 1602 |
| 1634 PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS); | 1603 PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS); |
| 1635 } | 1604 } |
| 1636 | 1605 |
| 1637 // In case the array literal contains spread expressions it has two parts. The | 1606 // In case the array literal contains spread expressions it has two parts. The |
| 1638 // first part is the "static" array which has a literal index is handled | 1607 // first part is the "static" array which has a literal index is handled |
| 1639 // above. The second part is the part after the first spread expression | 1608 // above. The second part is the part after the first spread expression |
| 1640 // (inclusive) and these elements gets appended to the array. Note that the | 1609 // (inclusive) and these elements gets appended to the array. Note that the |
| 1641 // number elements an iterable produces is unknown ahead of time. | 1610 // number elements an iterable produces is unknown ahead of time. |
| 1642 if (array_index < length && result_saved) { | 1611 if (array_index < length && result_saved) { |
| 1643 PopOperand(r3); | 1612 PopOperand(r2); |
| 1644 result_saved = false; | 1613 result_saved = false; |
| 1645 } | 1614 } |
| 1646 for (; array_index < length; array_index++) { | 1615 for (; array_index < length; array_index++) { |
| 1647 Expression* subexpr = subexprs->at(array_index); | 1616 Expression* subexpr = subexprs->at(array_index); |
| 1648 | 1617 |
| 1649 PushOperand(r3); | 1618 PushOperand(r2); |
| 1650 DCHECK(!subexpr->IsSpread()); | 1619 DCHECK(!subexpr->IsSpread()); |
| 1651 VisitForStackValue(subexpr); | 1620 VisitForStackValue(subexpr); |
| 1652 CallRuntimeWithOperands(Runtime::kAppendElement); | 1621 CallRuntimeWithOperands(Runtime::kAppendElement); |
| 1653 | 1622 |
| 1654 PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS); | 1623 PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS); |
| 1655 } | 1624 } |
| 1656 | 1625 |
| 1657 if (result_saved) { | 1626 if (result_saved) { |
| 1658 context()->PlugTOS(); | 1627 context()->PlugTOS(); |
| 1659 } else { | 1628 } else { |
| 1660 context()->Plug(r3); | 1629 context()->Plug(r2); |
| 1661 } | 1630 } |
| 1662 } | 1631 } |
| 1663 | 1632 |
| 1664 | |
| 1665 void FullCodeGenerator::VisitAssignment(Assignment* expr) { | 1633 void FullCodeGenerator::VisitAssignment(Assignment* expr) { |
| 1666 DCHECK(expr->target()->IsValidReferenceExpressionOrThis()); | 1634 DCHECK(expr->target()->IsValidReferenceExpressionOrThis()); |
| 1667 | 1635 |
| 1668 Comment cmnt(masm_, "[ Assignment"); | 1636 Comment cmnt(masm_, "[ Assignment"); |
| 1669 SetExpressionPosition(expr, INSERT_BREAK); | 1637 SetExpressionPosition(expr, INSERT_BREAK); |
| 1670 | 1638 |
| 1671 Property* property = expr->target()->AsProperty(); | 1639 Property* property = expr->target()->AsProperty(); |
| 1672 LhsKind assign_type = Property::GetAssignType(property); | 1640 LhsKind assign_type = Property::GetAssignType(property); |
| 1673 | 1641 |
| 1674 // Evaluate LHS expression. | 1642 // Evaluate LHS expression. |
| (...skipping 10 matching lines...) Expand all Loading... |
| 1685 VisitForStackValue(property->obj()); | 1653 VisitForStackValue(property->obj()); |
| 1686 } | 1654 } |
| 1687 break; | 1655 break; |
| 1688 case NAMED_SUPER_PROPERTY: | 1656 case NAMED_SUPER_PROPERTY: |
| 1689 VisitForStackValue( | 1657 VisitForStackValue( |
| 1690 property->obj()->AsSuperPropertyReference()->this_var()); | 1658 property->obj()->AsSuperPropertyReference()->this_var()); |
| 1691 VisitForAccumulatorValue( | 1659 VisitForAccumulatorValue( |
| 1692 property->obj()->AsSuperPropertyReference()->home_object()); | 1660 property->obj()->AsSuperPropertyReference()->home_object()); |
| 1693 PushOperand(result_register()); | 1661 PushOperand(result_register()); |
| 1694 if (expr->is_compound()) { | 1662 if (expr->is_compound()) { |
| 1695 const Register scratch = r4; | 1663 const Register scratch = r3; |
| 1696 __ LoadP(scratch, MemOperand(sp, kPointerSize)); | 1664 __ LoadP(scratch, MemOperand(sp, kPointerSize)); |
| 1697 PushOperands(scratch, result_register()); | 1665 PushOperands(scratch, result_register()); |
| 1698 } | 1666 } |
| 1699 break; | 1667 break; |
| 1700 case KEYED_SUPER_PROPERTY: { | 1668 case KEYED_SUPER_PROPERTY: { |
| 1701 const Register scratch = r4; | 1669 const Register scratch = r3; |
| 1702 VisitForStackValue( | 1670 VisitForStackValue( |
| 1703 property->obj()->AsSuperPropertyReference()->this_var()); | 1671 property->obj()->AsSuperPropertyReference()->this_var()); |
| 1704 VisitForAccumulatorValue( | 1672 VisitForAccumulatorValue( |
| 1705 property->obj()->AsSuperPropertyReference()->home_object()); | 1673 property->obj()->AsSuperPropertyReference()->home_object()); |
| 1706 __ mr(scratch, result_register()); | 1674 __ LoadRR(scratch, result_register()); |
| 1707 VisitForAccumulatorValue(property->key()); | 1675 VisitForAccumulatorValue(property->key()); |
| 1708 PushOperands(scratch, result_register()); | 1676 PushOperands(scratch, result_register()); |
| 1709 if (expr->is_compound()) { | 1677 if (expr->is_compound()) { |
| 1710 const Register scratch1 = r5; | 1678 const Register scratch1 = r4; |
| 1711 __ LoadP(scratch1, MemOperand(sp, 2 * kPointerSize)); | 1679 __ LoadP(scratch1, MemOperand(sp, 2 * kPointerSize)); |
| 1712 PushOperands(scratch1, scratch, result_register()); | 1680 PushOperands(scratch1, scratch, result_register()); |
| 1713 } | 1681 } |
| 1714 break; | 1682 break; |
| 1715 } | 1683 } |
| 1716 case KEYED_PROPERTY: | 1684 case KEYED_PROPERTY: |
| 1717 if (expr->is_compound()) { | 1685 if (expr->is_compound()) { |
| 1718 VisitForStackValue(property->obj()); | 1686 VisitForStackValue(property->obj()); |
| 1719 VisitForStackValue(property->key()); | 1687 VisitForStackValue(property->key()); |
| 1720 __ LoadP(LoadDescriptor::ReceiverRegister(), | 1688 __ LoadP(LoadDescriptor::ReceiverRegister(), |
| (...skipping 29 matching lines...) Expand all Loading... |
| 1750 PrepareForBailoutForId(property->LoadId(), TOS_REG); | 1718 PrepareForBailoutForId(property->LoadId(), TOS_REG); |
| 1751 break; | 1719 break; |
| 1752 case KEYED_PROPERTY: | 1720 case KEYED_PROPERTY: |
| 1753 EmitKeyedPropertyLoad(property); | 1721 EmitKeyedPropertyLoad(property); |
| 1754 PrepareForBailoutForId(property->LoadId(), TOS_REG); | 1722 PrepareForBailoutForId(property->LoadId(), TOS_REG); |
| 1755 break; | 1723 break; |
| 1756 } | 1724 } |
| 1757 } | 1725 } |
| 1758 | 1726 |
| 1759 Token::Value op = expr->binary_op(); | 1727 Token::Value op = expr->binary_op(); |
| 1760 PushOperand(r3); // Left operand goes on the stack. | 1728 PushOperand(r2); // Left operand goes on the stack. |
| 1761 VisitForAccumulatorValue(expr->value()); | 1729 VisitForAccumulatorValue(expr->value()); |
| 1762 | 1730 |
| 1763 AccumulatorValueContext context(this); | 1731 AccumulatorValueContext context(this); |
| 1764 if (ShouldInlineSmiCase(op)) { | 1732 if (ShouldInlineSmiCase(op)) { |
| 1765 EmitInlineSmiBinaryOp(expr->binary_operation(), op, expr->target(), | 1733 EmitInlineSmiBinaryOp(expr->binary_operation(), op, expr->target(), |
| 1766 expr->value()); | 1734 expr->value()); |
| 1767 } else { | 1735 } else { |
| 1768 EmitBinaryOp(expr->binary_operation(), op); | 1736 EmitBinaryOp(expr->binary_operation(), op); |
| 1769 } | 1737 } |
| 1770 | 1738 |
| 1771 // Deoptimization point in case the binary operation may have side effects. | 1739 // Deoptimization point in case the binary operation may have side effects. |
| 1772 PrepareForBailout(expr->binary_operation(), TOS_REG); | 1740 PrepareForBailout(expr->binary_operation(), TOS_REG); |
| 1773 } else { | 1741 } else { |
| 1774 VisitForAccumulatorValue(expr->value()); | 1742 VisitForAccumulatorValue(expr->value()); |
| 1775 } | 1743 } |
| 1776 | 1744 |
| 1777 SetExpressionPosition(expr); | 1745 SetExpressionPosition(expr); |
| 1778 | 1746 |
| 1779 // Store the value. | 1747 // Store the value. |
| 1780 switch (assign_type) { | 1748 switch (assign_type) { |
| 1781 case VARIABLE: | 1749 case VARIABLE: |
| 1782 EmitVariableAssignment(expr->target()->AsVariableProxy()->var(), | 1750 EmitVariableAssignment(expr->target()->AsVariableProxy()->var(), |
| 1783 expr->op(), expr->AssignmentSlot()); | 1751 expr->op(), expr->AssignmentSlot()); |
| 1784 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); | 1752 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); |
| 1785 context()->Plug(r3); | 1753 context()->Plug(r2); |
| 1786 break; | 1754 break; |
| 1787 case NAMED_PROPERTY: | 1755 case NAMED_PROPERTY: |
| 1788 EmitNamedPropertyAssignment(expr); | 1756 EmitNamedPropertyAssignment(expr); |
| 1789 break; | 1757 break; |
| 1790 case NAMED_SUPER_PROPERTY: | 1758 case NAMED_SUPER_PROPERTY: |
| 1791 EmitNamedSuperPropertyStore(property); | 1759 EmitNamedSuperPropertyStore(property); |
| 1792 context()->Plug(r3); | 1760 context()->Plug(r2); |
| 1793 break; | 1761 break; |
| 1794 case KEYED_SUPER_PROPERTY: | 1762 case KEYED_SUPER_PROPERTY: |
| 1795 EmitKeyedSuperPropertyStore(property); | 1763 EmitKeyedSuperPropertyStore(property); |
| 1796 context()->Plug(r3); | 1764 context()->Plug(r2); |
| 1797 break; | 1765 break; |
| 1798 case KEYED_PROPERTY: | 1766 case KEYED_PROPERTY: |
| 1799 EmitKeyedPropertyAssignment(expr); | 1767 EmitKeyedPropertyAssignment(expr); |
| 1800 break; | 1768 break; |
| 1801 } | 1769 } |
| 1802 } | 1770 } |
| 1803 | 1771 |
| 1804 | |
| 1805 void FullCodeGenerator::VisitYield(Yield* expr) { | 1772 void FullCodeGenerator::VisitYield(Yield* expr) { |
| 1806 Comment cmnt(masm_, "[ Yield"); | 1773 Comment cmnt(masm_, "[ Yield"); |
| 1807 SetExpressionPosition(expr); | 1774 SetExpressionPosition(expr); |
| 1808 | 1775 |
| 1809 // Evaluate yielded value first; the initial iterator definition depends on | 1776 // Evaluate yielded value first; the initial iterator definition depends on |
| 1810 // this. It stays on the stack while we update the iterator. | 1777 // this. It stays on the stack while we update the iterator. |
| 1811 VisitForStackValue(expr->expression()); | 1778 VisitForStackValue(expr->expression()); |
| 1812 | 1779 |
| 1813 Label suspend, continuation, post_runtime, resume; | 1780 switch (expr->yield_kind()) { |
| 1781 case Yield::kSuspend: |
| 1782 // Pop value from top-of-stack slot; box result into result register. |
| 1783 EmitCreateIteratorResult(false); |
| 1784 PushOperand(result_register()); |
| 1785 // Fall through. |
| 1786 case Yield::kInitial: { |
| 1787 Label suspend, continuation, post_runtime, resume; |
| 1814 | 1788 |
| 1815 __ b(&suspend); | 1789 __ b(&suspend, Label::kNear); |
| 1816 __ bind(&continuation); | 1790 __ bind(&continuation); |
| 1817 // When we arrive here, the stack top is the resume mode and | 1791 // When we arrive here, the stack top is the resume mode and |
| 1818 // result_register() holds the input value (the argument given to the | 1792 // result_register() holds the input value (the argument given to the |
| 1819 // respective resume operation). | 1793 // respective resume operation). |
| 1820 __ RecordGeneratorContinuation(); | 1794 __ RecordGeneratorContinuation(); |
| 1821 __ pop(r4); | 1795 __ pop(r3); |
| 1822 __ CmpSmiLiteral(r4, Smi::FromInt(JSGeneratorObject::RETURN), r0); | 1796 __ CmpSmiLiteral(r3, Smi::FromInt(JSGeneratorObject::RETURN), r0); |
| 1823 __ bne(&resume); | 1797 __ bne(&resume); |
| 1824 __ push(result_register()); | 1798 __ push(result_register()); |
| 1825 EmitCreateIteratorResult(true); | 1799 EmitCreateIteratorResult(true); |
| 1826 EmitUnwindAndReturn(); | 1800 EmitUnwindAndReturn(); |
| 1827 | 1801 |
| 1828 __ bind(&suspend); | 1802 __ bind(&suspend); |
| 1829 OperandStackDepthIncrement(1); // Not popped on this path. | 1803 OperandStackDepthIncrement(1); // Not popped on this path. |
| 1830 VisitForAccumulatorValue(expr->generator_object()); | 1804 VisitForAccumulatorValue(expr->generator_object()); |
| 1831 DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos())); | 1805 DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos())); |
| 1832 __ LoadSmiLiteral(r4, Smi::FromInt(continuation.pos())); | 1806 __ LoadSmiLiteral(r3, Smi::FromInt(continuation.pos())); |
| 1833 __ StoreP(r4, FieldMemOperand(r3, JSGeneratorObject::kContinuationOffset), | 1807 __ StoreP(r3, |
| 1834 r0); | 1808 FieldMemOperand(r2, JSGeneratorObject::kContinuationOffset)); |
| 1835 __ StoreP(cp, FieldMemOperand(r3, JSGeneratorObject::kContextOffset), r0); | 1809 __ StoreP(cp, FieldMemOperand(r2, JSGeneratorObject::kContextOffset)); |
| 1836 __ mr(r4, cp); | 1810 __ LoadRR(r3, cp); |
| 1837 __ RecordWriteField(r3, JSGeneratorObject::kContextOffset, r4, r5, | 1811 __ RecordWriteField(r2, JSGeneratorObject::kContextOffset, r3, r4, |
| 1838 kLRHasBeenSaved, kDontSaveFPRegs); | 1812 kLRHasBeenSaved, kDontSaveFPRegs); |
| 1839 __ addi(r4, fp, Operand(StandardFrameConstants::kExpressionsOffset)); | 1813 __ AddP(r3, fp, Operand(StandardFrameConstants::kExpressionsOffset)); |
| 1840 __ cmp(sp, r4); | 1814 __ CmpP(sp, r3); |
| 1841 __ beq(&post_runtime); | 1815 __ beq(&post_runtime); |
| 1842 __ push(r3); // generator object | 1816 __ push(r2); // generator object |
| 1843 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1); | 1817 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1); |
| 1844 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); | 1818 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
| 1845 __ bind(&post_runtime); | 1819 __ bind(&post_runtime); |
| 1846 PopOperand(result_register()); | 1820 PopOperand(result_register()); |
| 1847 EmitReturnSequence(); | 1821 EmitReturnSequence(); |
| 1848 | 1822 |
| 1849 __ bind(&resume); | 1823 __ bind(&resume); |
| 1850 context()->Plug(result_register()); | 1824 context()->Plug(result_register()); |
| 1825 break; |
| 1826 } |
| 1827 |
| 1828 case Yield::kFinal: { |
| 1829 // Pop value from top-of-stack slot, box result into result register. |
| 1830 EmitCreateIteratorResult(true); |
| 1831 EmitUnwindAndReturn(); |
| 1832 break; |
| 1833 } |
| 1834 |
| 1835 case Yield::kDelegating: |
| 1836 UNREACHABLE(); |
| 1837 } |
| 1851 } | 1838 } |
| 1852 | 1839 |
| 1853 | |
| 1854 void FullCodeGenerator::EmitGeneratorResume( | 1840 void FullCodeGenerator::EmitGeneratorResume( |
| 1855 Expression* generator, Expression* value, | 1841 Expression* generator, Expression* value, |
| 1856 JSGeneratorObject::ResumeMode resume_mode) { | 1842 JSGeneratorObject::ResumeMode resume_mode) { |
| 1857 // The value stays in r3, and is ultimately read by the resumed generator, as | 1843 // The value stays in r2, and is ultimately read by the resumed generator, as |
| 1858 // if CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. Or it | 1844 // if CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. Or it |
| 1859 // is read to throw the value when the resumed generator is already closed. | 1845 // is read to throw the value when the resumed generator is already closed. |
| 1860 // r4 will hold the generator object until the activation has been resumed. | 1846 // r3 will hold the generator object until the activation has been resumed. |
| 1861 VisitForStackValue(generator); | 1847 VisitForStackValue(generator); |
| 1862 VisitForAccumulatorValue(value); | 1848 VisitForAccumulatorValue(value); |
| 1863 PopOperand(r4); | 1849 PopOperand(r3); |
| 1864 | 1850 |
| 1865 // Store input value into generator object. | 1851 // Store input value into generator object. |
| 1866 __ StoreP(result_register(), | 1852 __ StoreP(result_register(), |
| 1867 FieldMemOperand(r4, JSGeneratorObject::kInputOffset), r0); | 1853 FieldMemOperand(r3, JSGeneratorObject::kInputOffset), r0); |
| 1868 __ mr(r5, result_register()); | 1854 __ LoadRR(r4, result_register()); |
| 1869 __ RecordWriteField(r4, JSGeneratorObject::kInputOffset, r5, r6, | 1855 __ RecordWriteField(r3, JSGeneratorObject::kInputOffset, r4, r5, |
| 1870 kLRHasBeenSaved, kDontSaveFPRegs); | 1856 kLRHasBeenSaved, kDontSaveFPRegs); |
| 1871 | 1857 |
| 1872 // Load suspended function and context. | 1858 // Load suspended function and context. |
| 1873 __ LoadP(cp, FieldMemOperand(r4, JSGeneratorObject::kContextOffset)); | 1859 __ LoadP(cp, FieldMemOperand(r3, JSGeneratorObject::kContextOffset)); |
| 1874 __ LoadP(r7, FieldMemOperand(r4, JSGeneratorObject::kFunctionOffset)); | 1860 __ LoadP(r6, FieldMemOperand(r3, JSGeneratorObject::kFunctionOffset)); |
| 1875 | 1861 |
| 1876 // Load receiver and store as the first argument. | 1862 // Load receiver and store as the first argument. |
| 1877 __ LoadP(r5, FieldMemOperand(r4, JSGeneratorObject::kReceiverOffset)); | 1863 __ LoadP(r4, FieldMemOperand(r3, JSGeneratorObject::kReceiverOffset)); |
| 1878 __ push(r5); | 1864 __ push(r4); |
| 1879 | 1865 |
| 1880 // Push holes for the rest of the arguments to the generator function. | 1866 // Push holes for the rest of the arguments to the generator function. |
| 1881 __ LoadP(r6, FieldMemOperand(r7, JSFunction::kSharedFunctionInfoOffset)); | 1867 __ LoadP(r5, FieldMemOperand(r6, JSFunction::kSharedFunctionInfoOffset)); |
| 1882 __ LoadWordArith( | 1868 __ LoadW( |
| 1883 r6, FieldMemOperand(r6, SharedFunctionInfo::kFormalParameterCountOffset)); | 1869 r5, FieldMemOperand(r5, SharedFunctionInfo::kFormalParameterCountOffset)); |
| 1884 __ LoadRoot(r5, Heap::kTheHoleValueRootIndex); | 1870 __ LoadRoot(r4, Heap::kTheHoleValueRootIndex); |
| 1885 Label argument_loop, push_frame; | 1871 Label argument_loop, push_frame; |
| 1886 #if V8_TARGET_ARCH_PPC64 | 1872 #if V8_TARGET_ARCH_S390X |
| 1887 __ cmpi(r6, Operand::Zero()); | 1873 __ CmpP(r5, Operand::Zero()); |
| 1888 __ beq(&push_frame); | 1874 __ beq(&push_frame, Label::kNear); |
| 1889 #else | 1875 #else |
| 1890 __ SmiUntag(r6, SetRC); | 1876 __ SmiUntag(r5); |
| 1891 __ beq(&push_frame, cr0); | 1877 __ beq(&push_frame, Label::kNear); |
| 1892 #endif | 1878 #endif |
| 1893 __ mtctr(r6); | 1879 __ LoadRR(r0, r5); |
| 1894 __ bind(&argument_loop); | 1880 __ bind(&argument_loop); |
| 1895 __ push(r5); | 1881 __ push(r4); |
| 1896 __ bdnz(&argument_loop); | 1882 __ SubP(r0, Operand(1)); |
| 1883 __ bne(&argument_loop); |
| 1897 | 1884 |
| 1898 // Enter a new JavaScript frame, and initialize its slots as they were when | 1885 // Enter a new JavaScript frame, and initialize its slots as they were when |
| 1899 // the generator was suspended. | 1886 // the generator was suspended. |
| 1900 Label resume_frame, done; | 1887 Label resume_frame, done; |
| 1901 __ bind(&push_frame); | 1888 __ bind(&push_frame); |
| 1902 __ b(&resume_frame, SetLK); | 1889 __ b(r14, &resume_frame); // brasl |
| 1903 __ b(&done); | 1890 __ b(&done); |
| 1904 __ bind(&resume_frame); | 1891 __ bind(&resume_frame); |
| 1905 // lr = return address. | 1892 // lr = return address. |
| 1906 // fp = caller's frame pointer. | 1893 // fp = caller's frame pointer. |
| 1907 // cp = callee's context, | 1894 // cp = callee's context, |
| 1908 // r7 = callee's JS function. | 1895 // r6 = callee's JS function. |
| 1909 __ PushFixedFrame(r7); | 1896 __ PushFixedFrame(r6); |
| 1910 // Adjust FP to point to saved FP. | 1897 // Adjust FP to point to saved FP. |
| 1911 __ addi(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp)); | 1898 __ lay(fp, MemOperand(sp, StandardFrameConstants::kFixedFrameSizeFromFp)); |
| 1912 | 1899 |
| 1913 // Load the operand stack size. | 1900 // Load the operand stack size. |
| 1914 __ LoadP(r6, FieldMemOperand(r4, JSGeneratorObject::kOperandStackOffset)); | 1901 __ LoadP(r5, FieldMemOperand(r3, JSGeneratorObject::kOperandStackOffset)); |
| 1915 __ LoadP(r6, FieldMemOperand(r6, FixedArray::kLengthOffset)); | 1902 __ LoadP(r5, FieldMemOperand(r5, FixedArray::kLengthOffset)); |
| 1916 __ SmiUntag(r6, SetRC); | 1903 __ SmiUntag(r5); |
| 1917 | 1904 |
| 1918 // If we are sending a value and there is no operand stack, we can jump back | 1905 // If we are sending a value and there is no operand stack, we can jump back |
| 1919 // in directly. | 1906 // in directly. |
| 1920 Label call_resume; | 1907 Label call_resume; |
| 1921 if (resume_mode == JSGeneratorObject::NEXT) { | 1908 if (resume_mode == JSGeneratorObject::NEXT) { |
| 1922 Label slow_resume; | 1909 Label slow_resume; |
| 1923 __ bne(&slow_resume, cr0); | 1910 __ bne(&slow_resume, Label::kNear); |
| 1924 __ LoadP(ip, FieldMemOperand(r7, JSFunction::kCodeEntryOffset)); | 1911 __ LoadP(ip, FieldMemOperand(r6, JSFunction::kCodeEntryOffset)); |
| 1925 { | 1912 __ LoadP(r4, FieldMemOperand(r3, JSGeneratorObject::kContinuationOffset)); |
| 1926 ConstantPoolUnavailableScope constant_pool_unavailable(masm_); | 1913 __ SmiUntag(r4); |
| 1927 if (FLAG_enable_embedded_constant_pool) { | 1914 __ AddP(ip, ip, r4); |
| 1928 __ LoadConstantPoolPointerRegisterFromCodeTargetAddress(ip); | 1915 __ LoadSmiLiteral(r4, Smi::FromInt(JSGeneratorObject::kGeneratorExecuting)); |
| 1929 } | 1916 __ StoreP(r4, FieldMemOperand(r3, JSGeneratorObject::kContinuationOffset)); |
| 1930 __ LoadP(r5, FieldMemOperand(r4, JSGeneratorObject::kContinuationOffset)); | 1917 __ Push(Smi::FromInt(resume_mode)); // Consumed in continuation. |
| 1931 __ SmiUntag(r5); | 1918 __ Jump(ip); |
| 1932 __ add(ip, ip, r5); | 1919 __ bind(&slow_resume); |
| 1933 __ LoadSmiLiteral(r5, | |
| 1934 Smi::FromInt(JSGeneratorObject::kGeneratorExecuting)); | |
| 1935 __ StoreP(r5, FieldMemOperand(r4, JSGeneratorObject::kContinuationOffset), | |
| 1936 r0); | |
| 1937 __ Push(Smi::FromInt(resume_mode)); // Consumed in continuation. | |
| 1938 __ Jump(ip); | |
| 1939 __ bind(&slow_resume); | |
| 1940 } | |
| 1941 } else { | 1920 } else { |
| 1942 __ beq(&call_resume, cr0); | 1921 __ beq(&call_resume); |
| 1943 } | 1922 } |
| 1944 | 1923 |
| 1945 // Otherwise, we push holes for the operand stack and call the runtime to fix | 1924 // Otherwise, we push holes for the operand stack and call the runtime to fix |
| 1946 // up the stack and the handlers. | 1925 // up the stack and the handlers. |
| 1947 Label operand_loop; | 1926 Label operand_loop; |
| 1948 __ mtctr(r6); | 1927 __ LoadRR(r0, r5); |
| 1949 __ bind(&operand_loop); | 1928 __ bind(&operand_loop); |
| 1950 __ push(r5); | 1929 __ push(r4); |
| 1951 __ bdnz(&operand_loop); | 1930 __ SubP(r0, Operand(1)); |
| 1931 __ bne(&operand_loop); |
| 1952 | 1932 |
| 1953 __ bind(&call_resume); | 1933 __ bind(&call_resume); |
| 1954 __ Push(Smi::FromInt(resume_mode)); // Consumed in continuation. | 1934 __ Push(Smi::FromInt(resume_mode)); // Consumed in continuation. |
| 1955 DCHECK(!result_register().is(r4)); | 1935 DCHECK(!result_register().is(r3)); |
| 1956 __ Push(r4, result_register()); | 1936 __ Push(r3, result_register()); |
| 1957 __ Push(Smi::FromInt(resume_mode)); | 1937 __ Push(Smi::FromInt(resume_mode)); |
| 1958 __ CallRuntime(Runtime::kResumeJSGeneratorObject); | 1938 __ CallRuntime(Runtime::kResumeJSGeneratorObject); |
| 1959 // Not reached: the runtime call returns elsewhere. | 1939 // Not reached: the runtime call returns elsewhere. |
| 1960 __ stop("not-reached"); | 1940 __ stop("not-reached"); |
| 1961 | 1941 |
| 1962 __ bind(&done); | 1942 __ bind(&done); |
| 1963 context()->Plug(result_register()); | 1943 context()->Plug(result_register()); |
| 1964 } | 1944 } |
| 1965 | 1945 |
| 1966 void FullCodeGenerator::PushOperands(Register reg1, Register reg2) { | 1946 void FullCodeGenerator::PushOperands(Register reg1, Register reg2) { |
| (...skipping 15 matching lines...) Expand all Loading... |
| 1982 | 1962 |
| 1983 void FullCodeGenerator::PopOperands(Register reg1, Register reg2) { | 1963 void FullCodeGenerator::PopOperands(Register reg1, Register reg2) { |
| 1984 OperandStackDepthDecrement(2); | 1964 OperandStackDepthDecrement(2); |
| 1985 __ Pop(reg1, reg2); | 1965 __ Pop(reg1, reg2); |
| 1986 } | 1966 } |
| 1987 | 1967 |
| 1988 void FullCodeGenerator::EmitOperandStackDepthCheck() { | 1968 void FullCodeGenerator::EmitOperandStackDepthCheck() { |
| 1989 if (FLAG_debug_code) { | 1969 if (FLAG_debug_code) { |
| 1990 int expected_diff = StandardFrameConstants::kFixedFrameSizeFromFp + | 1970 int expected_diff = StandardFrameConstants::kFixedFrameSizeFromFp + |
| 1991 operand_stack_depth_ * kPointerSize; | 1971 operand_stack_depth_ * kPointerSize; |
| 1992 __ sub(r3, fp, sp); | 1972 __ SubP(r2, fp, sp); |
| 1993 __ cmpi(r3, Operand(expected_diff)); | 1973 __ CmpP(r2, Operand(expected_diff)); |
| 1994 __ Assert(eq, kUnexpectedStackDepth); | 1974 __ Assert(eq, kUnexpectedStackDepth); |
| 1995 } | 1975 } |
| 1996 } | 1976 } |
| 1997 | 1977 |
| 1998 void FullCodeGenerator::EmitCreateIteratorResult(bool done) { | 1978 void FullCodeGenerator::EmitCreateIteratorResult(bool done) { |
| 1999 Label allocate, done_allocate; | 1979 Label allocate, done_allocate; |
| 2000 | 1980 |
| 2001 __ Allocate(JSIteratorResult::kSize, r3, r5, r6, &allocate, TAG_OBJECT); | 1981 __ Allocate(JSIteratorResult::kSize, r2, r4, r5, &allocate, TAG_OBJECT); |
| 2002 __ b(&done_allocate); | 1982 __ b(&done_allocate); |
| 2003 | 1983 |
| 2004 __ bind(&allocate); | 1984 __ bind(&allocate); |
| 2005 __ Push(Smi::FromInt(JSIteratorResult::kSize)); | 1985 __ Push(Smi::FromInt(JSIteratorResult::kSize)); |
| 2006 __ CallRuntime(Runtime::kAllocateInNewSpace); | 1986 __ CallRuntime(Runtime::kAllocateInNewSpace); |
| 2007 | 1987 |
| 2008 __ bind(&done_allocate); | 1988 __ bind(&done_allocate); |
| 2009 __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, r4); | 1989 __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, r3); |
| 2010 PopOperand(r5); | 1990 PopOperand(r4); |
| 2011 __ LoadRoot(r6, | 1991 __ LoadRoot(r5, |
| 2012 done ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex); | 1992 done ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex); |
| 2013 __ LoadRoot(r7, Heap::kEmptyFixedArrayRootIndex); | 1993 __ LoadRoot(r6, Heap::kEmptyFixedArrayRootIndex); |
| 2014 __ StoreP(r4, FieldMemOperand(r3, HeapObject::kMapOffset), r0); | 1994 __ StoreP(r3, FieldMemOperand(r2, HeapObject::kMapOffset), r0); |
| 2015 __ StoreP(r7, FieldMemOperand(r3, JSObject::kPropertiesOffset), r0); | 1995 __ StoreP(r6, FieldMemOperand(r2, JSObject::kPropertiesOffset), r0); |
| 2016 __ StoreP(r7, FieldMemOperand(r3, JSObject::kElementsOffset), r0); | 1996 __ StoreP(r6, FieldMemOperand(r2, JSObject::kElementsOffset), r0); |
| 2017 __ StoreP(r5, FieldMemOperand(r3, JSIteratorResult::kValueOffset), r0); | 1997 __ StoreP(r4, FieldMemOperand(r2, JSIteratorResult::kValueOffset), r0); |
| 2018 __ StoreP(r6, FieldMemOperand(r3, JSIteratorResult::kDoneOffset), r0); | 1998 __ StoreP(r5, FieldMemOperand(r2, JSIteratorResult::kDoneOffset), r0); |
| 2019 } | 1999 } |
| 2020 | 2000 |
| 2021 | |
| 2022 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr, | 2001 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr, |
| 2023 Token::Value op, | 2002 Token::Value op, |
| 2024 Expression* left_expr, | 2003 Expression* left_expr, |
| 2025 Expression* right_expr) { | 2004 Expression* right_expr) { |
| 2026 Label done, smi_case, stub_call; | 2005 Label done, smi_case, stub_call; |
| 2027 | 2006 |
| 2028 Register scratch1 = r5; | 2007 Register scratch1 = r4; |
| 2029 Register scratch2 = r6; | 2008 Register scratch2 = r5; |
| 2030 | 2009 |
| 2031 // Get the arguments. | 2010 // Get the arguments. |
| 2032 Register left = r4; | 2011 Register left = r3; |
| 2033 Register right = r3; | 2012 Register right = r2; |
| 2034 PopOperand(left); | 2013 PopOperand(left); |
| 2035 | 2014 |
| 2036 // Perform combined smi check on both operands. | 2015 // Perform combined smi check on both operands. |
| 2037 __ orx(scratch1, left, right); | 2016 __ LoadRR(scratch1, right); |
| 2017 __ OrP(scratch1, left); |
| 2038 STATIC_ASSERT(kSmiTag == 0); | 2018 STATIC_ASSERT(kSmiTag == 0); |
| 2039 JumpPatchSite patch_site(masm_); | 2019 JumpPatchSite patch_site(masm_); |
| 2040 patch_site.EmitJumpIfSmi(scratch1, &smi_case); | 2020 patch_site.EmitJumpIfSmi(scratch1, &smi_case); |
| 2041 | 2021 |
| 2042 __ bind(&stub_call); | 2022 __ bind(&stub_call); |
| 2043 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code(); | 2023 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code(); |
| 2044 CallIC(code, expr->BinaryOperationFeedbackId()); | 2024 CallIC(code, expr->BinaryOperationFeedbackId()); |
| 2045 patch_site.EmitPatchInfo(); | 2025 patch_site.EmitPatchInfo(); |
| 2046 __ b(&done); | 2026 __ b(&done); |
| 2047 | 2027 |
| 2048 __ bind(&smi_case); | 2028 __ bind(&smi_case); |
| 2049 // Smi case. This code works the same way as the smi-smi case in the type | 2029 // Smi case. This code works the same way as the smi-smi case in the type |
| 2050 // recording binary operation stub. | 2030 // recording binary operation stub. |
| 2051 switch (op) { | 2031 switch (op) { |
| 2052 case Token::SAR: | 2032 case Token::SAR: |
| 2053 __ GetLeastBitsFromSmi(scratch1, right, 5); | 2033 __ GetLeastBitsFromSmi(scratch1, right, 5); |
| 2054 __ ShiftRightArith(right, left, scratch1); | 2034 __ ShiftRightArithP(right, left, scratch1); |
| 2055 __ ClearRightImm(right, right, Operand(kSmiTagSize + kSmiShiftSize)); | 2035 __ ClearRightImm(right, right, Operand(kSmiTagSize + kSmiShiftSize)); |
| 2056 break; | 2036 break; |
| 2057 case Token::SHL: { | 2037 case Token::SHL: { |
| 2058 __ GetLeastBitsFromSmi(scratch2, right, 5); | 2038 __ GetLeastBitsFromSmi(scratch2, right, 5); |
| 2059 #if V8_TARGET_ARCH_PPC64 | 2039 #if V8_TARGET_ARCH_S390X |
| 2060 __ ShiftLeft_(right, left, scratch2); | 2040 __ ShiftLeftP(right, left, scratch2); |
| 2061 #else | 2041 #else |
| 2062 __ SmiUntag(scratch1, left); | 2042 __ SmiUntag(scratch1, left); |
| 2063 __ ShiftLeft_(scratch1, scratch1, scratch2); | 2043 __ ShiftLeftP(scratch1, scratch1, scratch2); |
| 2064 // Check that the *signed* result fits in a smi | 2044 // Check that the *signed* result fits in a smi |
| 2065 __ JumpIfNotSmiCandidate(scratch1, scratch2, &stub_call); | 2045 __ JumpIfNotSmiCandidate(scratch1, scratch2, &stub_call); |
| 2066 __ SmiTag(right, scratch1); | 2046 __ SmiTag(right, scratch1); |
| 2067 #endif | 2047 #endif |
| 2068 break; | 2048 break; |
| 2069 } | 2049 } |
| 2070 case Token::SHR: { | 2050 case Token::SHR: { |
| 2071 __ SmiUntag(scratch1, left); | 2051 __ SmiUntag(scratch1, left); |
| 2072 __ GetLeastBitsFromSmi(scratch2, right, 5); | 2052 __ GetLeastBitsFromSmi(scratch2, right, 5); |
| 2073 __ srw(scratch1, scratch1, scratch2); | 2053 __ srl(scratch1, scratch2); |
| 2074 // Unsigned shift is not allowed to produce a negative number. | 2054 // Unsigned shift is not allowed to produce a negative number. |
| 2075 __ JumpIfNotUnsignedSmiCandidate(scratch1, r0, &stub_call); | 2055 __ JumpIfNotUnsignedSmiCandidate(scratch1, r0, &stub_call); |
| 2076 __ SmiTag(right, scratch1); | 2056 __ SmiTag(right, scratch1); |
| 2077 break; | 2057 break; |
| 2078 } | 2058 } |
| 2079 case Token::ADD: { | 2059 case Token::ADD: { |
| 2080 __ AddAndCheckForOverflow(scratch1, left, right, scratch2, r0); | 2060 __ AddAndCheckForOverflow(scratch1, left, right, scratch2, r0); |
| 2081 __ BranchOnOverflow(&stub_call); | 2061 __ BranchOnOverflow(&stub_call); |
| 2082 __ mr(right, scratch1); | 2062 __ LoadRR(right, scratch1); |
| 2083 break; | 2063 break; |
| 2084 } | 2064 } |
| 2085 case Token::SUB: { | 2065 case Token::SUB: { |
| 2086 __ SubAndCheckForOverflow(scratch1, left, right, scratch2, r0); | 2066 __ SubAndCheckForOverflow(scratch1, left, right, scratch2, r0); |
| 2087 __ BranchOnOverflow(&stub_call); | 2067 __ BranchOnOverflow(&stub_call); |
| 2088 __ mr(right, scratch1); | 2068 __ LoadRR(right, scratch1); |
| 2089 break; | 2069 break; |
| 2090 } | 2070 } |
| 2091 case Token::MUL: { | 2071 case Token::MUL: { |
| 2092 Label mul_zero; | 2072 Label mul_zero; |
| 2093 #if V8_TARGET_ARCH_PPC64 | 2073 #if V8_TARGET_ARCH_S390X |
| 2094 // Remove tag from both operands. | 2074 // Remove tag from both operands. |
| 2095 __ SmiUntag(ip, right); | 2075 __ SmiUntag(ip, right); |
| 2096 __ SmiUntag(r0, left); | 2076 __ SmiUntag(scratch2, left); |
| 2097 __ Mul(scratch1, r0, ip); | 2077 __ mr_z(scratch1, ip); |
| 2098 // Check for overflowing the smi range - no overflow if higher 33 bits of | 2078 // Check for overflowing the smi range - no overflow if higher 33 bits of |
| 2099 // the result are identical. | 2079 // the result are identical. |
| 2100 __ TestIfInt32(scratch1, r0); | 2080 __ lr(ip, scratch2); // 32 bit load |
| 2081 __ sra(ip, Operand(31)); |
| 2082 __ cr_z(ip, scratch1); // 32 bit compare |
| 2101 __ bne(&stub_call); | 2083 __ bne(&stub_call); |
| 2102 #else | 2084 #else |
| 2103 __ SmiUntag(ip, right); | 2085 __ SmiUntag(ip, right); |
| 2104 __ mullw(scratch1, left, ip); | 2086 __ LoadRR(scratch2, left); // load into low order of reg pair |
| 2105 __ mulhw(scratch2, left, ip); | 2087 __ mr_z(scratch1, ip); // R4:R5 = R5 * ip |
| 2106 // Check for overflowing the smi range - no overflow if higher 33 bits of | 2088 // Check for overflowing the smi range - no overflow if higher 33 bits of |
| 2107 // the result are identical. | 2089 // the result are identical. |
| 2108 __ TestIfInt32(scratch2, scratch1, ip); | 2090 __ TestIfInt32(scratch1, scratch2, ip); |
| 2109 __ bne(&stub_call); | 2091 __ bne(&stub_call); |
| 2110 #endif | 2092 #endif |
| 2111 // Go slow on zero result to handle -0. | 2093 // Go slow on zero result to handle -0. |
| 2112 __ cmpi(scratch1, Operand::Zero()); | 2094 __ chi(scratch2, Operand::Zero()); |
| 2113 __ beq(&mul_zero); | 2095 __ beq(&mul_zero, Label::kNear); |
| 2114 #if V8_TARGET_ARCH_PPC64 | 2096 #if V8_TARGET_ARCH_S390X |
| 2115 __ SmiTag(right, scratch1); | 2097 __ SmiTag(right, scratch2); |
| 2116 #else | 2098 #else |
| 2117 __ mr(right, scratch1); | 2099 __ LoadRR(right, scratch2); |
| 2118 #endif | 2100 #endif |
| 2119 __ b(&done); | 2101 __ b(&done); |
| 2120 // We need -0 if we were multiplying a negative number with 0 to get 0. | 2102 // We need -0 if we were multiplying a negative number with 0 to get 0. |
| 2121 // We know one of them was zero. | 2103 // We know one of them was zero. |
| 2122 __ bind(&mul_zero); | 2104 __ bind(&mul_zero); |
| 2123 __ add(scratch2, right, left); | 2105 __ AddP(scratch2, right, left); |
| 2124 __ cmpi(scratch2, Operand::Zero()); | 2106 __ CmpP(scratch2, Operand::Zero()); |
| 2125 __ blt(&stub_call); | 2107 __ blt(&stub_call); |
| 2126 __ LoadSmiLiteral(right, Smi::FromInt(0)); | 2108 __ LoadSmiLiteral(right, Smi::FromInt(0)); |
| 2127 break; | 2109 break; |
| 2128 } | 2110 } |
| 2129 case Token::BIT_OR: | 2111 case Token::BIT_OR: |
| 2130 __ orx(right, left, right); | 2112 __ OrP(right, left); |
| 2131 break; | 2113 break; |
| 2132 case Token::BIT_AND: | 2114 case Token::BIT_AND: |
| 2133 __ and_(right, left, right); | 2115 __ AndP(right, left); |
| 2134 break; | 2116 break; |
| 2135 case Token::BIT_XOR: | 2117 case Token::BIT_XOR: |
| 2136 __ xor_(right, left, right); | 2118 __ XorP(right, left); |
| 2137 break; | 2119 break; |
| 2138 default: | 2120 default: |
| 2139 UNREACHABLE(); | 2121 UNREACHABLE(); |
| 2140 } | 2122 } |
| 2141 | 2123 |
| 2142 __ bind(&done); | 2124 __ bind(&done); |
| 2143 context()->Plug(r3); | 2125 context()->Plug(r2); |
| 2144 } | 2126 } |
| 2145 | 2127 |
| 2146 | |
| 2147 void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit) { | 2128 void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit) { |
| 2148 for (int i = 0; i < lit->properties()->length(); i++) { | 2129 for (int i = 0; i < lit->properties()->length(); i++) { |
| 2149 ObjectLiteral::Property* property = lit->properties()->at(i); | 2130 ObjectLiteral::Property* property = lit->properties()->at(i); |
| 2150 Expression* value = property->value(); | 2131 Expression* value = property->value(); |
| 2151 | 2132 |
| 2152 Register scratch = r4; | 2133 Register scratch = r3; |
| 2153 if (property->is_static()) { | 2134 if (property->is_static()) { |
| 2154 __ LoadP(scratch, MemOperand(sp, kPointerSize)); // constructor | 2135 __ LoadP(scratch, MemOperand(sp, kPointerSize)); // constructor |
| 2155 } else { | 2136 } else { |
| 2156 __ LoadP(scratch, MemOperand(sp, 0)); // prototype | 2137 __ LoadP(scratch, MemOperand(sp, 0)); // prototype |
| 2157 } | 2138 } |
| 2158 PushOperand(scratch); | 2139 PushOperand(scratch); |
| 2159 EmitPropertyKey(property, lit->GetIdForProperty(i)); | 2140 EmitPropertyKey(property, lit->GetIdForProperty(i)); |
| 2160 | 2141 |
| 2161 // The static prototype property is read only. We handle the non computed | 2142 // The static prototype property is read only. We handle the non computed |
| 2162 // property name case in the parser. Since this is the only case where we | 2143 // property name case in the parser. Since this is the only case where we |
| 2163 // need to check for an own read only property we special case this so we do | 2144 // need to check for an own read only property we special case this so we do |
| 2164 // not need to do this for every property. | 2145 // not need to do this for every property. |
| 2165 if (property->is_static() && property->is_computed_name()) { | 2146 if (property->is_static() && property->is_computed_name()) { |
| 2166 __ CallRuntime(Runtime::kThrowIfStaticPrototype); | 2147 __ CallRuntime(Runtime::kThrowIfStaticPrototype); |
| 2167 __ push(r3); | 2148 __ push(r2); |
| 2168 } | 2149 } |
| 2169 | 2150 |
| 2170 VisitForStackValue(value); | 2151 VisitForStackValue(value); |
| 2171 if (NeedsHomeObject(value)) { | 2152 if (NeedsHomeObject(value)) { |
| 2172 EmitSetHomeObject(value, 2, property->GetSlot()); | 2153 EmitSetHomeObject(value, 2, property->GetSlot()); |
| 2173 } | 2154 } |
| 2174 | 2155 |
| 2175 switch (property->kind()) { | 2156 switch (property->kind()) { |
| 2176 case ObjectLiteral::Property::CONSTANT: | 2157 case ObjectLiteral::Property::CONSTANT: |
| 2177 case ObjectLiteral::Property::MATERIALIZED_LITERAL: | 2158 case ObjectLiteral::Property::MATERIALIZED_LITERAL: |
| (...skipping 14 matching lines...) Expand all Loading... |
| 2192 PushOperand(Smi::FromInt(DONT_ENUM)); | 2173 PushOperand(Smi::FromInt(DONT_ENUM)); |
| 2193 CallRuntimeWithOperands(Runtime::kDefineSetterPropertyUnchecked); | 2174 CallRuntimeWithOperands(Runtime::kDefineSetterPropertyUnchecked); |
| 2194 break; | 2175 break; |
| 2195 | 2176 |
| 2196 default: | 2177 default: |
| 2197 UNREACHABLE(); | 2178 UNREACHABLE(); |
| 2198 } | 2179 } |
| 2199 } | 2180 } |
| 2200 } | 2181 } |
| 2201 | 2182 |
| 2202 | |
| 2203 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) { | 2183 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) { |
| 2204 PopOperand(r4); | 2184 PopOperand(r3); |
| 2205 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code(); | 2185 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code(); |
| 2206 JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code. | 2186 JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code. |
| 2207 CallIC(code, expr->BinaryOperationFeedbackId()); | 2187 CallIC(code, expr->BinaryOperationFeedbackId()); |
| 2208 patch_site.EmitPatchInfo(); | 2188 patch_site.EmitPatchInfo(); |
| 2209 context()->Plug(r3); | 2189 context()->Plug(r2); |
| 2210 } | 2190 } |
| 2211 | 2191 |
| 2212 | |
| 2213 void FullCodeGenerator::EmitAssignment(Expression* expr, | 2192 void FullCodeGenerator::EmitAssignment(Expression* expr, |
| 2214 FeedbackVectorSlot slot) { | 2193 FeedbackVectorSlot slot) { |
| 2215 DCHECK(expr->IsValidReferenceExpressionOrThis()); | 2194 DCHECK(expr->IsValidReferenceExpressionOrThis()); |
| 2216 | 2195 |
| 2217 Property* prop = expr->AsProperty(); | 2196 Property* prop = expr->AsProperty(); |
| 2218 LhsKind assign_type = Property::GetAssignType(prop); | 2197 LhsKind assign_type = Property::GetAssignType(prop); |
| 2219 | 2198 |
| 2220 switch (assign_type) { | 2199 switch (assign_type) { |
| 2221 case VARIABLE: { | 2200 case VARIABLE: { |
| 2222 Variable* var = expr->AsVariableProxy()->var(); | 2201 Variable* var = expr->AsVariableProxy()->var(); |
| 2223 EffectContext context(this); | 2202 EffectContext context(this); |
| 2224 EmitVariableAssignment(var, Token::ASSIGN, slot); | 2203 EmitVariableAssignment(var, Token::ASSIGN, slot); |
| 2225 break; | 2204 break; |
| 2226 } | 2205 } |
| 2227 case NAMED_PROPERTY: { | 2206 case NAMED_PROPERTY: { |
| 2228 PushOperand(r3); // Preserve value. | 2207 PushOperand(r2); // Preserve value. |
| 2229 VisitForAccumulatorValue(prop->obj()); | 2208 VisitForAccumulatorValue(prop->obj()); |
| 2230 __ Move(StoreDescriptor::ReceiverRegister(), r3); | 2209 __ Move(StoreDescriptor::ReceiverRegister(), r2); |
| 2231 PopOperand(StoreDescriptor::ValueRegister()); // Restore value. | 2210 PopOperand(StoreDescriptor::ValueRegister()); // Restore value. |
| 2232 __ mov(StoreDescriptor::NameRegister(), | 2211 __ mov(StoreDescriptor::NameRegister(), |
| 2233 Operand(prop->key()->AsLiteral()->value())); | 2212 Operand(prop->key()->AsLiteral()->value())); |
| 2234 EmitLoadStoreICSlot(slot); | 2213 EmitLoadStoreICSlot(slot); |
| 2235 CallStoreIC(); | 2214 CallStoreIC(); |
| 2236 break; | 2215 break; |
| 2237 } | 2216 } |
| 2238 case NAMED_SUPER_PROPERTY: { | 2217 case NAMED_SUPER_PROPERTY: { |
| 2239 PushOperand(r3); | 2218 PushOperand(r2); |
| 2240 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var()); | 2219 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var()); |
| 2241 VisitForAccumulatorValue( | 2220 VisitForAccumulatorValue( |
| 2242 prop->obj()->AsSuperPropertyReference()->home_object()); | 2221 prop->obj()->AsSuperPropertyReference()->home_object()); |
| 2243 // stack: value, this; r3: home_object | 2222 // stack: value, this; r2: home_object |
| 2244 Register scratch = r5; | 2223 Register scratch = r4; |
| 2245 Register scratch2 = r6; | 2224 Register scratch2 = r5; |
| 2246 __ mr(scratch, result_register()); // home_object | 2225 __ LoadRR(scratch, result_register()); // home_object |
| 2247 __ LoadP(r3, MemOperand(sp, kPointerSize)); // value | 2226 __ LoadP(r2, MemOperand(sp, kPointerSize)); // value |
| 2248 __ LoadP(scratch2, MemOperand(sp, 0)); // this | 2227 __ LoadP(scratch2, MemOperand(sp, 0)); // this |
| 2249 __ StoreP(scratch2, MemOperand(sp, kPointerSize)); // this | 2228 __ StoreP(scratch2, MemOperand(sp, kPointerSize)); // this |
| 2250 __ StoreP(scratch, MemOperand(sp, 0)); // home_object | 2229 __ StoreP(scratch, MemOperand(sp, 0)); // home_object |
| 2251 // stack: this, home_object; r3: value | 2230 // stack: this, home_object; r2: value |
| 2252 EmitNamedSuperPropertyStore(prop); | 2231 EmitNamedSuperPropertyStore(prop); |
| 2253 break; | 2232 break; |
| 2254 } | 2233 } |
| 2255 case KEYED_SUPER_PROPERTY: { | 2234 case KEYED_SUPER_PROPERTY: { |
| 2256 PushOperand(r3); | 2235 PushOperand(r2); |
| 2257 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var()); | 2236 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var()); |
| 2258 VisitForStackValue( | 2237 VisitForStackValue( |
| 2259 prop->obj()->AsSuperPropertyReference()->home_object()); | 2238 prop->obj()->AsSuperPropertyReference()->home_object()); |
| 2260 VisitForAccumulatorValue(prop->key()); | 2239 VisitForAccumulatorValue(prop->key()); |
| 2261 Register scratch = r5; | 2240 Register scratch = r4; |
| 2262 Register scratch2 = r6; | 2241 Register scratch2 = r5; |
| 2263 __ LoadP(scratch2, MemOperand(sp, 2 * kPointerSize)); // value | 2242 __ LoadP(scratch2, MemOperand(sp, 2 * kPointerSize)); // value |
| 2264 // stack: value, this, home_object; r3: key, r6: value | 2243 // stack: value, this, home_object; r2: key, r5: value |
| 2265 __ LoadP(scratch, MemOperand(sp, kPointerSize)); // this | 2244 __ LoadP(scratch, MemOperand(sp, kPointerSize)); // this |
| 2266 __ StoreP(scratch, MemOperand(sp, 2 * kPointerSize)); | 2245 __ StoreP(scratch, MemOperand(sp, 2 * kPointerSize)); |
| 2267 __ LoadP(scratch, MemOperand(sp, 0)); // home_object | 2246 __ LoadP(scratch, MemOperand(sp, 0)); // home_object |
| 2268 __ StoreP(scratch, MemOperand(sp, kPointerSize)); | 2247 __ StoreP(scratch, MemOperand(sp, kPointerSize)); |
| 2269 __ StoreP(r3, MemOperand(sp, 0)); | 2248 __ StoreP(r2, MemOperand(sp, 0)); |
| 2270 __ Move(r3, scratch2); | 2249 __ Move(r2, scratch2); |
| 2271 // stack: this, home_object, key; r3: value. | 2250 // stack: this, home_object, key; r2: value. |
| 2272 EmitKeyedSuperPropertyStore(prop); | 2251 EmitKeyedSuperPropertyStore(prop); |
| 2273 break; | 2252 break; |
| 2274 } | 2253 } |
| 2275 case KEYED_PROPERTY: { | 2254 case KEYED_PROPERTY: { |
| 2276 PushOperand(r3); // Preserve value. | 2255 PushOperand(r2); // Preserve value. |
| 2277 VisitForStackValue(prop->obj()); | 2256 VisitForStackValue(prop->obj()); |
| 2278 VisitForAccumulatorValue(prop->key()); | 2257 VisitForAccumulatorValue(prop->key()); |
| 2279 __ Move(StoreDescriptor::NameRegister(), r3); | 2258 __ Move(StoreDescriptor::NameRegister(), r2); |
| 2280 PopOperands(StoreDescriptor::ValueRegister(), | 2259 PopOperands(StoreDescriptor::ValueRegister(), |
| 2281 StoreDescriptor::ReceiverRegister()); | 2260 StoreDescriptor::ReceiverRegister()); |
| 2282 EmitLoadStoreICSlot(slot); | 2261 EmitLoadStoreICSlot(slot); |
| 2283 Handle<Code> ic = | 2262 Handle<Code> ic = |
| 2284 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code(); | 2263 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code(); |
| 2285 CallIC(ic); | 2264 CallIC(ic); |
| 2286 break; | 2265 break; |
| 2287 } | 2266 } |
| 2288 } | 2267 } |
| 2289 context()->Plug(r3); | 2268 context()->Plug(r2); |
| 2290 } | 2269 } |
| 2291 | 2270 |
| 2292 | |
| 2293 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot( | 2271 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot( |
| 2294 Variable* var, MemOperand location) { | 2272 Variable* var, MemOperand location) { |
| 2295 __ StoreP(result_register(), location, r0); | 2273 __ StoreP(result_register(), location); |
| 2296 if (var->IsContextSlot()) { | 2274 if (var->IsContextSlot()) { |
| 2297 // RecordWrite may destroy all its register arguments. | 2275 // RecordWrite may destroy all its register arguments. |
| 2298 __ mr(r6, result_register()); | 2276 __ LoadRR(r5, result_register()); |
| 2299 int offset = Context::SlotOffset(var->index()); | 2277 int offset = Context::SlotOffset(var->index()); |
| 2300 __ RecordWriteContextSlot(r4, offset, r6, r5, kLRHasBeenSaved, | 2278 __ RecordWriteContextSlot(r3, offset, r5, r4, kLRHasBeenSaved, |
| 2301 kDontSaveFPRegs); | 2279 kDontSaveFPRegs); |
| 2302 } | 2280 } |
| 2303 } | 2281 } |
| 2304 | 2282 |
| 2305 | |
| 2306 void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op, | 2283 void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op, |
| 2307 FeedbackVectorSlot slot) { | 2284 FeedbackVectorSlot slot) { |
| 2308 if (var->IsUnallocated()) { | 2285 if (var->IsUnallocated()) { |
| 2309 // Global var, const, or let. | 2286 // Global var, const, or let. |
| 2310 __ mov(StoreDescriptor::NameRegister(), Operand(var->name())); | 2287 __ mov(StoreDescriptor::NameRegister(), Operand(var->name())); |
| 2311 __ LoadGlobalObject(StoreDescriptor::ReceiverRegister()); | 2288 __ LoadGlobalObject(StoreDescriptor::ReceiverRegister()); |
| 2312 EmitLoadStoreICSlot(slot); | 2289 EmitLoadStoreICSlot(slot); |
| 2313 CallStoreIC(); | 2290 CallStoreIC(); |
| 2314 | 2291 |
| 2315 } else if (var->mode() == LET && op != Token::INIT) { | 2292 } else if (var->mode() == LET && op != Token::INIT) { |
| 2316 // Non-initializing assignment to let variable needs a write barrier. | 2293 // Non-initializing assignment to let variable needs a write barrier. |
| 2317 DCHECK(!var->IsLookupSlot()); | 2294 DCHECK(!var->IsLookupSlot()); |
| 2318 DCHECK(var->IsStackAllocated() || var->IsContextSlot()); | 2295 DCHECK(var->IsStackAllocated() || var->IsContextSlot()); |
| 2319 Label assign; | 2296 Label assign; |
| 2320 MemOperand location = VarOperand(var, r4); | 2297 MemOperand location = VarOperand(var, r3); |
| 2321 __ LoadP(r6, location); | 2298 __ LoadP(r5, location); |
| 2322 __ CompareRoot(r6, Heap::kTheHoleValueRootIndex); | 2299 __ CompareRoot(r5, Heap::kTheHoleValueRootIndex); |
| 2323 __ bne(&assign); | 2300 __ bne(&assign); |
| 2324 __ mov(r6, Operand(var->name())); | 2301 __ mov(r5, Operand(var->name())); |
| 2325 __ push(r6); | 2302 __ push(r5); |
| 2326 __ CallRuntime(Runtime::kThrowReferenceError); | 2303 __ CallRuntime(Runtime::kThrowReferenceError); |
| 2327 // Perform the assignment. | 2304 // Perform the assignment. |
| 2328 __ bind(&assign); | 2305 __ bind(&assign); |
| 2329 EmitStoreToStackLocalOrContextSlot(var, location); | 2306 EmitStoreToStackLocalOrContextSlot(var, location); |
| 2330 | 2307 |
| 2331 } else if (var->mode() == CONST && op != Token::INIT) { | 2308 } else if (var->mode() == CONST && op != Token::INIT) { |
| 2332 // Assignment to const variable needs a write barrier. | 2309 // Assignment to const variable needs a write barrier. |
| 2333 DCHECK(!var->IsLookupSlot()); | 2310 DCHECK(!var->IsLookupSlot()); |
| 2334 DCHECK(var->IsStackAllocated() || var->IsContextSlot()); | 2311 DCHECK(var->IsStackAllocated() || var->IsContextSlot()); |
| 2335 Label const_error; | 2312 Label const_error; |
| 2336 MemOperand location = VarOperand(var, r4); | 2313 MemOperand location = VarOperand(var, r3); |
| 2337 __ LoadP(r6, location); | 2314 __ LoadP(r5, location); |
| 2338 __ CompareRoot(r6, Heap::kTheHoleValueRootIndex); | 2315 __ CompareRoot(r5, Heap::kTheHoleValueRootIndex); |
| 2339 __ bne(&const_error); | 2316 __ bne(&const_error, Label::kNear); |
| 2340 __ mov(r6, Operand(var->name())); | 2317 __ mov(r5, Operand(var->name())); |
| 2341 __ push(r6); | 2318 __ push(r5); |
| 2342 __ CallRuntime(Runtime::kThrowReferenceError); | 2319 __ CallRuntime(Runtime::kThrowReferenceError); |
| 2343 __ bind(&const_error); | 2320 __ bind(&const_error); |
| 2344 __ CallRuntime(Runtime::kThrowConstAssignError); | 2321 __ CallRuntime(Runtime::kThrowConstAssignError); |
| 2345 | 2322 |
| 2346 } else if (var->is_this() && var->mode() == CONST && op == Token::INIT) { | 2323 } else if (var->is_this() && var->mode() == CONST && op == Token::INIT) { |
| 2347 // Initializing assignment to const {this} needs a write barrier. | 2324 // Initializing assignment to const {this} needs a write barrier. |
| 2348 DCHECK(var->IsStackAllocated() || var->IsContextSlot()); | 2325 DCHECK(var->IsStackAllocated() || var->IsContextSlot()); |
| 2349 Label uninitialized_this; | 2326 Label uninitialized_this; |
| 2350 MemOperand location = VarOperand(var, r4); | 2327 MemOperand location = VarOperand(var, r3); |
| 2351 __ LoadP(r6, location); | 2328 __ LoadP(r5, location); |
| 2352 __ CompareRoot(r6, Heap::kTheHoleValueRootIndex); | 2329 __ CompareRoot(r5, Heap::kTheHoleValueRootIndex); |
| 2353 __ beq(&uninitialized_this); | 2330 __ beq(&uninitialized_this); |
| 2354 __ mov(r4, Operand(var->name())); | 2331 __ mov(r3, Operand(var->name())); |
| 2355 __ push(r4); | 2332 __ push(r3); |
| 2356 __ CallRuntime(Runtime::kThrowReferenceError); | 2333 __ CallRuntime(Runtime::kThrowReferenceError); |
| 2357 __ bind(&uninitialized_this); | 2334 __ bind(&uninitialized_this); |
| 2358 EmitStoreToStackLocalOrContextSlot(var, location); | 2335 EmitStoreToStackLocalOrContextSlot(var, location); |
| 2359 | 2336 |
| 2360 } else if (!var->is_const_mode() || | 2337 } else if (!var->is_const_mode() || |
| 2361 (var->mode() == CONST && op == Token::INIT)) { | 2338 (var->mode() == CONST && op == Token::INIT)) { |
| 2362 if (var->IsLookupSlot()) { | 2339 if (var->IsLookupSlot()) { |
| 2363 // Assignment to var. | 2340 // Assignment to var. |
| 2364 __ Push(var->name()); | 2341 __ Push(var->name()); |
| 2365 __ Push(r3); | 2342 __ Push(r2); |
| 2366 __ CallRuntime(is_strict(language_mode()) | 2343 __ CallRuntime(is_strict(language_mode()) |
| 2367 ? Runtime::kStoreLookupSlot_Strict | 2344 ? Runtime::kStoreLookupSlot_Strict |
| 2368 : Runtime::kStoreLookupSlot_Sloppy); | 2345 : Runtime::kStoreLookupSlot_Sloppy); |
| 2369 } else { | 2346 } else { |
| 2370 // Assignment to var or initializing assignment to let/const in harmony | 2347 // Assignment to var or initializing assignment to let/const in harmony |
| 2371 // mode. | 2348 // mode. |
| 2372 DCHECK((var->IsStackAllocated() || var->IsContextSlot())); | 2349 DCHECK((var->IsStackAllocated() || var->IsContextSlot())); |
| 2373 MemOperand location = VarOperand(var, r4); | 2350 MemOperand location = VarOperand(var, r3); |
| 2374 if (FLAG_debug_code && var->mode() == LET && op == Token::INIT) { | 2351 if (FLAG_debug_code && var->mode() == LET && op == Token::INIT) { |
| 2375 // Check for an uninitialized let binding. | 2352 // Check for an uninitialized let binding. |
| 2376 __ LoadP(r5, location); | 2353 __ LoadP(r4, location); |
| 2377 __ CompareRoot(r5, Heap::kTheHoleValueRootIndex); | 2354 __ CompareRoot(r4, Heap::kTheHoleValueRootIndex); |
| 2378 __ Check(eq, kLetBindingReInitialization); | 2355 __ Check(eq, kLetBindingReInitialization); |
| 2379 } | 2356 } |
| 2380 EmitStoreToStackLocalOrContextSlot(var, location); | 2357 EmitStoreToStackLocalOrContextSlot(var, location); |
| 2381 } | 2358 } |
| 2382 } else if (var->mode() == CONST_LEGACY && op == Token::INIT) { | 2359 } else if (var->mode() == CONST_LEGACY && op == Token::INIT) { |
| 2383 // Const initializers need a write barrier. | 2360 // Const initializers need a write barrier. |
| 2384 DCHECK(!var->IsParameter()); // No const parameters. | 2361 DCHECK(!var->IsParameter()); // No const parameters. |
| 2385 if (var->IsLookupSlot()) { | 2362 if (var->IsLookupSlot()) { |
| 2386 __ push(r3); | 2363 __ push(r2); |
| 2387 __ mov(r3, Operand(var->name())); | 2364 __ mov(r2, Operand(var->name())); |
| 2388 __ Push(cp, r3); // Context and name. | 2365 __ Push(cp, r2); // Context and name. |
| 2389 __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot); | 2366 __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot); |
| 2390 } else { | 2367 } else { |
| 2391 DCHECK(var->IsStackAllocated() || var->IsContextSlot()); | 2368 DCHECK(var->IsStackAllocated() || var->IsContextSlot()); |
| 2392 Label skip; | 2369 Label skip; |
| 2393 MemOperand location = VarOperand(var, r4); | 2370 MemOperand location = VarOperand(var, r3); |
| 2394 __ LoadP(r5, location); | 2371 __ LoadP(r4, location); |
| 2395 __ CompareRoot(r5, Heap::kTheHoleValueRootIndex); | 2372 __ CompareRoot(r4, Heap::kTheHoleValueRootIndex); |
| 2396 __ bne(&skip); | 2373 __ bne(&skip); |
| 2397 EmitStoreToStackLocalOrContextSlot(var, location); | 2374 EmitStoreToStackLocalOrContextSlot(var, location); |
| 2398 __ bind(&skip); | 2375 __ bind(&skip); |
| 2399 } | 2376 } |
| 2400 | 2377 |
| 2401 } else { | 2378 } else { |
| 2402 DCHECK(var->mode() == CONST_LEGACY && op != Token::INIT); | 2379 DCHECK(var->mode() == CONST_LEGACY && op != Token::INIT); |
| 2403 if (is_strict(language_mode())) { | 2380 if (is_strict(language_mode())) { |
| 2404 __ CallRuntime(Runtime::kThrowConstAssignError); | 2381 __ CallRuntime(Runtime::kThrowConstAssignError); |
| 2405 } | 2382 } |
| 2406 // Silently ignore store in sloppy mode. | 2383 // Silently ignore store in sloppy mode. |
| 2407 } | 2384 } |
| 2408 } | 2385 } |
| 2409 | 2386 |
| 2410 | |
| 2411 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) { | 2387 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) { |
| 2412 // Assignment to a property, using a named store IC. | 2388 // Assignment to a property, using a named store IC. |
| 2413 Property* prop = expr->target()->AsProperty(); | 2389 Property* prop = expr->target()->AsProperty(); |
| 2414 DCHECK(prop != NULL); | 2390 DCHECK(prop != NULL); |
| 2415 DCHECK(prop->key()->IsLiteral()); | 2391 DCHECK(prop->key()->IsLiteral()); |
| 2416 | 2392 |
| 2417 __ mov(StoreDescriptor::NameRegister(), | 2393 __ mov(StoreDescriptor::NameRegister(), |
| 2418 Operand(prop->key()->AsLiteral()->value())); | 2394 Operand(prop->key()->AsLiteral()->value())); |
| 2419 PopOperand(StoreDescriptor::ReceiverRegister()); | 2395 PopOperand(StoreDescriptor::ReceiverRegister()); |
| 2420 EmitLoadStoreICSlot(expr->AssignmentSlot()); | 2396 EmitLoadStoreICSlot(expr->AssignmentSlot()); |
| 2421 CallStoreIC(); | 2397 CallStoreIC(); |
| 2422 | 2398 |
| 2423 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); | 2399 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); |
| 2424 context()->Plug(r3); | 2400 context()->Plug(r2); |
| 2425 } | 2401 } |
| 2426 | 2402 |
| 2427 | |
| 2428 void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) { | 2403 void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) { |
| 2429 // Assignment to named property of super. | 2404 // Assignment to named property of super. |
| 2430 // r3 : value | 2405 // r2 : value |
| 2431 // stack : receiver ('this'), home_object | 2406 // stack : receiver ('this'), home_object |
| 2432 DCHECK(prop != NULL); | 2407 DCHECK(prop != NULL); |
| 2433 Literal* key = prop->key()->AsLiteral(); | 2408 Literal* key = prop->key()->AsLiteral(); |
| 2434 DCHECK(key != NULL); | 2409 DCHECK(key != NULL); |
| 2435 | 2410 |
| 2436 PushOperand(key->value()); | 2411 PushOperand(key->value()); |
| 2437 PushOperand(r3); | 2412 PushOperand(r2); |
| 2438 CallRuntimeWithOperands((is_strict(language_mode()) | 2413 CallRuntimeWithOperands((is_strict(language_mode()) |
| 2439 ? Runtime::kStoreToSuper_Strict | 2414 ? Runtime::kStoreToSuper_Strict |
| 2440 : Runtime::kStoreToSuper_Sloppy)); | 2415 : Runtime::kStoreToSuper_Sloppy)); |
| 2441 } | 2416 } |
| 2442 | 2417 |
| 2443 | |
| 2444 void FullCodeGenerator::EmitKeyedSuperPropertyStore(Property* prop) { | 2418 void FullCodeGenerator::EmitKeyedSuperPropertyStore(Property* prop) { |
| 2445 // Assignment to named property of super. | 2419 // Assignment to keyed property of super. |
| 2446 // r3 : value | 2420 // r2 : value |
| 2447 // stack : receiver ('this'), home_object, key | 2421 // stack : receiver ('this'), home_object, key |
| 2448 DCHECK(prop != NULL); | 2422 DCHECK(prop != NULL); |
| 2449 | 2423 |
| 2450 PushOperand(r3); | 2424 PushOperand(r2); |
| 2451 CallRuntimeWithOperands((is_strict(language_mode()) | 2425 CallRuntimeWithOperands((is_strict(language_mode()) |
| 2452 ? Runtime::kStoreKeyedToSuper_Strict | 2426 ? Runtime::kStoreKeyedToSuper_Strict |
| 2453 : Runtime::kStoreKeyedToSuper_Sloppy)); | 2427 : Runtime::kStoreKeyedToSuper_Sloppy)); |
| 2454 } | 2428 } |
| 2455 | 2429 |
| 2456 | |
| 2457 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) { | 2430 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) { |
| 2458 // Assignment to a property, using a keyed store IC. | 2431 // Assignment to a property, using a keyed store IC. |
| 2459 PopOperands(StoreDescriptor::ReceiverRegister(), | 2432 PopOperands(StoreDescriptor::ReceiverRegister(), |
| 2460 StoreDescriptor::NameRegister()); | 2433 StoreDescriptor::NameRegister()); |
| 2461 DCHECK(StoreDescriptor::ValueRegister().is(r3)); | 2434 DCHECK(StoreDescriptor::ValueRegister().is(r2)); |
| 2462 | 2435 |
| 2463 Handle<Code> ic = | 2436 Handle<Code> ic = |
| 2464 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code(); | 2437 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code(); |
| 2465 EmitLoadStoreICSlot(expr->AssignmentSlot()); | 2438 EmitLoadStoreICSlot(expr->AssignmentSlot()); |
| 2466 CallIC(ic); | 2439 CallIC(ic); |
| 2467 | 2440 |
| 2468 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); | 2441 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); |
| 2469 context()->Plug(r3); | 2442 context()->Plug(r2); |
| 2470 } | 2443 } |
| 2471 | 2444 |
| 2472 | |
| 2473 void FullCodeGenerator::VisitProperty(Property* expr) { | 2445 void FullCodeGenerator::VisitProperty(Property* expr) { |
| 2474 Comment cmnt(masm_, "[ Property"); | 2446 Comment cmnt(masm_, "[ Property"); |
| 2475 SetExpressionPosition(expr); | 2447 SetExpressionPosition(expr); |
| 2476 | 2448 |
| 2477 Expression* key = expr->key(); | 2449 Expression* key = expr->key(); |
| 2478 | 2450 |
| 2479 if (key->IsPropertyName()) { | 2451 if (key->IsPropertyName()) { |
| 2480 if (!expr->IsSuperAccess()) { | 2452 if (!expr->IsSuperAccess()) { |
| 2481 VisitForAccumulatorValue(expr->obj()); | 2453 VisitForAccumulatorValue(expr->obj()); |
| 2482 __ Move(LoadDescriptor::ReceiverRegister(), r3); | 2454 __ Move(LoadDescriptor::ReceiverRegister(), r2); |
| 2483 EmitNamedPropertyLoad(expr); | 2455 EmitNamedPropertyLoad(expr); |
| 2484 } else { | 2456 } else { |
| 2485 VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var()); | 2457 VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var()); |
| 2486 VisitForStackValue( | 2458 VisitForStackValue( |
| 2487 expr->obj()->AsSuperPropertyReference()->home_object()); | 2459 expr->obj()->AsSuperPropertyReference()->home_object()); |
| 2488 EmitNamedSuperPropertyLoad(expr); | 2460 EmitNamedSuperPropertyLoad(expr); |
| 2489 } | 2461 } |
| 2490 } else { | 2462 } else { |
| 2491 if (!expr->IsSuperAccess()) { | 2463 if (!expr->IsSuperAccess()) { |
| 2492 VisitForStackValue(expr->obj()); | 2464 VisitForStackValue(expr->obj()); |
| 2493 VisitForAccumulatorValue(expr->key()); | 2465 VisitForAccumulatorValue(expr->key()); |
| 2494 __ Move(LoadDescriptor::NameRegister(), r3); | 2466 __ Move(LoadDescriptor::NameRegister(), r2); |
| 2495 PopOperand(LoadDescriptor::ReceiverRegister()); | 2467 PopOperand(LoadDescriptor::ReceiverRegister()); |
| 2496 EmitKeyedPropertyLoad(expr); | 2468 EmitKeyedPropertyLoad(expr); |
| 2497 } else { | 2469 } else { |
| 2498 VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var()); | 2470 VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var()); |
| 2499 VisitForStackValue( | 2471 VisitForStackValue( |
| 2500 expr->obj()->AsSuperPropertyReference()->home_object()); | 2472 expr->obj()->AsSuperPropertyReference()->home_object()); |
| 2501 VisitForStackValue(expr->key()); | 2473 VisitForStackValue(expr->key()); |
| 2502 EmitKeyedSuperPropertyLoad(expr); | 2474 EmitKeyedSuperPropertyLoad(expr); |
| 2503 } | 2475 } |
| 2504 } | 2476 } |
| 2505 PrepareForBailoutForId(expr->LoadId(), TOS_REG); | 2477 PrepareForBailoutForId(expr->LoadId(), TOS_REG); |
| 2506 context()->Plug(r3); | 2478 context()->Plug(r2); |
| 2507 } | 2479 } |
| 2508 | 2480 |
| 2509 | |
| 2510 void FullCodeGenerator::CallIC(Handle<Code> code, TypeFeedbackId ast_id) { | 2481 void FullCodeGenerator::CallIC(Handle<Code> code, TypeFeedbackId ast_id) { |
| 2511 ic_total_count_++; | 2482 ic_total_count_++; |
| 2512 __ Call(code, RelocInfo::CODE_TARGET, ast_id); | 2483 __ Call(code, RelocInfo::CODE_TARGET, ast_id); |
| 2513 } | 2484 } |
| 2514 | 2485 |
| 2515 | |
| 2516 // Code common for calls using the IC. | 2486 // Code common for calls using the IC. |
| 2517 void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) { | 2487 void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) { |
| 2518 Expression* callee = expr->expression(); | 2488 Expression* callee = expr->expression(); |
| 2519 | 2489 |
| 2520 // Get the target function. | 2490 // Get the target function. |
| 2521 ConvertReceiverMode convert_mode; | 2491 ConvertReceiverMode convert_mode; |
| 2522 if (callee->IsVariableProxy()) { | 2492 if (callee->IsVariableProxy()) { |
| 2523 { | 2493 { |
| 2524 StackValueContext context(this); | 2494 StackValueContext context(this); |
| 2525 EmitVariableLoad(callee->AsVariableProxy()); | 2495 EmitVariableLoad(callee->AsVariableProxy()); |
| 2526 PrepareForBailout(callee, NO_REGISTERS); | 2496 PrepareForBailout(callee, NO_REGISTERS); |
| 2527 } | 2497 } |
| 2528 // Push undefined as receiver. This is patched in the method prologue if it | 2498 // Push undefined as receiver. This is patched in the method prologue if it |
| 2529 // is a sloppy mode method. | 2499 // is a sloppy mode method. |
| 2530 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex); | 2500 __ LoadRoot(r1, Heap::kUndefinedValueRootIndex); |
| 2531 PushOperand(r0); | 2501 PushOperand(r1); |
| 2532 convert_mode = ConvertReceiverMode::kNullOrUndefined; | 2502 convert_mode = ConvertReceiverMode::kNullOrUndefined; |
| 2533 } else { | 2503 } else { |
| 2534 // Load the function from the receiver. | 2504 // Load the function from the receiver. |
| 2535 DCHECK(callee->IsProperty()); | 2505 DCHECK(callee->IsProperty()); |
| 2536 DCHECK(!callee->AsProperty()->IsSuperAccess()); | 2506 DCHECK(!callee->AsProperty()->IsSuperAccess()); |
| 2537 __ LoadP(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0)); | 2507 __ LoadP(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0)); |
| 2538 EmitNamedPropertyLoad(callee->AsProperty()); | 2508 EmitNamedPropertyLoad(callee->AsProperty()); |
| 2539 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG); | 2509 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG); |
| 2540 // Push the target function under the receiver. | 2510 // Push the target function under the receiver. |
| 2541 __ LoadP(r0, MemOperand(sp, 0)); | 2511 __ LoadP(r1, MemOperand(sp, 0)); |
| 2542 PushOperand(r0); | 2512 PushOperand(r1); |
| 2543 __ StoreP(r3, MemOperand(sp, kPointerSize)); | 2513 __ StoreP(r2, MemOperand(sp, kPointerSize)); |
| 2544 convert_mode = ConvertReceiverMode::kNotNullOrUndefined; | 2514 convert_mode = ConvertReceiverMode::kNotNullOrUndefined; |
| 2545 } | 2515 } |
| 2546 | 2516 |
| 2547 EmitCall(expr, convert_mode); | 2517 EmitCall(expr, convert_mode); |
| 2548 } | 2518 } |
| 2549 | 2519 |
| 2550 | |
| 2551 void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) { | 2520 void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) { |
| 2552 Expression* callee = expr->expression(); | 2521 Expression* callee = expr->expression(); |
| 2553 DCHECK(callee->IsProperty()); | 2522 DCHECK(callee->IsProperty()); |
| 2554 Property* prop = callee->AsProperty(); | 2523 Property* prop = callee->AsProperty(); |
| 2555 DCHECK(prop->IsSuperAccess()); | 2524 DCHECK(prop->IsSuperAccess()); |
| 2556 SetExpressionPosition(prop); | 2525 SetExpressionPosition(prop); |
| 2557 | 2526 |
| 2558 Literal* key = prop->key()->AsLiteral(); | 2527 Literal* key = prop->key()->AsLiteral(); |
| 2559 DCHECK(!key->value()->IsSmi()); | 2528 DCHECK(!key->value()->IsSmi()); |
| 2560 // Load the function from the receiver. | 2529 // Load the function from the receiver. |
| 2561 const Register scratch = r4; | 2530 const Register scratch = r3; |
| 2562 SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference(); | 2531 SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference(); |
| 2563 VisitForAccumulatorValue(super_ref->home_object()); | 2532 VisitForAccumulatorValue(super_ref->home_object()); |
| 2564 __ mr(scratch, r3); | 2533 __ LoadRR(scratch, r2); |
| 2565 VisitForAccumulatorValue(super_ref->this_var()); | 2534 VisitForAccumulatorValue(super_ref->this_var()); |
| 2566 PushOperands(scratch, r3, r3, scratch); | 2535 PushOperands(scratch, r2, r2, scratch); |
| 2567 PushOperand(key->value()); | 2536 PushOperand(key->value()); |
| 2568 | 2537 |
| 2569 // Stack here: | 2538 // Stack here: |
| 2570 // - home_object | 2539 // - home_object |
| 2571 // - this (receiver) | 2540 // - this (receiver) |
| 2572 // - this (receiver) <-- LoadFromSuper will pop here and below. | 2541 // - this (receiver) <-- LoadFromSuper will pop here and below. |
| 2573 // - home_object | 2542 // - home_object |
| 2574 // - key | 2543 // - key |
| 2575 CallRuntimeWithOperands(Runtime::kLoadFromSuper); | 2544 CallRuntimeWithOperands(Runtime::kLoadFromSuper); |
| 2576 | 2545 |
| 2577 // Replace home_object with target function. | 2546 // Replace home_object with target function. |
| 2578 __ StoreP(r3, MemOperand(sp, kPointerSize)); | 2547 __ StoreP(r2, MemOperand(sp, kPointerSize)); |
| 2579 | 2548 |
| 2580 // Stack here: | 2549 // Stack here: |
| 2581 // - target function | 2550 // - target function |
| 2582 // - this (receiver) | 2551 // - this (receiver) |
| 2583 EmitCall(expr); | 2552 EmitCall(expr); |
| 2584 } | 2553 } |
| 2585 | 2554 |
| 2586 | |
| 2587 // Code common for calls using the IC. | 2555 // Code common for calls using the IC. |
| 2588 void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr, Expression* key) { | 2556 void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr, Expression* key) { |
| 2589 // Load the key. | 2557 // Load the key. |
| 2590 VisitForAccumulatorValue(key); | 2558 VisitForAccumulatorValue(key); |
| 2591 | 2559 |
| 2592 Expression* callee = expr->expression(); | 2560 Expression* callee = expr->expression(); |
| 2593 | 2561 |
| 2594 // Load the function from the receiver. | 2562 // Load the function from the receiver. |
| 2595 DCHECK(callee->IsProperty()); | 2563 DCHECK(callee->IsProperty()); |
| 2596 __ LoadP(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0)); | 2564 __ LoadP(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0)); |
| 2597 __ Move(LoadDescriptor::NameRegister(), r3); | 2565 __ Move(LoadDescriptor::NameRegister(), r2); |
| 2598 EmitKeyedPropertyLoad(callee->AsProperty()); | 2566 EmitKeyedPropertyLoad(callee->AsProperty()); |
| 2599 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG); | 2567 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG); |
| 2600 | 2568 |
| 2601 // Push the target function under the receiver. | 2569 // Push the target function under the receiver. |
| 2602 __ LoadP(ip, MemOperand(sp, 0)); | 2570 __ LoadP(ip, MemOperand(sp, 0)); |
| 2603 PushOperand(ip); | 2571 PushOperand(ip); |
| 2604 __ StoreP(r3, MemOperand(sp, kPointerSize)); | 2572 __ StoreP(r2, MemOperand(sp, kPointerSize)); |
| 2605 | 2573 |
| 2606 EmitCall(expr, ConvertReceiverMode::kNotNullOrUndefined); | 2574 EmitCall(expr, ConvertReceiverMode::kNotNullOrUndefined); |
| 2607 } | 2575 } |
| 2608 | 2576 |
| 2609 | |
| 2610 void FullCodeGenerator::EmitKeyedSuperCallWithLoadIC(Call* expr) { | 2577 void FullCodeGenerator::EmitKeyedSuperCallWithLoadIC(Call* expr) { |
| 2611 Expression* callee = expr->expression(); | 2578 Expression* callee = expr->expression(); |
| 2612 DCHECK(callee->IsProperty()); | 2579 DCHECK(callee->IsProperty()); |
| 2613 Property* prop = callee->AsProperty(); | 2580 Property* prop = callee->AsProperty(); |
| 2614 DCHECK(prop->IsSuperAccess()); | 2581 DCHECK(prop->IsSuperAccess()); |
| 2615 | 2582 |
| 2616 SetExpressionPosition(prop); | 2583 SetExpressionPosition(prop); |
| 2617 // Load the function from the receiver. | 2584 // Load the function from the receiver. |
| 2618 const Register scratch = r4; | 2585 const Register scratch = r3; |
| 2619 SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference(); | 2586 SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference(); |
| 2620 VisitForAccumulatorValue(super_ref->home_object()); | 2587 VisitForAccumulatorValue(super_ref->home_object()); |
| 2621 __ mr(scratch, r3); | 2588 __ LoadRR(scratch, r2); |
| 2622 VisitForAccumulatorValue(super_ref->this_var()); | 2589 VisitForAccumulatorValue(super_ref->this_var()); |
| 2623 PushOperands(scratch, r3, r3, scratch); | 2590 PushOperands(scratch, r2, r2, scratch); |
| 2624 VisitForStackValue(prop->key()); | 2591 VisitForStackValue(prop->key()); |
| 2625 | 2592 |
| 2626 // Stack here: | 2593 // Stack here: |
| 2627 // - home_object | 2594 // - home_object |
| 2628 // - this (receiver) | 2595 // - this (receiver) |
| 2629 // - this (receiver) <-- LoadKeyedFromSuper will pop here and below. | 2596 // - this (receiver) <-- LoadKeyedFromSuper will pop here and below. |
| 2630 // - home_object | 2597 // - home_object |
| 2631 // - key | 2598 // - key |
| 2632 CallRuntimeWithOperands(Runtime::kLoadKeyedFromSuper); | 2599 CallRuntimeWithOperands(Runtime::kLoadKeyedFromSuper); |
| 2633 | 2600 |
| 2634 // Replace home_object with target function. | 2601 // Replace home_object with target function. |
| 2635 __ StoreP(r3, MemOperand(sp, kPointerSize)); | 2602 __ StoreP(r2, MemOperand(sp, kPointerSize)); |
| 2636 | 2603 |
| 2637 // Stack here: | 2604 // Stack here: |
| 2638 // - target function | 2605 // - target function |
| 2639 // - this (receiver) | 2606 // - this (receiver) |
| 2640 EmitCall(expr); | 2607 EmitCall(expr); |
| 2641 } | 2608 } |
| 2642 | 2609 |
| 2643 | |
| 2644 void FullCodeGenerator::EmitCall(Call* expr, ConvertReceiverMode mode) { | 2610 void FullCodeGenerator::EmitCall(Call* expr, ConvertReceiverMode mode) { |
| 2645 // Load the arguments. | 2611 // Load the arguments. |
| 2646 ZoneList<Expression*>* args = expr->arguments(); | 2612 ZoneList<Expression*>* args = expr->arguments(); |
| 2647 int arg_count = args->length(); | 2613 int arg_count = args->length(); |
| 2648 for (int i = 0; i < arg_count; i++) { | 2614 for (int i = 0; i < arg_count; i++) { |
| 2649 VisitForStackValue(args->at(i)); | 2615 VisitForStackValue(args->at(i)); |
| 2650 } | 2616 } |
| 2651 | 2617 |
| 2652 PrepareForBailoutForId(expr->CallId(), NO_REGISTERS); | 2618 PrepareForBailoutForId(expr->CallId(), NO_REGISTERS); |
| 2653 SetCallPosition(expr); | 2619 SetCallPosition(expr); |
| 2654 if (expr->tail_call_mode() == TailCallMode::kAllow) { | 2620 if (expr->tail_call_mode() == TailCallMode::kAllow) { |
| 2655 if (FLAG_trace) { | 2621 if (FLAG_trace) { |
| 2656 __ CallRuntime(Runtime::kTraceTailCall); | 2622 __ CallRuntime(Runtime::kTraceTailCall); |
| 2657 } | 2623 } |
| 2658 // Update profiling counters before the tail call since we will | 2624 // Update profiling counters before the tail call since we will |
| 2659 // not return to this function. | 2625 // not return to this function. |
| 2660 EmitProfilingCounterHandlingForReturnSequence(true); | 2626 EmitProfilingCounterHandlingForReturnSequence(true); |
| 2661 } | 2627 } |
| 2662 Handle<Code> ic = | 2628 Handle<Code> ic = |
| 2663 CodeFactory::CallIC(isolate(), arg_count, mode, expr->tail_call_mode()) | 2629 CodeFactory::CallIC(isolate(), arg_count, mode, expr->tail_call_mode()) |
| 2664 .code(); | 2630 .code(); |
| 2665 __ LoadSmiLiteral(r6, SmiFromSlot(expr->CallFeedbackICSlot())); | 2631 __ LoadSmiLiteral(r5, SmiFromSlot(expr->CallFeedbackICSlot())); |
| 2666 __ LoadP(r4, MemOperand(sp, (arg_count + 1) * kPointerSize), r0); | 2632 __ LoadP(r3, MemOperand(sp, (arg_count + 1) * kPointerSize), r0); |
| 2667 // Don't assign a type feedback id to the IC, since type feedback is provided | 2633 // Don't assign a type feedback id to the IC, since type feedback is provided |
| 2668 // by the vector above. | 2634 // by the vector above. |
| 2669 CallIC(ic); | 2635 CallIC(ic); |
| 2670 OperandStackDepthDecrement(arg_count + 1); | 2636 OperandStackDepthDecrement(arg_count + 1); |
| 2671 | 2637 |
| 2672 RecordJSReturnSite(expr); | 2638 RecordJSReturnSite(expr); |
| 2673 // Restore context register. | 2639 // Restore context register. |
| 2674 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); | 2640 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
| 2675 context()->DropAndPlug(1, r3); | 2641 context()->DropAndPlug(1, r2); |
| 2676 } | 2642 } |
| 2677 | 2643 |
| 2678 | |
| 2679 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) { | 2644 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) { |
| 2680 // r7: copy of the first argument or undefined if it doesn't exist. | 2645 // r6: copy of the first argument or undefined if it doesn't exist. |
| 2681 if (arg_count > 0) { | 2646 if (arg_count > 0) { |
| 2682 __ LoadP(r7, MemOperand(sp, arg_count * kPointerSize), r0); | 2647 __ LoadP(r6, MemOperand(sp, arg_count * kPointerSize), r0); |
| 2683 } else { | 2648 } else { |
| 2684 __ LoadRoot(r7, Heap::kUndefinedValueRootIndex); | 2649 __ LoadRoot(r6, Heap::kUndefinedValueRootIndex); |
| 2685 } | 2650 } |
| 2686 | 2651 |
| 2687 // r6: the enclosing function. | 2652 // r5: the enclosing function. |
| 2688 __ LoadP(r6, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); | 2653 __ LoadP(r5, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); |
| 2689 | 2654 |
| 2690 // r5: language mode. | 2655 // r4: language mode. |
| 2691 __ LoadSmiLiteral(r5, Smi::FromInt(language_mode())); | 2656 __ LoadSmiLiteral(r4, Smi::FromInt(language_mode())); |
| 2692 | 2657 |
| 2693 // r4: the start position of the scope the call resides in. | 2658 // r3: the start position of the scope the call resides in. |
| 2694 __ LoadSmiLiteral(r4, Smi::FromInt(scope()->start_position())); | 2659 __ LoadSmiLiteral(r3, Smi::FromInt(scope()->start_position())); |
| 2695 | 2660 |
| 2696 // Do the runtime call. | 2661 // Do the runtime call. |
| 2697 __ Push(r7, r6, r5, r4); | 2662 __ Push(r6, r5, r4, r3); |
| 2698 __ CallRuntime(Runtime::kResolvePossiblyDirectEval); | 2663 __ CallRuntime(Runtime::kResolvePossiblyDirectEval); |
| 2699 } | 2664 } |
| 2700 | 2665 |
| 2701 | |
| 2702 // See http://www.ecma-international.org/ecma-262/6.0/#sec-function-calls. | 2666 // See http://www.ecma-international.org/ecma-262/6.0/#sec-function-calls. |
| 2703 void FullCodeGenerator::PushCalleeAndWithBaseObject(Call* expr) { | 2667 void FullCodeGenerator::PushCalleeAndWithBaseObject(Call* expr) { |
| 2704 VariableProxy* callee = expr->expression()->AsVariableProxy(); | 2668 VariableProxy* callee = expr->expression()->AsVariableProxy(); |
| 2705 if (callee->var()->IsLookupSlot()) { | 2669 if (callee->var()->IsLookupSlot()) { |
| 2706 Label slow, done; | 2670 Label slow, done; |
| 2707 SetExpressionPosition(callee); | 2671 SetExpressionPosition(callee); |
| 2708 // Generate code for loading from variables potentially shadowed by | 2672 // Generate code for loading from variables potentially shadowed by |
| 2709 // eval-introduced variables. | 2673 // eval-introduced variables. |
| 2710 EmitDynamicLookupFastCase(callee, NOT_INSIDE_TYPEOF, &slow, &done); | 2674 EmitDynamicLookupFastCase(callee, NOT_INSIDE_TYPEOF, &slow, &done); |
| 2711 | 2675 |
| 2712 __ bind(&slow); | 2676 __ bind(&slow); |
| 2713 // Call the runtime to find the function to call (returned in r3) and | 2677 // Call the runtime to find the function to call (returned in r2) and |
| 2714 // the object holding it (returned in r4). | 2678 // the object holding it (returned in r3). |
| 2715 __ Push(callee->name()); | 2679 __ Push(callee->name()); |
| 2716 __ CallRuntime(Runtime::kLoadLookupSlotForCall); | 2680 __ CallRuntime(Runtime::kLoadLookupSlotForCall); |
| 2717 PushOperands(r3, r4); // Function, receiver. | 2681 PushOperands(r2, r3); // Function, receiver. |
| 2718 PrepareForBailoutForId(expr->LookupId(), NO_REGISTERS); | 2682 PrepareForBailoutForId(expr->LookupId(), NO_REGISTERS); |
| 2719 | 2683 |
| 2720 // If fast case code has been generated, emit code to push the function | 2684 // If fast case code has been generated, emit code to push the function |
| 2721 // and receiver and have the slow path jump around this code. | 2685 // and receiver and have the slow path jump around this code. |
| 2722 if (done.is_linked()) { | 2686 if (done.is_linked()) { |
| 2723 Label call; | 2687 Label call; |
| 2724 __ b(&call); | 2688 __ b(&call); |
| 2725 __ bind(&done); | 2689 __ bind(&done); |
| 2726 // Push function. | 2690 // Push function. |
| 2727 __ push(r3); | 2691 __ push(r2); |
| 2728 // Pass undefined as the receiver, which is the WithBaseObject of a | 2692 // Pass undefined as the receiver, which is the WithBaseObject of a |
| 2729 // non-object environment record. If the callee is sloppy, it will patch | 2693 // non-object environment record. If the callee is sloppy, it will patch |
| 2730 // it up to be the global receiver. | 2694 // it up to be the global receiver. |
| 2731 __ LoadRoot(r4, Heap::kUndefinedValueRootIndex); | 2695 __ LoadRoot(r3, Heap::kUndefinedValueRootIndex); |
| 2732 __ push(r4); | 2696 __ push(r3); |
| 2733 __ bind(&call); | 2697 __ bind(&call); |
| 2734 } | 2698 } |
| 2735 } else { | 2699 } else { |
| 2736 VisitForStackValue(callee); | 2700 VisitForStackValue(callee); |
| 2737 // refEnv.WithBaseObject() | 2701 // refEnv.WithBaseObject() |
| 2738 __ LoadRoot(r5, Heap::kUndefinedValueRootIndex); | 2702 __ LoadRoot(r4, Heap::kUndefinedValueRootIndex); |
| 2739 PushOperand(r5); // Reserved receiver slot. | 2703 PushOperand(r4); // Reserved receiver slot. |
| 2740 } | 2704 } |
| 2741 } | 2705 } |
| 2742 | 2706 |
| 2743 | |
| 2744 void FullCodeGenerator::EmitPossiblyEvalCall(Call* expr) { | 2707 void FullCodeGenerator::EmitPossiblyEvalCall(Call* expr) { |
| 2745 // In a call to eval, we first call RuntimeHidden_ResolvePossiblyDirectEval | 2708 // In a call to eval, we first call RuntimeHidden_ResolvePossiblyDirectEval |
| 2746 // to resolve the function we need to call. Then we call the resolved | 2709 // to resolve the function we need to call. Then we call the resolved |
| 2747 // function using the given arguments. | 2710 // function using the given arguments. |
| 2748 ZoneList<Expression*>* args = expr->arguments(); | 2711 ZoneList<Expression*>* args = expr->arguments(); |
| 2749 int arg_count = args->length(); | 2712 int arg_count = args->length(); |
| 2750 | 2713 |
| 2751 PushCalleeAndWithBaseObject(expr); | 2714 PushCalleeAndWithBaseObject(expr); |
| 2752 | 2715 |
| 2753 // Push the arguments. | 2716 // Push the arguments. |
| 2754 for (int i = 0; i < arg_count; i++) { | 2717 for (int i = 0; i < arg_count; i++) { |
| 2755 VisitForStackValue(args->at(i)); | 2718 VisitForStackValue(args->at(i)); |
| 2756 } | 2719 } |
| 2757 | 2720 |
| 2758 // Push a copy of the function (found below the arguments) and | 2721 // Push a copy of the function (found below the arguments) and |
| 2759 // resolve eval. | 2722 // resolve eval. |
| 2760 __ LoadP(r4, MemOperand(sp, (arg_count + 1) * kPointerSize), r0); | 2723 __ LoadP(r3, MemOperand(sp, (arg_count + 1) * kPointerSize), r0); |
| 2761 __ push(r4); | 2724 __ push(r3); |
| 2762 EmitResolvePossiblyDirectEval(arg_count); | 2725 EmitResolvePossiblyDirectEval(arg_count); |
| 2763 | 2726 |
| 2764 // Touch up the stack with the resolved function. | 2727 // Touch up the stack with the resolved function. |
| 2765 __ StoreP(r3, MemOperand(sp, (arg_count + 1) * kPointerSize), r0); | 2728 __ StoreP(r2, MemOperand(sp, (arg_count + 1) * kPointerSize), r0); |
| 2766 | 2729 |
| 2767 PrepareForBailoutForId(expr->EvalId(), NO_REGISTERS); | 2730 PrepareForBailoutForId(expr->EvalId(), NO_REGISTERS); |
| 2768 | 2731 |
| 2769 // Record source position for debugger. | 2732 // Record source position for debugger. |
| 2770 SetCallPosition(expr); | 2733 SetCallPosition(expr); |
| 2771 __ LoadP(r4, MemOperand(sp, (arg_count + 1) * kPointerSize), r0); | 2734 __ LoadP(r3, MemOperand(sp, (arg_count + 1) * kPointerSize), r0); |
| 2772 __ mov(r3, Operand(arg_count)); | 2735 __ mov(r2, Operand(arg_count)); |
| 2773 __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kAny, | 2736 __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kAny, |
| 2774 expr->tail_call_mode()), | 2737 expr->tail_call_mode()), |
| 2775 RelocInfo::CODE_TARGET); | 2738 RelocInfo::CODE_TARGET); |
| 2776 OperandStackDepthDecrement(arg_count + 1); | 2739 OperandStackDepthDecrement(arg_count + 1); |
| 2777 RecordJSReturnSite(expr); | 2740 RecordJSReturnSite(expr); |
| 2778 // Restore context register. | 2741 // Restore context register. |
| 2779 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); | 2742 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
| 2780 context()->DropAndPlug(1, r3); | 2743 context()->DropAndPlug(1, r2); |
| 2781 } | 2744 } |
| 2782 | 2745 |
| 2783 | |
| 2784 void FullCodeGenerator::VisitCallNew(CallNew* expr) { | 2746 void FullCodeGenerator::VisitCallNew(CallNew* expr) { |
| 2785 Comment cmnt(masm_, "[ CallNew"); | 2747 Comment cmnt(masm_, "[ CallNew"); |
| 2786 // According to ECMA-262, section 11.2.2, page 44, the function | 2748 // According to ECMA-262, section 11.2.2, page 44, the function |
| 2787 // expression in new calls must be evaluated before the | 2749 // expression in new calls must be evaluated before the |
| 2788 // arguments. | 2750 // arguments. |
| 2789 | 2751 |
| 2790 // Push constructor on the stack. If it's not a function it's used as | 2752 // Push constructor on the stack. If it's not a function it's used as |
| 2791 // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is | 2753 // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is |
| 2792 // ignored. | 2754 // ignored. |
| 2793 DCHECK(!expr->expression()->IsSuperPropertyReference()); | 2755 DCHECK(!expr->expression()->IsSuperPropertyReference()); |
| 2794 VisitForStackValue(expr->expression()); | 2756 VisitForStackValue(expr->expression()); |
| 2795 | 2757 |
| 2796 // Push the arguments ("left-to-right") on the stack. | 2758 // Push the arguments ("left-to-right") on the stack. |
| 2797 ZoneList<Expression*>* args = expr->arguments(); | 2759 ZoneList<Expression*>* args = expr->arguments(); |
| 2798 int arg_count = args->length(); | 2760 int arg_count = args->length(); |
| 2799 for (int i = 0; i < arg_count; i++) { | 2761 for (int i = 0; i < arg_count; i++) { |
| 2800 VisitForStackValue(args->at(i)); | 2762 VisitForStackValue(args->at(i)); |
| 2801 } | 2763 } |
| 2802 | 2764 |
| 2803 // Call the construct call builtin that handles allocation and | 2765 // Call the construct call builtin that handles allocation and |
| 2804 // constructor invocation. | 2766 // constructor invocation. |
| 2805 SetConstructCallPosition(expr); | 2767 SetConstructCallPosition(expr); |
| 2806 | 2768 |
| 2807 // Load function and argument count into r4 and r3. | 2769 // Load function and argument count into r3 and r2. |
| 2808 __ mov(r3, Operand(arg_count)); | 2770 __ mov(r2, Operand(arg_count)); |
| 2809 __ LoadP(r4, MemOperand(sp, arg_count * kPointerSize), r0); | 2771 __ LoadP(r3, MemOperand(sp, arg_count * kPointerSize), r0); |
| 2810 | 2772 |
| 2811 // Record call targets in unoptimized code. | 2773 // Record call targets in unoptimized code. |
| 2812 __ EmitLoadTypeFeedbackVector(r5); | 2774 __ EmitLoadTypeFeedbackVector(r4); |
| 2813 __ LoadSmiLiteral(r6, SmiFromSlot(expr->CallNewFeedbackSlot())); | 2775 __ LoadSmiLiteral(r5, SmiFromSlot(expr->CallNewFeedbackSlot())); |
| 2814 | 2776 |
| 2815 CallConstructStub stub(isolate()); | 2777 CallConstructStub stub(isolate()); |
| 2816 __ Call(stub.GetCode(), RelocInfo::CODE_TARGET); | 2778 __ Call(stub.GetCode(), RelocInfo::CODE_TARGET); |
| 2817 OperandStackDepthDecrement(arg_count + 1); | 2779 OperandStackDepthDecrement(arg_count + 1); |
| 2818 PrepareForBailoutForId(expr->ReturnId(), TOS_REG); | 2780 PrepareForBailoutForId(expr->ReturnId(), TOS_REG); |
| 2819 // Restore context register. | 2781 // Restore context register. |
| 2820 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); | 2782 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
| 2821 context()->Plug(r3); | 2783 context()->Plug(r2); |
| 2822 } | 2784 } |
| 2823 | 2785 |
| 2824 | |
| 2825 void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) { | 2786 void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) { |
| 2826 SuperCallReference* super_call_ref = | 2787 SuperCallReference* super_call_ref = |
| 2827 expr->expression()->AsSuperCallReference(); | 2788 expr->expression()->AsSuperCallReference(); |
| 2828 DCHECK_NOT_NULL(super_call_ref); | 2789 DCHECK_NOT_NULL(super_call_ref); |
| 2829 | 2790 |
| 2830 // Push the super constructor target on the stack (may be null, | 2791 // Push the super constructor target on the stack (may be null, |
| 2831 // but the Construct builtin can deal with that properly). | 2792 // but the Construct builtin can deal with that properly). |
| 2832 VisitForAccumulatorValue(super_call_ref->this_function_var()); | 2793 VisitForAccumulatorValue(super_call_ref->this_function_var()); |
| 2833 __ AssertFunction(result_register()); | 2794 __ AssertFunction(result_register()); |
| 2834 __ LoadP(result_register(), | 2795 __ LoadP(result_register(), |
| 2835 FieldMemOperand(result_register(), HeapObject::kMapOffset)); | 2796 FieldMemOperand(result_register(), HeapObject::kMapOffset)); |
| 2836 __ LoadP(result_register(), | 2797 __ LoadP(result_register(), |
| 2837 FieldMemOperand(result_register(), Map::kPrototypeOffset)); | 2798 FieldMemOperand(result_register(), Map::kPrototypeOffset)); |
| 2838 PushOperand(result_register()); | 2799 PushOperand(result_register()); |
| 2839 | 2800 |
| 2840 // Push the arguments ("left-to-right") on the stack. | 2801 // Push the arguments ("left-to-right") on the stack. |
| 2841 ZoneList<Expression*>* args = expr->arguments(); | 2802 ZoneList<Expression*>* args = expr->arguments(); |
| 2842 int arg_count = args->length(); | 2803 int arg_count = args->length(); |
| 2843 for (int i = 0; i < arg_count; i++) { | 2804 for (int i = 0; i < arg_count; i++) { |
| 2844 VisitForStackValue(args->at(i)); | 2805 VisitForStackValue(args->at(i)); |
| 2845 } | 2806 } |
| 2846 | 2807 |
| 2847 // Call the construct call builtin that handles allocation and | 2808 // Call the construct call builtin that handles allocation and |
| 2848 // constructor invocation. | 2809 // constructor invocation. |
| 2849 SetConstructCallPosition(expr); | 2810 SetConstructCallPosition(expr); |
| 2850 | 2811 |
| 2851 // Load new target into r6. | 2812 // Load new target into r5. |
| 2852 VisitForAccumulatorValue(super_call_ref->new_target_var()); | 2813 VisitForAccumulatorValue(super_call_ref->new_target_var()); |
| 2853 __ mr(r6, result_register()); | 2814 __ LoadRR(r5, result_register()); |
| 2854 | 2815 |
| 2855 // Load function and argument count into r4 and r3. | 2816 // Load function and argument count into r3 and r2. |
| 2856 __ mov(r3, Operand(arg_count)); | 2817 __ mov(r2, Operand(arg_count)); |
| 2857 __ LoadP(r4, MemOperand(sp, arg_count * kPointerSize)); | 2818 __ LoadP(r3, MemOperand(sp, arg_count * kPointerSize)); |
| 2858 | 2819 |
| 2859 __ Call(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); | 2820 __ Call(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); |
| 2860 OperandStackDepthDecrement(arg_count + 1); | 2821 OperandStackDepthDecrement(arg_count + 1); |
| 2861 | 2822 |
| 2862 RecordJSReturnSite(expr); | 2823 RecordJSReturnSite(expr); |
| 2863 | 2824 |
| 2864 // Restore context register. | 2825 // Restore context register. |
| 2865 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); | 2826 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
| 2866 context()->Plug(r3); | 2827 context()->Plug(r2); |
| 2867 } | 2828 } |
| 2868 | 2829 |
| 2869 | |
| 2870 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) { | 2830 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) { |
| 2871 ZoneList<Expression*>* args = expr->arguments(); | 2831 ZoneList<Expression*>* args = expr->arguments(); |
| 2872 DCHECK(args->length() == 1); | 2832 DCHECK(args->length() == 1); |
| 2873 | 2833 |
| 2874 VisitForAccumulatorValue(args->at(0)); | 2834 VisitForAccumulatorValue(args->at(0)); |
| 2875 | 2835 |
| 2876 Label materialize_true, materialize_false; | 2836 Label materialize_true, materialize_false; |
| 2877 Label* if_true = NULL; | 2837 Label* if_true = NULL; |
| 2878 Label* if_false = NULL; | 2838 Label* if_false = NULL; |
| 2879 Label* fall_through = NULL; | 2839 Label* fall_through = NULL; |
| 2880 context()->PrepareTest(&materialize_true, &materialize_false, &if_true, | 2840 context()->PrepareTest(&materialize_true, &materialize_false, &if_true, |
| 2881 &if_false, &fall_through); | 2841 &if_false, &fall_through); |
| 2882 | 2842 |
| 2883 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); | 2843 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); |
| 2884 __ TestIfSmi(r3, r0); | 2844 __ TestIfSmi(r2); |
| 2885 Split(eq, if_true, if_false, fall_through, cr0); | 2845 Split(eq, if_true, if_false, fall_through); |
| 2886 | 2846 |
| 2887 context()->Plug(if_true, if_false); | 2847 context()->Plug(if_true, if_false); |
| 2888 } | 2848 } |
| 2889 | 2849 |
| 2890 | |
| 2891 void FullCodeGenerator::EmitIsJSReceiver(CallRuntime* expr) { | 2850 void FullCodeGenerator::EmitIsJSReceiver(CallRuntime* expr) { |
| 2892 ZoneList<Expression*>* args = expr->arguments(); | 2851 ZoneList<Expression*>* args = expr->arguments(); |
| 2893 DCHECK(args->length() == 1); | 2852 DCHECK(args->length() == 1); |
| 2894 | 2853 |
| 2895 VisitForAccumulatorValue(args->at(0)); | 2854 VisitForAccumulatorValue(args->at(0)); |
| 2896 | 2855 |
| 2897 Label materialize_true, materialize_false; | 2856 Label materialize_true, materialize_false; |
| 2898 Label* if_true = NULL; | 2857 Label* if_true = NULL; |
| 2899 Label* if_false = NULL; | 2858 Label* if_false = NULL; |
| 2900 Label* fall_through = NULL; | 2859 Label* fall_through = NULL; |
| 2901 context()->PrepareTest(&materialize_true, &materialize_false, &if_true, | 2860 context()->PrepareTest(&materialize_true, &materialize_false, &if_true, |
| 2902 &if_false, &fall_through); | 2861 &if_false, &fall_through); |
| 2903 | 2862 |
| 2904 __ JumpIfSmi(r3, if_false); | 2863 __ JumpIfSmi(r2, if_false); |
| 2905 __ CompareObjectType(r3, r4, r4, FIRST_JS_RECEIVER_TYPE); | 2864 __ CompareObjectType(r2, r3, r3, FIRST_JS_RECEIVER_TYPE); |
| 2906 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); | 2865 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); |
| 2907 Split(ge, if_true, if_false, fall_through); | 2866 Split(ge, if_true, if_false, fall_through); |
| 2908 | 2867 |
| 2909 context()->Plug(if_true, if_false); | 2868 context()->Plug(if_true, if_false); |
| 2910 } | 2869 } |
| 2911 | 2870 |
| 2912 | |
| 2913 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) { | 2871 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) { |
| 2914 ZoneList<Expression*>* args = expr->arguments(); | 2872 ZoneList<Expression*>* args = expr->arguments(); |
| 2915 DCHECK(args->length() == 1); | 2873 DCHECK(args->length() == 1); |
| 2916 | 2874 |
| 2917 VisitForAccumulatorValue(args->at(0)); | 2875 VisitForAccumulatorValue(args->at(0)); |
| 2918 | 2876 |
| 2919 Label materialize_true, materialize_false; | 2877 Label materialize_true, materialize_false; |
| 2920 Label* if_true = NULL; | 2878 Label* if_true = NULL; |
| 2921 Label* if_false = NULL; | 2879 Label* if_false = NULL; |
| 2922 Label* fall_through = NULL; | 2880 Label* fall_through = NULL; |
| 2923 context()->PrepareTest(&materialize_true, &materialize_false, &if_true, | 2881 context()->PrepareTest(&materialize_true, &materialize_false, &if_true, |
| 2924 &if_false, &fall_through); | 2882 &if_false, &fall_through); |
| 2925 | 2883 |
| 2926 __ JumpIfSmi(r3, if_false); | 2884 __ JumpIfSmi(r2, if_false); |
| 2927 __ CompareObjectType(r3, r4, r4, JS_ARRAY_TYPE); | 2885 __ CompareObjectType(r2, r3, r3, JS_ARRAY_TYPE); |
| 2928 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); | 2886 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); |
| 2929 Split(eq, if_true, if_false, fall_through); | 2887 Split(eq, if_true, if_false, fall_through); |
| 2930 | 2888 |
| 2931 context()->Plug(if_true, if_false); | 2889 context()->Plug(if_true, if_false); |
| 2932 } | 2890 } |
| 2933 | 2891 |
| 2934 | |
| 2935 void FullCodeGenerator::EmitIsTypedArray(CallRuntime* expr) { | 2892 void FullCodeGenerator::EmitIsTypedArray(CallRuntime* expr) { |
| 2936 ZoneList<Expression*>* args = expr->arguments(); | 2893 ZoneList<Expression*>* args = expr->arguments(); |
| 2937 DCHECK(args->length() == 1); | 2894 DCHECK(args->length() == 1); |
| 2938 | 2895 |
| 2939 VisitForAccumulatorValue(args->at(0)); | 2896 VisitForAccumulatorValue(args->at(0)); |
| 2940 | 2897 |
| 2941 Label materialize_true, materialize_false; | 2898 Label materialize_true, materialize_false; |
| 2942 Label* if_true = NULL; | 2899 Label* if_true = NULL; |
| 2943 Label* if_false = NULL; | 2900 Label* if_false = NULL; |
| 2944 Label* fall_through = NULL; | 2901 Label* fall_through = NULL; |
| 2945 context()->PrepareTest(&materialize_true, &materialize_false, &if_true, | 2902 context()->PrepareTest(&materialize_true, &materialize_false, &if_true, |
| 2946 &if_false, &fall_through); | 2903 &if_false, &fall_through); |
| 2947 | 2904 |
| 2948 __ JumpIfSmi(r3, if_false); | 2905 __ JumpIfSmi(r2, if_false); |
| 2949 __ CompareObjectType(r3, r4, r4, JS_TYPED_ARRAY_TYPE); | 2906 __ CompareObjectType(r2, r3, r3, JS_TYPED_ARRAY_TYPE); |
| 2950 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); | 2907 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); |
| 2951 Split(eq, if_true, if_false, fall_through); | 2908 Split(eq, if_true, if_false, fall_through); |
| 2952 | 2909 |
| 2953 context()->Plug(if_true, if_false); | 2910 context()->Plug(if_true, if_false); |
| 2954 } | 2911 } |
| 2955 | 2912 |
| 2956 | |
| 2957 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) { | 2913 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) { |
| 2958 ZoneList<Expression*>* args = expr->arguments(); | 2914 ZoneList<Expression*>* args = expr->arguments(); |
| 2959 DCHECK(args->length() == 1); | 2915 DCHECK(args->length() == 1); |
| 2960 | 2916 |
| 2961 VisitForAccumulatorValue(args->at(0)); | 2917 VisitForAccumulatorValue(args->at(0)); |
| 2962 | 2918 |
| 2963 Label materialize_true, materialize_false; | 2919 Label materialize_true, materialize_false; |
| 2964 Label* if_true = NULL; | 2920 Label* if_true = NULL; |
| 2965 Label* if_false = NULL; | 2921 Label* if_false = NULL; |
| 2966 Label* fall_through = NULL; | 2922 Label* fall_through = NULL; |
| 2967 context()->PrepareTest(&materialize_true, &materialize_false, &if_true, | 2923 context()->PrepareTest(&materialize_true, &materialize_false, &if_true, |
| 2968 &if_false, &fall_through); | 2924 &if_false, &fall_through); |
| 2969 | 2925 |
| 2970 __ JumpIfSmi(r3, if_false); | 2926 __ JumpIfSmi(r2, if_false); |
| 2971 __ CompareObjectType(r3, r4, r4, JS_REGEXP_TYPE); | 2927 __ CompareObjectType(r2, r3, r3, JS_REGEXP_TYPE); |
| 2972 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); | 2928 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); |
| 2973 Split(eq, if_true, if_false, fall_through); | 2929 Split(eq, if_true, if_false, fall_through); |
| 2974 | 2930 |
| 2975 context()->Plug(if_true, if_false); | 2931 context()->Plug(if_true, if_false); |
| 2976 } | 2932 } |
| 2977 | 2933 |
| 2978 | |
| 2979 void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) { | 2934 void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) { |
| 2980 ZoneList<Expression*>* args = expr->arguments(); | 2935 ZoneList<Expression*>* args = expr->arguments(); |
| 2981 DCHECK(args->length() == 1); | 2936 DCHECK(args->length() == 1); |
| 2982 | 2937 |
| 2983 VisitForAccumulatorValue(args->at(0)); | 2938 VisitForAccumulatorValue(args->at(0)); |
| 2984 | 2939 |
| 2985 Label materialize_true, materialize_false; | 2940 Label materialize_true, materialize_false; |
| 2986 Label* if_true = NULL; | 2941 Label* if_true = NULL; |
| 2987 Label* if_false = NULL; | 2942 Label* if_false = NULL; |
| 2988 Label* fall_through = NULL; | 2943 Label* fall_through = NULL; |
| 2989 context()->PrepareTest(&materialize_true, &materialize_false, &if_true, | 2944 context()->PrepareTest(&materialize_true, &materialize_false, &if_true, |
| 2990 &if_false, &fall_through); | 2945 &if_false, &fall_through); |
| 2991 | 2946 |
| 2992 __ JumpIfSmi(r3, if_false); | 2947 __ JumpIfSmi(r2, if_false); |
| 2993 __ CompareObjectType(r3, r4, r4, JS_PROXY_TYPE); | 2948 __ CompareObjectType(r2, r3, r3, JS_PROXY_TYPE); |
| 2994 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); | 2949 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); |
| 2995 Split(eq, if_true, if_false, fall_through); | 2950 Split(eq, if_true, if_false, fall_through); |
| 2996 | 2951 |
| 2997 context()->Plug(if_true, if_false); | 2952 context()->Plug(if_true, if_false); |
| 2998 } | 2953 } |
| 2999 | 2954 |
| 3000 | |
| 3001 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) { | 2955 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) { |
| 3002 ZoneList<Expression*>* args = expr->arguments(); | 2956 ZoneList<Expression*>* args = expr->arguments(); |
| 3003 DCHECK(args->length() == 1); | 2957 DCHECK(args->length() == 1); |
| 3004 Label done, null, function, non_function_constructor; | 2958 Label done, null, function, non_function_constructor; |
| 3005 | 2959 |
| 3006 VisitForAccumulatorValue(args->at(0)); | 2960 VisitForAccumulatorValue(args->at(0)); |
| 3007 | 2961 |
| 3008 // If the object is not a JSReceiver, we return null. | 2962 // If the object is not a JSReceiver, we return null. |
| 3009 __ JumpIfSmi(r3, &null); | 2963 __ JumpIfSmi(r2, &null); |
| 3010 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE); | 2964 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE); |
| 3011 __ CompareObjectType(r3, r3, r4, FIRST_JS_RECEIVER_TYPE); | 2965 __ CompareObjectType(r2, r2, r3, FIRST_JS_RECEIVER_TYPE); |
| 3012 // Map is now in r3. | 2966 // Map is now in r2. |
| 3013 __ blt(&null); | 2967 __ blt(&null); |
| 3014 | 2968 |
| 3015 // Return 'Function' for JSFunction objects. | 2969 // Return 'Function' for JSFunction objects. |
| 3016 __ cmpi(r4, Operand(JS_FUNCTION_TYPE)); | 2970 __ CmpP(r3, Operand(JS_FUNCTION_TYPE)); |
| 3017 __ beq(&function); | 2971 __ beq(&function); |
| 3018 | 2972 |
| 3019 // Check if the constructor in the map is a JS function. | 2973 // Check if the constructor in the map is a JS function. |
| 3020 Register instance_type = r5; | 2974 Register instance_type = r4; |
| 3021 __ GetMapConstructor(r3, r3, r4, instance_type); | 2975 __ GetMapConstructor(r2, r2, r3, instance_type); |
| 3022 __ cmpi(instance_type, Operand(JS_FUNCTION_TYPE)); | 2976 __ CmpP(instance_type, Operand(JS_FUNCTION_TYPE)); |
| 3023 __ bne(&non_function_constructor); | 2977 __ bne(&non_function_constructor, Label::kNear); |
| 3024 | 2978 |
| 3025 // r3 now contains the constructor function. Grab the | 2979 // r2 now contains the constructor function. Grab the |
| 3026 // instance class name from there. | 2980 // instance class name from there. |
| 3027 __ LoadP(r3, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset)); | 2981 __ LoadP(r2, FieldMemOperand(r2, JSFunction::kSharedFunctionInfoOffset)); |
| 3028 __ LoadP(r3, | 2982 __ LoadP(r2, |
| 3029 FieldMemOperand(r3, SharedFunctionInfo::kInstanceClassNameOffset)); | 2983 FieldMemOperand(r2, SharedFunctionInfo::kInstanceClassNameOffset)); |
| 3030 __ b(&done); | 2984 __ b(&done, Label::kNear); |
| 3031 | 2985 |
| 3032 // Functions have class 'Function'. | 2986 // Functions have class 'Function'. |
| 3033 __ bind(&function); | 2987 __ bind(&function); |
| 3034 __ LoadRoot(r3, Heap::kFunction_stringRootIndex); | 2988 __ LoadRoot(r2, Heap::kFunction_stringRootIndex); |
| 3035 __ b(&done); | 2989 __ b(&done, Label::kNear); |
| 3036 | 2990 |
| 3037 // Objects with a non-function constructor have class 'Object'. | 2991 // Objects with a non-function constructor have class 'Object'. |
| 3038 __ bind(&non_function_constructor); | 2992 __ bind(&non_function_constructor); |
| 3039 __ LoadRoot(r3, Heap::kObject_stringRootIndex); | 2993 __ LoadRoot(r2, Heap::kObject_stringRootIndex); |
| 3040 __ b(&done); | 2994 __ b(&done, Label::kNear); |
| 3041 | 2995 |
| 3042 // Non-JS objects have class null. | 2996 // Non-JS objects have class null. |
| 3043 __ bind(&null); | 2997 __ bind(&null); |
| 3044 __ LoadRoot(r3, Heap::kNullValueRootIndex); | 2998 __ LoadRoot(r2, Heap::kNullValueRootIndex); |
| 3045 | 2999 |
| 3046 // All done. | 3000 // All done. |
| 3047 __ bind(&done); | 3001 __ bind(&done); |
| 3048 | 3002 |
| 3049 context()->Plug(r3); | 3003 context()->Plug(r2); |
| 3050 } | 3004 } |
| 3051 | 3005 |
| 3052 | |
| 3053 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) { | 3006 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) { |
| 3054 ZoneList<Expression*>* args = expr->arguments(); | 3007 ZoneList<Expression*>* args = expr->arguments(); |
| 3055 DCHECK(args->length() == 1); | 3008 DCHECK(args->length() == 1); |
| 3056 VisitForAccumulatorValue(args->at(0)); // Load the object. | 3009 VisitForAccumulatorValue(args->at(0)); // Load the object. |
| 3057 | 3010 |
| 3058 Label done; | 3011 Label done; |
| 3059 // If the object is a smi return the object. | 3012 // If the object is a smi return the object. |
| 3060 __ JumpIfSmi(r3, &done); | 3013 __ JumpIfSmi(r2, &done); |
| 3061 // If the object is not a value type, return the object. | 3014 // If the object is not a value type, return the object. |
| 3062 __ CompareObjectType(r3, r4, r4, JS_VALUE_TYPE); | 3015 __ CompareObjectType(r2, r3, r3, JS_VALUE_TYPE); |
| 3063 __ bne(&done); | 3016 __ bne(&done, Label::kNear); |
| 3064 __ LoadP(r3, FieldMemOperand(r3, JSValue::kValueOffset)); | 3017 __ LoadP(r2, FieldMemOperand(r2, JSValue::kValueOffset)); |
| 3065 | 3018 |
| 3066 __ bind(&done); | 3019 __ bind(&done); |
| 3067 context()->Plug(r3); | 3020 context()->Plug(r2); |
| 3068 } | 3021 } |
| 3069 | 3022 |
| 3070 | |
| 3071 void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) { | 3023 void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) { |
| 3072 ZoneList<Expression*>* args = expr->arguments(); | 3024 ZoneList<Expression*>* args = expr->arguments(); |
| 3073 DCHECK_EQ(3, args->length()); | 3025 DCHECK_EQ(3, args->length()); |
| 3074 | 3026 |
| 3075 Register string = r3; | 3027 Register string = r2; |
| 3076 Register index = r4; | 3028 Register index = r3; |
| 3077 Register value = r5; | 3029 Register value = r4; |
| 3078 | 3030 |
| 3079 VisitForStackValue(args->at(0)); // index | 3031 VisitForStackValue(args->at(0)); // index |
| 3080 VisitForStackValue(args->at(1)); // value | 3032 VisitForStackValue(args->at(1)); // value |
| 3081 VisitForAccumulatorValue(args->at(2)); // string | 3033 VisitForAccumulatorValue(args->at(2)); // string |
| 3082 PopOperands(index, value); | 3034 PopOperands(index, value); |
| 3083 | 3035 |
| 3084 if (FLAG_debug_code) { | 3036 if (FLAG_debug_code) { |
| 3085 __ TestIfSmi(value, r0); | 3037 __ TestIfSmi(value); |
| 3086 __ Check(eq, kNonSmiValue, cr0); | 3038 __ Check(eq, kNonSmiValue, cr0); |
| 3087 __ TestIfSmi(index, r0); | 3039 __ TestIfSmi(index); |
| 3088 __ Check(eq, kNonSmiIndex, cr0); | 3040 __ Check(eq, kNonSmiIndex, cr0); |
| 3089 __ SmiUntag(index, index); | 3041 __ SmiUntag(index); |
| 3090 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag; | 3042 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag; |
| 3091 __ EmitSeqStringSetCharCheck(string, index, value, one_byte_seq_type); | 3043 __ EmitSeqStringSetCharCheck(string, index, value, one_byte_seq_type); |
| 3092 __ SmiTag(index, index); | 3044 __ SmiTag(index); |
| 3093 } | 3045 } |
| 3094 | 3046 |
| 3095 __ SmiUntag(value); | 3047 __ SmiUntag(value); |
| 3096 __ addi(ip, string, Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag)); | 3048 __ AddP(ip, string, Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag)); |
| 3097 __ SmiToByteArrayOffset(r0, index); | 3049 __ SmiToByteArrayOffset(r1, index); |
| 3098 __ stbx(value, MemOperand(ip, r0)); | 3050 __ StoreByte(value, MemOperand(ip, r1)); |
| 3099 context()->Plug(string); | 3051 context()->Plug(string); |
| 3100 } | 3052 } |
| 3101 | 3053 |
| 3102 | |
| 3103 void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) { | 3054 void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) { |
| 3104 ZoneList<Expression*>* args = expr->arguments(); | 3055 ZoneList<Expression*>* args = expr->arguments(); |
| 3105 DCHECK_EQ(3, args->length()); | 3056 DCHECK_EQ(3, args->length()); |
| 3106 | 3057 |
| 3107 Register string = r3; | 3058 Register string = r2; |
| 3108 Register index = r4; | 3059 Register index = r3; |
| 3109 Register value = r5; | 3060 Register value = r4; |
| 3110 | 3061 |
| 3111 VisitForStackValue(args->at(0)); // index | 3062 VisitForStackValue(args->at(0)); // index |
| 3112 VisitForStackValue(args->at(1)); // value | 3063 VisitForStackValue(args->at(1)); // value |
| 3113 VisitForAccumulatorValue(args->at(2)); // string | 3064 VisitForAccumulatorValue(args->at(2)); // string |
| 3114 PopOperands(index, value); | 3065 PopOperands(index, value); |
| 3115 | 3066 |
| 3116 if (FLAG_debug_code) { | 3067 if (FLAG_debug_code) { |
| 3117 __ TestIfSmi(value, r0); | 3068 __ TestIfSmi(value); |
| 3118 __ Check(eq, kNonSmiValue, cr0); | 3069 __ Check(eq, kNonSmiValue, cr0); |
| 3119 __ TestIfSmi(index, r0); | 3070 __ TestIfSmi(index); |
| 3120 __ Check(eq, kNonSmiIndex, cr0); | 3071 __ Check(eq, kNonSmiIndex, cr0); |
| 3121 __ SmiUntag(index, index); | 3072 __ SmiUntag(index, index); |
| 3122 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag; | 3073 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag; |
| 3123 __ EmitSeqStringSetCharCheck(string, index, value, two_byte_seq_type); | 3074 __ EmitSeqStringSetCharCheck(string, index, value, two_byte_seq_type); |
| 3124 __ SmiTag(index, index); | 3075 __ SmiTag(index, index); |
| 3125 } | 3076 } |
| 3126 | 3077 |
| 3127 __ SmiUntag(value); | 3078 __ SmiUntag(value); |
| 3128 __ addi(ip, string, Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag)); | 3079 __ SmiToShortArrayOffset(r1, index); |
| 3129 __ SmiToShortArrayOffset(r0, index); | 3080 __ StoreHalfWord(value, MemOperand(r1, string, SeqTwoByteString::kHeaderSize - |
| 3130 __ sthx(value, MemOperand(ip, r0)); | 3081 kHeapObjectTag)); |
| 3131 context()->Plug(string); | 3082 context()->Plug(string); |
| 3132 } | 3083 } |
| 3133 | 3084 |
| 3134 | |
| 3135 void FullCodeGenerator::EmitToInteger(CallRuntime* expr) { | 3085 void FullCodeGenerator::EmitToInteger(CallRuntime* expr) { |
| 3136 ZoneList<Expression*>* args = expr->arguments(); | 3086 ZoneList<Expression*>* args = expr->arguments(); |
| 3137 DCHECK_EQ(1, args->length()); | 3087 DCHECK_EQ(1, args->length()); |
| 3138 | 3088 |
| 3139 // Load the argument into r3 and convert it. | 3089 // Load the argument into r2 and convert it. |
| 3140 VisitForAccumulatorValue(args->at(0)); | 3090 VisitForAccumulatorValue(args->at(0)); |
| 3141 | 3091 |
| 3142 // Convert the object to an integer. | 3092 // Convert the object to an integer. |
| 3143 Label done_convert; | 3093 Label done_convert; |
| 3144 __ JumpIfSmi(r3, &done_convert); | 3094 __ JumpIfSmi(r2, &done_convert); |
| 3145 __ Push(r3); | 3095 __ Push(r2); |
| 3146 __ CallRuntime(Runtime::kToInteger); | 3096 __ CallRuntime(Runtime::kToInteger); |
| 3147 __ bind(&done_convert); | 3097 __ bind(&done_convert); |
| 3148 context()->Plug(r3); | 3098 context()->Plug(r2); |
| 3149 } | 3099 } |
| 3150 | 3100 |
| 3151 | |
| 3152 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) { | 3101 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) { |
| 3153 ZoneList<Expression*>* args = expr->arguments(); | 3102 ZoneList<Expression*>* args = expr->arguments(); |
| 3154 DCHECK(args->length() == 1); | 3103 DCHECK(args->length() == 1); |
| 3155 VisitForAccumulatorValue(args->at(0)); | 3104 VisitForAccumulatorValue(args->at(0)); |
| 3156 | 3105 |
| 3157 Label done; | 3106 Label done; |
| 3158 StringCharFromCodeGenerator generator(r3, r4); | 3107 StringCharFromCodeGenerator generator(r2, r3); |
| 3159 generator.GenerateFast(masm_); | 3108 generator.GenerateFast(masm_); |
| 3160 __ b(&done); | 3109 __ b(&done); |
| 3161 | 3110 |
| 3162 NopRuntimeCallHelper call_helper; | 3111 NopRuntimeCallHelper call_helper; |
| 3163 generator.GenerateSlow(masm_, call_helper); | 3112 generator.GenerateSlow(masm_, call_helper); |
| 3164 | 3113 |
| 3165 __ bind(&done); | 3114 __ bind(&done); |
| 3166 context()->Plug(r4); | 3115 context()->Plug(r3); |
| 3167 } | 3116 } |
| 3168 | 3117 |
| 3169 | |
| 3170 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) { | 3118 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) { |
| 3171 ZoneList<Expression*>* args = expr->arguments(); | 3119 ZoneList<Expression*>* args = expr->arguments(); |
| 3172 DCHECK(args->length() == 2); | 3120 DCHECK(args->length() == 2); |
| 3173 VisitForStackValue(args->at(0)); | 3121 VisitForStackValue(args->at(0)); |
| 3174 VisitForAccumulatorValue(args->at(1)); | 3122 VisitForAccumulatorValue(args->at(1)); |
| 3175 | 3123 |
| 3176 Register object = r4; | 3124 Register object = r3; |
| 3177 Register index = r3; | 3125 Register index = r2; |
| 3178 Register result = r6; | 3126 Register result = r5; |
| 3179 | 3127 |
| 3180 PopOperand(object); | 3128 PopOperand(object); |
| 3181 | 3129 |
| 3182 Label need_conversion; | 3130 Label need_conversion; |
| 3183 Label index_out_of_range; | 3131 Label index_out_of_range; |
| 3184 Label done; | 3132 Label done; |
| 3185 StringCharCodeAtGenerator generator(object, index, result, &need_conversion, | 3133 StringCharCodeAtGenerator generator(object, index, result, &need_conversion, |
| 3186 &need_conversion, &index_out_of_range, | 3134 &need_conversion, &index_out_of_range, |
| 3187 STRING_INDEX_IS_NUMBER); | 3135 STRING_INDEX_IS_NUMBER); |
| 3188 generator.GenerateFast(masm_); | 3136 generator.GenerateFast(masm_); |
| (...skipping 11 matching lines...) Expand all Loading... |
| 3200 __ LoadRoot(result, Heap::kUndefinedValueRootIndex); | 3148 __ LoadRoot(result, Heap::kUndefinedValueRootIndex); |
| 3201 __ b(&done); | 3149 __ b(&done); |
| 3202 | 3150 |
| 3203 NopRuntimeCallHelper call_helper; | 3151 NopRuntimeCallHelper call_helper; |
| 3204 generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper); | 3152 generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper); |
| 3205 | 3153 |
| 3206 __ bind(&done); | 3154 __ bind(&done); |
| 3207 context()->Plug(result); | 3155 context()->Plug(result); |
| 3208 } | 3156 } |
| 3209 | 3157 |
| 3210 | |
| 3211 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) { | 3158 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) { |
| 3212 ZoneList<Expression*>* args = expr->arguments(); | 3159 ZoneList<Expression*>* args = expr->arguments(); |
| 3213 DCHECK(args->length() == 2); | 3160 DCHECK(args->length() == 2); |
| 3214 VisitForStackValue(args->at(0)); | 3161 VisitForStackValue(args->at(0)); |
| 3215 VisitForAccumulatorValue(args->at(1)); | 3162 VisitForAccumulatorValue(args->at(1)); |
| 3216 | 3163 |
| 3217 Register object = r4; | 3164 Register object = r3; |
| 3218 Register index = r3; | 3165 Register index = r2; |
| 3219 Register scratch = r6; | 3166 Register scratch = r5; |
| 3220 Register result = r3; | 3167 Register result = r2; |
| 3221 | 3168 |
| 3222 PopOperand(object); | 3169 PopOperand(object); |
| 3223 | 3170 |
| 3224 Label need_conversion; | 3171 Label need_conversion; |
| 3225 Label index_out_of_range; | 3172 Label index_out_of_range; |
| 3226 Label done; | 3173 Label done; |
| 3227 StringCharAtGenerator generator(object, index, scratch, result, | 3174 StringCharAtGenerator generator(object, index, scratch, result, |
| 3228 &need_conversion, &need_conversion, | 3175 &need_conversion, &need_conversion, |
| 3229 &index_out_of_range, STRING_INDEX_IS_NUMBER); | 3176 &index_out_of_range, STRING_INDEX_IS_NUMBER); |
| 3230 generator.GenerateFast(masm_); | 3177 generator.GenerateFast(masm_); |
| (...skipping 11 matching lines...) Expand all Loading... |
| 3242 __ LoadSmiLiteral(result, Smi::FromInt(0)); | 3189 __ LoadSmiLiteral(result, Smi::FromInt(0)); |
| 3243 __ b(&done); | 3190 __ b(&done); |
| 3244 | 3191 |
| 3245 NopRuntimeCallHelper call_helper; | 3192 NopRuntimeCallHelper call_helper; |
| 3246 generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper); | 3193 generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper); |
| 3247 | 3194 |
| 3248 __ bind(&done); | 3195 __ bind(&done); |
| 3249 context()->Plug(result); | 3196 context()->Plug(result); |
| 3250 } | 3197 } |
| 3251 | 3198 |
| 3252 | |
| 3253 void FullCodeGenerator::EmitCall(CallRuntime* expr) { | 3199 void FullCodeGenerator::EmitCall(CallRuntime* expr) { |
| 3254 ZoneList<Expression*>* args = expr->arguments(); | 3200 ZoneList<Expression*>* args = expr->arguments(); |
| 3255 DCHECK_LE(2, args->length()); | 3201 DCHECK_LE(2, args->length()); |
| 3256 // Push target, receiver and arguments onto the stack. | 3202 // Push target, receiver and arguments onto the stack. |
| 3257 for (Expression* const arg : *args) { | 3203 for (Expression* const arg : *args) { |
| 3258 VisitForStackValue(arg); | 3204 VisitForStackValue(arg); |
| 3259 } | 3205 } |
| 3260 PrepareForBailoutForId(expr->CallId(), NO_REGISTERS); | 3206 PrepareForBailoutForId(expr->CallId(), NO_REGISTERS); |
| 3261 // Move target to r4. | 3207 // Move target to r3. |
| 3262 int const argc = args->length() - 2; | 3208 int const argc = args->length() - 2; |
| 3263 __ LoadP(r4, MemOperand(sp, (argc + 1) * kPointerSize)); | 3209 __ LoadP(r3, MemOperand(sp, (argc + 1) * kPointerSize)); |
| 3264 // Call the target. | 3210 // Call the target. |
| 3265 __ mov(r3, Operand(argc)); | 3211 __ mov(r2, Operand(argc)); |
| 3266 __ Call(isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); | 3212 __ Call(isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); |
| 3267 OperandStackDepthDecrement(argc + 1); | 3213 OperandStackDepthDecrement(argc + 1); |
| 3268 // Restore context register. | 3214 // Restore context register. |
| 3269 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); | 3215 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
| 3270 // Discard the function left on TOS. | 3216 // Discard the function left on TOS. |
| 3271 context()->DropAndPlug(1, r3); | 3217 context()->DropAndPlug(1, r2); |
| 3272 } | 3218 } |
| 3273 | 3219 |
| 3274 | |
| 3275 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) { | 3220 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) { |
| 3276 ZoneList<Expression*>* args = expr->arguments(); | 3221 ZoneList<Expression*>* args = expr->arguments(); |
| 3277 VisitForAccumulatorValue(args->at(0)); | 3222 VisitForAccumulatorValue(args->at(0)); |
| 3278 | 3223 |
| 3279 Label materialize_true, materialize_false; | 3224 Label materialize_true, materialize_false; |
| 3280 Label* if_true = NULL; | 3225 Label* if_true = NULL; |
| 3281 Label* if_false = NULL; | 3226 Label* if_false = NULL; |
| 3282 Label* fall_through = NULL; | 3227 Label* fall_through = NULL; |
| 3283 context()->PrepareTest(&materialize_true, &materialize_false, &if_true, | 3228 context()->PrepareTest(&materialize_true, &materialize_false, &if_true, |
| 3284 &if_false, &fall_through); | 3229 &if_false, &fall_through); |
| 3285 | 3230 |
| 3286 __ lwz(r3, FieldMemOperand(r3, String::kHashFieldOffset)); | 3231 __ LoadlW(r2, FieldMemOperand(r2, String::kHashFieldOffset)); |
| 3287 // PPC - assume ip is free | 3232 __ AndP(r0, r2, Operand(String::kContainsCachedArrayIndexMask)); |
| 3288 __ mov(ip, Operand(String::kContainsCachedArrayIndexMask)); | |
| 3289 __ and_(r0, r3, ip, SetRC); | |
| 3290 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); | 3233 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); |
| 3291 Split(eq, if_true, if_false, fall_through, cr0); | 3234 Split(eq, if_true, if_false, fall_through); |
| 3292 | 3235 |
| 3293 context()->Plug(if_true, if_false); | 3236 context()->Plug(if_true, if_false); |
| 3294 } | 3237 } |
| 3295 | 3238 |
| 3296 | |
| 3297 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) { | 3239 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) { |
| 3298 ZoneList<Expression*>* args = expr->arguments(); | 3240 ZoneList<Expression*>* args = expr->arguments(); |
| 3299 DCHECK(args->length() == 1); | 3241 DCHECK(args->length() == 1); |
| 3300 VisitForAccumulatorValue(args->at(0)); | 3242 VisitForAccumulatorValue(args->at(0)); |
| 3301 | 3243 |
| 3302 __ AssertString(r3); | 3244 __ AssertString(r2); |
| 3303 | 3245 |
| 3304 __ lwz(r3, FieldMemOperand(r3, String::kHashFieldOffset)); | 3246 __ LoadlW(r2, FieldMemOperand(r2, String::kHashFieldOffset)); |
| 3305 __ IndexFromHash(r3, r3); | 3247 __ IndexFromHash(r2, r2); |
| 3306 | 3248 |
| 3307 context()->Plug(r3); | 3249 context()->Plug(r2); |
| 3308 } | 3250 } |
| 3309 | 3251 |
| 3310 | |
| 3311 void FullCodeGenerator::EmitGetSuperConstructor(CallRuntime* expr) { | 3252 void FullCodeGenerator::EmitGetSuperConstructor(CallRuntime* expr) { |
| 3312 ZoneList<Expression*>* args = expr->arguments(); | 3253 ZoneList<Expression*>* args = expr->arguments(); |
| 3313 DCHECK_EQ(1, args->length()); | 3254 DCHECK_EQ(1, args->length()); |
| 3314 VisitForAccumulatorValue(args->at(0)); | 3255 VisitForAccumulatorValue(args->at(0)); |
| 3315 __ AssertFunction(r3); | 3256 __ AssertFunction(r2); |
| 3316 __ LoadP(r3, FieldMemOperand(r3, HeapObject::kMapOffset)); | 3257 __ LoadP(r2, FieldMemOperand(r2, HeapObject::kMapOffset)); |
| 3317 __ LoadP(r3, FieldMemOperand(r3, Map::kPrototypeOffset)); | 3258 __ LoadP(r2, FieldMemOperand(r2, Map::kPrototypeOffset)); |
| 3318 context()->Plug(r3); | 3259 context()->Plug(r2); |
| 3319 } | 3260 } |
| 3320 | 3261 |
| 3321 | |
| 3322 void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) { | 3262 void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) { |
| 3323 DCHECK(expr->arguments()->length() == 0); | 3263 DCHECK(expr->arguments()->length() == 0); |
| 3324 ExternalReference debug_is_active = | 3264 ExternalReference debug_is_active = |
| 3325 ExternalReference::debug_is_active_address(isolate()); | 3265 ExternalReference::debug_is_active_address(isolate()); |
| 3326 __ mov(ip, Operand(debug_is_active)); | 3266 __ mov(ip, Operand(debug_is_active)); |
| 3327 __ lbz(r3, MemOperand(ip)); | 3267 __ LoadlB(r2, MemOperand(ip)); |
| 3328 __ SmiTag(r3); | 3268 __ SmiTag(r2); |
| 3329 context()->Plug(r3); | 3269 context()->Plug(r2); |
| 3330 } | 3270 } |
| 3331 | 3271 |
| 3332 | |
| 3333 void FullCodeGenerator::EmitCreateIterResultObject(CallRuntime* expr) { | 3272 void FullCodeGenerator::EmitCreateIterResultObject(CallRuntime* expr) { |
| 3334 ZoneList<Expression*>* args = expr->arguments(); | 3273 ZoneList<Expression*>* args = expr->arguments(); |
| 3335 DCHECK_EQ(2, args->length()); | 3274 DCHECK_EQ(2, args->length()); |
| 3336 VisitForStackValue(args->at(0)); | 3275 VisitForStackValue(args->at(0)); |
| 3337 VisitForStackValue(args->at(1)); | 3276 VisitForStackValue(args->at(1)); |
| 3338 | 3277 |
| 3339 Label runtime, done; | 3278 Label runtime, done; |
| 3340 | 3279 |
| 3341 __ Allocate(JSIteratorResult::kSize, r3, r5, r6, &runtime, TAG_OBJECT); | 3280 __ Allocate(JSIteratorResult::kSize, r2, r4, r5, &runtime, TAG_OBJECT); |
| 3342 __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, r4); | 3281 __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, r3); |
| 3343 __ Pop(r5, r6); | 3282 __ Pop(r4, r5); |
| 3344 __ LoadRoot(r7, Heap::kEmptyFixedArrayRootIndex); | 3283 __ LoadRoot(r6, Heap::kEmptyFixedArrayRootIndex); |
| 3345 __ StoreP(r4, FieldMemOperand(r3, HeapObject::kMapOffset), r0); | 3284 __ StoreP(r3, FieldMemOperand(r2, HeapObject::kMapOffset), r0); |
| 3346 __ StoreP(r7, FieldMemOperand(r3, JSObject::kPropertiesOffset), r0); | 3285 __ StoreP(r6, FieldMemOperand(r2, JSObject::kPropertiesOffset), r0); |
| 3347 __ StoreP(r7, FieldMemOperand(r3, JSObject::kElementsOffset), r0); | 3286 __ StoreP(r6, FieldMemOperand(r2, JSObject::kElementsOffset), r0); |
| 3348 __ StoreP(r5, FieldMemOperand(r3, JSIteratorResult::kValueOffset), r0); | 3287 __ StoreP(r4, FieldMemOperand(r2, JSIteratorResult::kValueOffset), r0); |
| 3349 __ StoreP(r6, FieldMemOperand(r3, JSIteratorResult::kDoneOffset), r0); | 3288 __ StoreP(r5, FieldMemOperand(r2, JSIteratorResult::kDoneOffset), r0); |
| 3350 STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize); | 3289 STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize); |
| 3351 __ b(&done); | 3290 __ b(&done); |
| 3352 | 3291 |
| 3353 __ bind(&runtime); | 3292 __ bind(&runtime); |
| 3354 CallRuntimeWithOperands(Runtime::kCreateIterResultObject); | 3293 CallRuntimeWithOperands(Runtime::kCreateIterResultObject); |
| 3355 | 3294 |
| 3356 __ bind(&done); | 3295 __ bind(&done); |
| 3357 context()->Plug(r3); | 3296 context()->Plug(r2); |
| 3358 } | 3297 } |
| 3359 | 3298 |
| 3360 | |
| 3361 void FullCodeGenerator::EmitLoadJSRuntimeFunction(CallRuntime* expr) { | 3299 void FullCodeGenerator::EmitLoadJSRuntimeFunction(CallRuntime* expr) { |
| 3362 // Push undefined as the receiver. | 3300 // Push undefined as the receiver. |
| 3363 __ LoadRoot(r3, Heap::kUndefinedValueRootIndex); | 3301 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex); |
| 3364 PushOperand(r3); | 3302 PushOperand(r2); |
| 3365 | 3303 |
| 3366 __ LoadNativeContextSlot(expr->context_index(), r3); | 3304 __ LoadNativeContextSlot(expr->context_index(), r2); |
| 3367 } | 3305 } |
| 3368 | 3306 |
| 3369 | |
| 3370 void FullCodeGenerator::EmitCallJSRuntimeFunction(CallRuntime* expr) { | 3307 void FullCodeGenerator::EmitCallJSRuntimeFunction(CallRuntime* expr) { |
| 3371 ZoneList<Expression*>* args = expr->arguments(); | 3308 ZoneList<Expression*>* args = expr->arguments(); |
| 3372 int arg_count = args->length(); | 3309 int arg_count = args->length(); |
| 3373 | 3310 |
| 3374 SetCallPosition(expr); | 3311 SetCallPosition(expr); |
| 3375 __ LoadP(r4, MemOperand(sp, (arg_count + 1) * kPointerSize), r0); | 3312 __ LoadP(r3, MemOperand(sp, (arg_count + 1) * kPointerSize), r0); |
| 3376 __ mov(r3, Operand(arg_count)); | 3313 __ mov(r2, Operand(arg_count)); |
| 3377 __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kNullOrUndefined), | 3314 __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kNullOrUndefined), |
| 3378 RelocInfo::CODE_TARGET); | 3315 RelocInfo::CODE_TARGET); |
| 3379 OperandStackDepthDecrement(arg_count + 1); | 3316 OperandStackDepthDecrement(arg_count + 1); |
| 3380 } | 3317 } |
| 3381 | 3318 |
| 3382 | |
| 3383 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) { | 3319 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) { |
| 3384 ZoneList<Expression*>* args = expr->arguments(); | 3320 ZoneList<Expression*>* args = expr->arguments(); |
| 3385 int arg_count = args->length(); | 3321 int arg_count = args->length(); |
| 3386 | 3322 |
| 3387 if (expr->is_jsruntime()) { | 3323 if (expr->is_jsruntime()) { |
| 3388 Comment cmnt(masm_, "[ CallRuntime"); | 3324 Comment cmnt(masm_, "[ CallRuntime"); |
| 3389 EmitLoadJSRuntimeFunction(expr); | 3325 EmitLoadJSRuntimeFunction(expr); |
| 3390 | 3326 |
| 3391 // Push the target function under the receiver. | 3327 // Push the target function under the receiver. |
| 3392 __ LoadP(ip, MemOperand(sp, 0)); | 3328 __ LoadP(ip, MemOperand(sp, 0)); |
| 3393 PushOperand(ip); | 3329 PushOperand(ip); |
| 3394 __ StoreP(r3, MemOperand(sp, kPointerSize)); | 3330 __ StoreP(r2, MemOperand(sp, kPointerSize)); |
| 3395 | 3331 |
| 3396 // Push the arguments ("left-to-right"). | 3332 // Push the arguments ("left-to-right"). |
| 3397 for (int i = 0; i < arg_count; i++) { | 3333 for (int i = 0; i < arg_count; i++) { |
| 3398 VisitForStackValue(args->at(i)); | 3334 VisitForStackValue(args->at(i)); |
| 3399 } | 3335 } |
| 3400 | 3336 |
| 3401 PrepareForBailoutForId(expr->CallId(), NO_REGISTERS); | 3337 PrepareForBailoutForId(expr->CallId(), NO_REGISTERS); |
| 3402 EmitCallJSRuntimeFunction(expr); | 3338 EmitCallJSRuntimeFunction(expr); |
| 3403 | 3339 |
| 3404 // Restore context register. | 3340 // Restore context register. |
| 3405 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); | 3341 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
| 3406 | 3342 |
| 3407 context()->DropAndPlug(1, r3); | 3343 context()->DropAndPlug(1, r2); |
| 3408 | 3344 |
| 3409 } else { | 3345 } else { |
| 3410 const Runtime::Function* function = expr->function(); | 3346 const Runtime::Function* function = expr->function(); |
| 3411 switch (function->function_id) { | 3347 switch (function->function_id) { |
| 3412 #define CALL_INTRINSIC_GENERATOR(Name) \ | 3348 #define CALL_INTRINSIC_GENERATOR(Name) \ |
| 3413 case Runtime::kInline##Name: { \ | 3349 case Runtime::kInline##Name: { \ |
| 3414 Comment cmnt(masm_, "[ Inline" #Name); \ | 3350 Comment cmnt(masm_, "[ Inline" #Name); \ |
| 3415 return Emit##Name(expr); \ | 3351 return Emit##Name(expr); \ |
| 3416 } | 3352 } |
| 3417 FOR_EACH_FULL_CODE_INTRINSIC(CALL_INTRINSIC_GENERATOR) | 3353 FOR_EACH_FULL_CODE_INTRINSIC(CALL_INTRINSIC_GENERATOR) |
| 3418 #undef CALL_INTRINSIC_GENERATOR | 3354 #undef CALL_INTRINSIC_GENERATOR |
| 3419 default: { | 3355 default: { |
| 3420 Comment cmnt(masm_, "[ CallRuntime for unhandled intrinsic"); | 3356 Comment cmnt(masm_, "[ CallRuntime for unhandled intrinsic"); |
| 3421 // Push the arguments ("left-to-right"). | 3357 // Push the arguments ("left-to-right"). |
| 3422 for (int i = 0; i < arg_count; i++) { | 3358 for (int i = 0; i < arg_count; i++) { |
| 3423 VisitForStackValue(args->at(i)); | 3359 VisitForStackValue(args->at(i)); |
| 3424 } | 3360 } |
| 3425 | 3361 |
| 3426 // Call the C runtime function. | 3362 // Call the C runtime function. |
| 3427 PrepareForBailoutForId(expr->CallId(), NO_REGISTERS); | 3363 PrepareForBailoutForId(expr->CallId(), NO_REGISTERS); |
| 3428 __ CallRuntime(expr->function(), arg_count); | 3364 __ CallRuntime(expr->function(), arg_count); |
| 3429 OperandStackDepthDecrement(arg_count); | 3365 OperandStackDepthDecrement(arg_count); |
| 3430 context()->Plug(r3); | 3366 context()->Plug(r2); |
| 3431 } | 3367 } |
| 3432 } | 3368 } |
| 3433 } | 3369 } |
| 3434 } | 3370 } |
| 3435 | 3371 |
| 3436 | |
| 3437 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) { | 3372 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) { |
| 3438 switch (expr->op()) { | 3373 switch (expr->op()) { |
| 3439 case Token::DELETE: { | 3374 case Token::DELETE: { |
| 3440 Comment cmnt(masm_, "[ UnaryOperation (DELETE)"); | 3375 Comment cmnt(masm_, "[ UnaryOperation (DELETE)"); |
| 3441 Property* property = expr->expression()->AsProperty(); | 3376 Property* property = expr->expression()->AsProperty(); |
| 3442 VariableProxy* proxy = expr->expression()->AsVariableProxy(); | 3377 VariableProxy* proxy = expr->expression()->AsVariableProxy(); |
| 3443 | 3378 |
| 3444 if (property != NULL) { | 3379 if (property != NULL) { |
| 3445 VisitForStackValue(property->obj()); | 3380 VisitForStackValue(property->obj()); |
| 3446 VisitForStackValue(property->key()); | 3381 VisitForStackValue(property->key()); |
| 3447 CallRuntimeWithOperands(is_strict(language_mode()) | 3382 CallRuntimeWithOperands(is_strict(language_mode()) |
| 3448 ? Runtime::kDeleteProperty_Strict | 3383 ? Runtime::kDeleteProperty_Strict |
| 3449 : Runtime::kDeleteProperty_Sloppy); | 3384 : Runtime::kDeleteProperty_Sloppy); |
| 3450 context()->Plug(r3); | 3385 context()->Plug(r2); |
| 3451 } else if (proxy != NULL) { | 3386 } else if (proxy != NULL) { |
| 3452 Variable* var = proxy->var(); | 3387 Variable* var = proxy->var(); |
| 3453 // Delete of an unqualified identifier is disallowed in strict mode but | 3388 // Delete of an unqualified identifier is disallowed in strict mode but |
| 3454 // "delete this" is allowed. | 3389 // "delete this" is allowed. |
| 3455 bool is_this = var->HasThisName(isolate()); | 3390 bool is_this = var->HasThisName(isolate()); |
| 3456 DCHECK(is_sloppy(language_mode()) || is_this); | 3391 DCHECK(is_sloppy(language_mode()) || is_this); |
| 3457 if (var->IsUnallocatedOrGlobalSlot()) { | 3392 if (var->IsUnallocatedOrGlobalSlot()) { |
| 3458 __ LoadGlobalObject(r5); | 3393 __ LoadGlobalObject(r4); |
| 3459 __ mov(r4, Operand(var->name())); | 3394 __ mov(r3, Operand(var->name())); |
| 3460 __ Push(r5, r4); | 3395 __ Push(r4, r3); |
| 3461 __ CallRuntime(Runtime::kDeleteProperty_Sloppy); | 3396 __ CallRuntime(Runtime::kDeleteProperty_Sloppy); |
| 3462 context()->Plug(r3); | 3397 context()->Plug(r2); |
| 3463 } else if (var->IsStackAllocated() || var->IsContextSlot()) { | 3398 } else if (var->IsStackAllocated() || var->IsContextSlot()) { |
| 3464 // Result of deleting non-global, non-dynamic variables is false. | 3399 // Result of deleting non-global, non-dynamic variables is false. |
| 3465 // The subexpression does not have side effects. | 3400 // The subexpression does not have side effects. |
| 3466 context()->Plug(is_this); | 3401 context()->Plug(is_this); |
| 3467 } else { | 3402 } else { |
| 3468 // Non-global variable. Call the runtime to try to delete from the | 3403 // Non-global variable. Call the runtime to try to delete from the |
| 3469 // context where the variable was introduced. | 3404 // context where the variable was introduced. |
| 3470 __ Push(var->name()); | 3405 __ Push(var->name()); |
| 3471 __ CallRuntime(Runtime::kDeleteLookupSlot); | 3406 __ CallRuntime(Runtime::kDeleteLookupSlot); |
| 3472 context()->Plug(r3); | 3407 context()->Plug(r2); |
| 3473 } | 3408 } |
| 3474 } else { | 3409 } else { |
| 3475 // Result of deleting non-property, non-variable reference is true. | 3410 // Result of deleting non-property, non-variable reference is true. |
| 3476 // The subexpression may have side effects. | 3411 // The subexpression may have side effects. |
| 3477 VisitForEffect(expr->expression()); | 3412 VisitForEffect(expr->expression()); |
| 3478 context()->Plug(true); | 3413 context()->Plug(true); |
| 3479 } | 3414 } |
| 3480 break; | 3415 break; |
| 3481 } | 3416 } |
| 3482 | 3417 |
| (...skipping 21 matching lines...) Expand all Loading... |
| 3504 // for control and plugging the control flow into the context, | 3439 // for control and plugging the control flow into the context, |
| 3505 // because we need to prepare a pair of extra administrative AST ids | 3440 // because we need to prepare a pair of extra administrative AST ids |
| 3506 // for the optimizing compiler. | 3441 // for the optimizing compiler. |
| 3507 DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue()); | 3442 DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue()); |
| 3508 Label materialize_true, materialize_false, done; | 3443 Label materialize_true, materialize_false, done; |
| 3509 VisitForControl(expr->expression(), &materialize_false, | 3444 VisitForControl(expr->expression(), &materialize_false, |
| 3510 &materialize_true, &materialize_true); | 3445 &materialize_true, &materialize_true); |
| 3511 if (!context()->IsAccumulatorValue()) OperandStackDepthIncrement(1); | 3446 if (!context()->IsAccumulatorValue()) OperandStackDepthIncrement(1); |
| 3512 __ bind(&materialize_true); | 3447 __ bind(&materialize_true); |
| 3513 PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS); | 3448 PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS); |
| 3514 __ LoadRoot(r3, Heap::kTrueValueRootIndex); | 3449 __ LoadRoot(r2, Heap::kTrueValueRootIndex); |
| 3515 if (context()->IsStackValue()) __ push(r3); | 3450 if (context()->IsStackValue()) __ push(r2); |
| 3516 __ b(&done); | 3451 __ b(&done); |
| 3517 __ bind(&materialize_false); | 3452 __ bind(&materialize_false); |
| 3518 PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS); | 3453 PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS); |
| 3519 __ LoadRoot(r3, Heap::kFalseValueRootIndex); | 3454 __ LoadRoot(r2, Heap::kFalseValueRootIndex); |
| 3520 if (context()->IsStackValue()) __ push(r3); | 3455 if (context()->IsStackValue()) __ push(r2); |
| 3521 __ bind(&done); | 3456 __ bind(&done); |
| 3522 } | 3457 } |
| 3523 break; | 3458 break; |
| 3524 } | 3459 } |
| 3525 | 3460 |
| 3526 case Token::TYPEOF: { | 3461 case Token::TYPEOF: { |
| 3527 Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)"); | 3462 Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)"); |
| 3528 { | 3463 { |
| 3529 AccumulatorValueContext context(this); | 3464 AccumulatorValueContext context(this); |
| 3530 VisitForTypeofValue(expr->expression()); | 3465 VisitForTypeofValue(expr->expression()); |
| 3531 } | 3466 } |
| 3532 __ mr(r6, r3); | 3467 __ LoadRR(r5, r2); |
| 3533 TypeofStub typeof_stub(isolate()); | 3468 TypeofStub typeof_stub(isolate()); |
| 3534 __ CallStub(&typeof_stub); | 3469 __ CallStub(&typeof_stub); |
| 3535 context()->Plug(r3); | 3470 context()->Plug(r2); |
| 3536 break; | 3471 break; |
| 3537 } | 3472 } |
| 3538 | 3473 |
| 3539 default: | 3474 default: |
| 3540 UNREACHABLE(); | 3475 UNREACHABLE(); |
| 3541 } | 3476 } |
| 3542 } | 3477 } |
| 3543 | 3478 |
| 3544 | |
| 3545 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) { | 3479 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) { |
| 3546 DCHECK(expr->expression()->IsValidReferenceExpressionOrThis()); | 3480 DCHECK(expr->expression()->IsValidReferenceExpressionOrThis()); |
| 3547 | 3481 |
| 3548 Comment cmnt(masm_, "[ CountOperation"); | 3482 Comment cmnt(masm_, "[ CountOperation"); |
| 3549 | 3483 |
| 3550 Property* prop = expr->expression()->AsProperty(); | 3484 Property* prop = expr->expression()->AsProperty(); |
| 3551 LhsKind assign_type = Property::GetAssignType(prop); | 3485 LhsKind assign_type = Property::GetAssignType(prop); |
| 3552 | 3486 |
| 3553 // Evaluate expression and get value. | 3487 // Evaluate expression and get value. |
| 3554 if (assign_type == VARIABLE) { | 3488 if (assign_type == VARIABLE) { |
| (...skipping 13 matching lines...) Expand all Loading... |
| 3568 __ LoadP(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0)); | 3502 __ LoadP(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0)); |
| 3569 EmitNamedPropertyLoad(prop); | 3503 EmitNamedPropertyLoad(prop); |
| 3570 break; | 3504 break; |
| 3571 } | 3505 } |
| 3572 | 3506 |
| 3573 case NAMED_SUPER_PROPERTY: { | 3507 case NAMED_SUPER_PROPERTY: { |
| 3574 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var()); | 3508 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var()); |
| 3575 VisitForAccumulatorValue( | 3509 VisitForAccumulatorValue( |
| 3576 prop->obj()->AsSuperPropertyReference()->home_object()); | 3510 prop->obj()->AsSuperPropertyReference()->home_object()); |
| 3577 PushOperand(result_register()); | 3511 PushOperand(result_register()); |
| 3578 const Register scratch = r4; | 3512 const Register scratch = r3; |
| 3579 __ LoadP(scratch, MemOperand(sp, kPointerSize)); | 3513 __ LoadP(scratch, MemOperand(sp, kPointerSize)); |
| 3580 PushOperands(scratch, result_register()); | 3514 PushOperands(scratch, result_register()); |
| 3581 EmitNamedSuperPropertyLoad(prop); | 3515 EmitNamedSuperPropertyLoad(prop); |
| 3582 break; | 3516 break; |
| 3583 } | 3517 } |
| 3584 | 3518 |
| 3585 case KEYED_SUPER_PROPERTY: { | 3519 case KEYED_SUPER_PROPERTY: { |
| 3586 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var()); | 3520 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var()); |
| 3587 VisitForAccumulatorValue( | 3521 VisitForAccumulatorValue( |
| 3588 prop->obj()->AsSuperPropertyReference()->home_object()); | 3522 prop->obj()->AsSuperPropertyReference()->home_object()); |
| 3589 const Register scratch = r4; | 3523 const Register scratch = r3; |
| 3590 const Register scratch1 = r5; | 3524 const Register scratch1 = r4; |
| 3591 __ mr(scratch, result_register()); | 3525 __ LoadRR(scratch, result_register()); |
| 3592 VisitForAccumulatorValue(prop->key()); | 3526 VisitForAccumulatorValue(prop->key()); |
| 3593 PushOperands(scratch, result_register()); | 3527 PushOperands(scratch, result_register()); |
| 3594 __ LoadP(scratch1, MemOperand(sp, 2 * kPointerSize)); | 3528 __ LoadP(scratch1, MemOperand(sp, 2 * kPointerSize)); |
| 3595 PushOperands(scratch1, scratch, result_register()); | 3529 PushOperands(scratch1, scratch, result_register()); |
| 3596 EmitKeyedSuperPropertyLoad(prop); | 3530 EmitKeyedSuperPropertyLoad(prop); |
| 3597 break; | 3531 break; |
| 3598 } | 3532 } |
| 3599 | 3533 |
| 3600 case KEYED_PROPERTY: { | 3534 case KEYED_PROPERTY: { |
| 3601 VisitForStackValue(prop->obj()); | 3535 VisitForStackValue(prop->obj()); |
| (...skipping 18 matching lines...) Expand all Loading... |
| 3620 PrepareForBailoutForId(prop->LoadId(), TOS_REG); | 3554 PrepareForBailoutForId(prop->LoadId(), TOS_REG); |
| 3621 } | 3555 } |
| 3622 | 3556 |
| 3623 // Inline smi case if we are in a loop. | 3557 // Inline smi case if we are in a loop. |
| 3624 Label stub_call, done; | 3558 Label stub_call, done; |
| 3625 JumpPatchSite patch_site(masm_); | 3559 JumpPatchSite patch_site(masm_); |
| 3626 | 3560 |
| 3627 int count_value = expr->op() == Token::INC ? 1 : -1; | 3561 int count_value = expr->op() == Token::INC ? 1 : -1; |
| 3628 if (ShouldInlineSmiCase(expr->op())) { | 3562 if (ShouldInlineSmiCase(expr->op())) { |
| 3629 Label slow; | 3563 Label slow; |
| 3630 patch_site.EmitJumpIfNotSmi(r3, &slow); | 3564 patch_site.EmitJumpIfNotSmi(r2, &slow); |
| 3631 | 3565 |
| 3632 // Save result for postfix expressions. | 3566 // Save result for postfix expressions. |
| 3633 if (expr->is_postfix()) { | 3567 if (expr->is_postfix()) { |
| 3634 if (!context()->IsEffect()) { | 3568 if (!context()->IsEffect()) { |
| 3635 // Save the result on the stack. If we have a named or keyed property | 3569 // Save the result on the stack. If we have a named or keyed property |
| 3636 // we store the result under the receiver that is currently on top | 3570 // we store the result under the receiver that is currently on top |
| 3637 // of the stack. | 3571 // of the stack. |
| 3638 switch (assign_type) { | 3572 switch (assign_type) { |
| 3639 case VARIABLE: | 3573 case VARIABLE: |
| 3640 __ push(r3); | 3574 __ push(r2); |
| 3641 break; | 3575 break; |
| 3642 case NAMED_PROPERTY: | 3576 case NAMED_PROPERTY: |
| 3643 __ StoreP(r3, MemOperand(sp, kPointerSize)); | 3577 __ StoreP(r2, MemOperand(sp, kPointerSize)); |
| 3644 break; | 3578 break; |
| 3645 case NAMED_SUPER_PROPERTY: | 3579 case NAMED_SUPER_PROPERTY: |
| 3646 __ StoreP(r3, MemOperand(sp, 2 * kPointerSize)); | 3580 __ StoreP(r2, MemOperand(sp, 2 * kPointerSize)); |
| 3647 break; | 3581 break; |
| 3648 case KEYED_PROPERTY: | 3582 case KEYED_PROPERTY: |
| 3649 __ StoreP(r3, MemOperand(sp, 2 * kPointerSize)); | 3583 __ StoreP(r2, MemOperand(sp, 2 * kPointerSize)); |
| 3650 break; | 3584 break; |
| 3651 case KEYED_SUPER_PROPERTY: | 3585 case KEYED_SUPER_PROPERTY: |
| 3652 __ StoreP(r3, MemOperand(sp, 3 * kPointerSize)); | 3586 __ StoreP(r2, MemOperand(sp, 3 * kPointerSize)); |
| 3653 break; | 3587 break; |
| 3654 } | 3588 } |
| 3655 } | 3589 } |
| 3656 } | 3590 } |
| 3657 | 3591 |
| 3658 Register scratch1 = r4; | 3592 Register scratch1 = r3; |
| 3659 Register scratch2 = r5; | 3593 Register scratch2 = r4; |
| 3660 __ LoadSmiLiteral(scratch1, Smi::FromInt(count_value)); | 3594 __ LoadSmiLiteral(scratch1, Smi::FromInt(count_value)); |
| 3661 __ AddAndCheckForOverflow(r3, r3, scratch1, scratch2, r0); | 3595 __ AddAndCheckForOverflow(r2, r2, scratch1, scratch2, r0); |
| 3662 __ BranchOnNoOverflow(&done); | 3596 __ BranchOnNoOverflow(&done); |
| 3663 // Call stub. Undo operation first. | 3597 // Call stub. Undo operation first. |
| 3664 __ sub(r3, r3, scratch1); | 3598 __ SubP(r2, r2, scratch1); |
| 3665 __ b(&stub_call); | 3599 __ b(&stub_call); |
| 3666 __ bind(&slow); | 3600 __ bind(&slow); |
| 3667 } | 3601 } |
| 3668 | 3602 |
| 3669 // Convert old value into a number. | 3603 // Convert old value into a number. |
| 3670 ToNumberStub convert_stub(isolate()); | 3604 ToNumberStub convert_stub(isolate()); |
| 3671 __ CallStub(&convert_stub); | 3605 __ CallStub(&convert_stub); |
| 3672 PrepareForBailoutForId(expr->ToNumberId(), TOS_REG); | 3606 PrepareForBailoutForId(expr->ToNumberId(), TOS_REG); |
| 3673 | 3607 |
| 3674 // Save result for postfix expressions. | 3608 // Save result for postfix expressions. |
| 3675 if (expr->is_postfix()) { | 3609 if (expr->is_postfix()) { |
| 3676 if (!context()->IsEffect()) { | 3610 if (!context()->IsEffect()) { |
| 3677 // Save the result on the stack. If we have a named or keyed property | 3611 // Save the result on the stack. If we have a named or keyed property |
| 3678 // we store the result under the receiver that is currently on top | 3612 // we store the result under the receiver that is currently on top |
| 3679 // of the stack. | 3613 // of the stack. |
| 3680 switch (assign_type) { | 3614 switch (assign_type) { |
| 3681 case VARIABLE: | 3615 case VARIABLE: |
| 3682 PushOperand(r3); | 3616 PushOperand(r2); |
| 3683 break; | 3617 break; |
| 3684 case NAMED_PROPERTY: | 3618 case NAMED_PROPERTY: |
| 3685 __ StoreP(r3, MemOperand(sp, kPointerSize)); | 3619 __ StoreP(r2, MemOperand(sp, kPointerSize)); |
| 3686 break; | 3620 break; |
| 3687 case NAMED_SUPER_PROPERTY: | 3621 case NAMED_SUPER_PROPERTY: |
| 3688 __ StoreP(r3, MemOperand(sp, 2 * kPointerSize)); | 3622 __ StoreP(r2, MemOperand(sp, 2 * kPointerSize)); |
| 3689 break; | 3623 break; |
| 3690 case KEYED_PROPERTY: | 3624 case KEYED_PROPERTY: |
| 3691 __ StoreP(r3, MemOperand(sp, 2 * kPointerSize)); | 3625 __ StoreP(r2, MemOperand(sp, 2 * kPointerSize)); |
| 3692 break; | 3626 break; |
| 3693 case KEYED_SUPER_PROPERTY: | 3627 case KEYED_SUPER_PROPERTY: |
| 3694 __ StoreP(r3, MemOperand(sp, 3 * kPointerSize)); | 3628 __ StoreP(r2, MemOperand(sp, 3 * kPointerSize)); |
| 3695 break; | 3629 break; |
| 3696 } | 3630 } |
| 3697 } | 3631 } |
| 3698 } | 3632 } |
| 3699 | 3633 |
| 3700 __ bind(&stub_call); | 3634 __ bind(&stub_call); |
| 3701 __ mr(r4, r3); | 3635 __ LoadRR(r3, r2); |
| 3702 __ LoadSmiLiteral(r3, Smi::FromInt(count_value)); | 3636 __ LoadSmiLiteral(r2, Smi::FromInt(count_value)); |
| 3703 | 3637 |
| 3704 SetExpressionPosition(expr); | 3638 SetExpressionPosition(expr); |
| 3705 | 3639 |
| 3706 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), Token::ADD).code(); | 3640 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), Token::ADD).code(); |
| 3707 CallIC(code, expr->CountBinOpFeedbackId()); | 3641 CallIC(code, expr->CountBinOpFeedbackId()); |
| 3708 patch_site.EmitPatchInfo(); | 3642 patch_site.EmitPatchInfo(); |
| 3709 __ bind(&done); | 3643 __ bind(&done); |
| 3710 | 3644 |
| 3711 // Store the value returned in r3. | 3645 // Store the value returned in r2. |
| 3712 switch (assign_type) { | 3646 switch (assign_type) { |
| 3713 case VARIABLE: | 3647 case VARIABLE: |
| 3714 if (expr->is_postfix()) { | 3648 if (expr->is_postfix()) { |
| 3715 { | 3649 { |
| 3716 EffectContext context(this); | 3650 EffectContext context(this); |
| 3717 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(), | 3651 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(), |
| 3718 Token::ASSIGN, expr->CountSlot()); | 3652 Token::ASSIGN, expr->CountSlot()); |
| 3719 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); | 3653 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); |
| 3720 context.Plug(r3); | 3654 context.Plug(r2); |
| 3721 } | 3655 } |
| 3722 // For all contexts except EffectConstant We have the result on | 3656 // For all contexts except EffectConstant We have the result on |
| 3723 // top of the stack. | 3657 // top of the stack. |
| 3724 if (!context()->IsEffect()) { | 3658 if (!context()->IsEffect()) { |
| 3725 context()->PlugTOS(); | 3659 context()->PlugTOS(); |
| 3726 } | 3660 } |
| 3727 } else { | 3661 } else { |
| 3728 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(), | 3662 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(), |
| 3729 Token::ASSIGN, expr->CountSlot()); | 3663 Token::ASSIGN, expr->CountSlot()); |
| 3730 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); | 3664 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); |
| 3731 context()->Plug(r3); | 3665 context()->Plug(r2); |
| 3732 } | 3666 } |
| 3733 break; | 3667 break; |
| 3734 case NAMED_PROPERTY: { | 3668 case NAMED_PROPERTY: { |
| 3735 __ mov(StoreDescriptor::NameRegister(), | 3669 __ mov(StoreDescriptor::NameRegister(), |
| 3736 Operand(prop->key()->AsLiteral()->value())); | 3670 Operand(prop->key()->AsLiteral()->value())); |
| 3737 PopOperand(StoreDescriptor::ReceiverRegister()); | 3671 PopOperand(StoreDescriptor::ReceiverRegister()); |
| 3738 EmitLoadStoreICSlot(expr->CountSlot()); | 3672 EmitLoadStoreICSlot(expr->CountSlot()); |
| 3739 CallStoreIC(); | 3673 CallStoreIC(); |
| 3740 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); | 3674 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); |
| 3741 if (expr->is_postfix()) { | 3675 if (expr->is_postfix()) { |
| 3742 if (!context()->IsEffect()) { | 3676 if (!context()->IsEffect()) { |
| 3743 context()->PlugTOS(); | 3677 context()->PlugTOS(); |
| 3744 } | 3678 } |
| 3745 } else { | 3679 } else { |
| 3746 context()->Plug(r3); | 3680 context()->Plug(r2); |
| 3747 } | 3681 } |
| 3748 break; | 3682 break; |
| 3749 } | 3683 } |
| 3750 case NAMED_SUPER_PROPERTY: { | 3684 case NAMED_SUPER_PROPERTY: { |
| 3751 EmitNamedSuperPropertyStore(prop); | 3685 EmitNamedSuperPropertyStore(prop); |
| 3752 if (expr->is_postfix()) { | 3686 if (expr->is_postfix()) { |
| 3753 if (!context()->IsEffect()) { | 3687 if (!context()->IsEffect()) { |
| 3754 context()->PlugTOS(); | 3688 context()->PlugTOS(); |
| 3755 } | 3689 } |
| 3756 } else { | 3690 } else { |
| 3757 context()->Plug(r3); | 3691 context()->Plug(r2); |
| 3758 } | 3692 } |
| 3759 break; | 3693 break; |
| 3760 } | 3694 } |
| 3761 case KEYED_SUPER_PROPERTY: { | 3695 case KEYED_SUPER_PROPERTY: { |
| 3762 EmitKeyedSuperPropertyStore(prop); | 3696 EmitKeyedSuperPropertyStore(prop); |
| 3763 if (expr->is_postfix()) { | 3697 if (expr->is_postfix()) { |
| 3764 if (!context()->IsEffect()) { | 3698 if (!context()->IsEffect()) { |
| 3765 context()->PlugTOS(); | 3699 context()->PlugTOS(); |
| 3766 } | 3700 } |
| 3767 } else { | 3701 } else { |
| 3768 context()->Plug(r3); | 3702 context()->Plug(r2); |
| 3769 } | 3703 } |
| 3770 break; | 3704 break; |
| 3771 } | 3705 } |
| 3772 case KEYED_PROPERTY: { | 3706 case KEYED_PROPERTY: { |
| 3773 PopOperands(StoreDescriptor::ReceiverRegister(), | 3707 PopOperands(StoreDescriptor::ReceiverRegister(), |
| 3774 StoreDescriptor::NameRegister()); | 3708 StoreDescriptor::NameRegister()); |
| 3775 Handle<Code> ic = | 3709 Handle<Code> ic = |
| 3776 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code(); | 3710 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code(); |
| 3777 EmitLoadStoreICSlot(expr->CountSlot()); | 3711 EmitLoadStoreICSlot(expr->CountSlot()); |
| 3778 CallIC(ic); | 3712 CallIC(ic); |
| 3779 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); | 3713 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); |
| 3780 if (expr->is_postfix()) { | 3714 if (expr->is_postfix()) { |
| 3781 if (!context()->IsEffect()) { | 3715 if (!context()->IsEffect()) { |
| 3782 context()->PlugTOS(); | 3716 context()->PlugTOS(); |
| 3783 } | 3717 } |
| 3784 } else { | 3718 } else { |
| 3785 context()->Plug(r3); | 3719 context()->Plug(r2); |
| 3786 } | 3720 } |
| 3787 break; | 3721 break; |
| 3788 } | 3722 } |
| 3789 } | 3723 } |
| 3790 } | 3724 } |
| 3791 | 3725 |
| 3792 | |
| 3793 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr, | 3726 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr, |
| 3794 Expression* sub_expr, | 3727 Expression* sub_expr, |
| 3795 Handle<String> check) { | 3728 Handle<String> check) { |
| 3796 Label materialize_true, materialize_false; | 3729 Label materialize_true, materialize_false; |
| 3797 Label* if_true = NULL; | 3730 Label* if_true = NULL; |
| 3798 Label* if_false = NULL; | 3731 Label* if_false = NULL; |
| 3799 Label* fall_through = NULL; | 3732 Label* fall_through = NULL; |
| 3800 context()->PrepareTest(&materialize_true, &materialize_false, &if_true, | 3733 context()->PrepareTest(&materialize_true, &materialize_false, &if_true, |
| 3801 &if_false, &fall_through); | 3734 &if_false, &fall_through); |
| 3802 | 3735 |
| 3803 { | 3736 { |
| 3804 AccumulatorValueContext context(this); | 3737 AccumulatorValueContext context(this); |
| 3805 VisitForTypeofValue(sub_expr); | 3738 VisitForTypeofValue(sub_expr); |
| 3806 } | 3739 } |
| 3807 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); | 3740 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); |
| 3808 | 3741 |
| 3809 Factory* factory = isolate()->factory(); | 3742 Factory* factory = isolate()->factory(); |
| 3810 if (String::Equals(check, factory->number_string())) { | 3743 if (String::Equals(check, factory->number_string())) { |
| 3811 __ JumpIfSmi(r3, if_true); | 3744 __ JumpIfSmi(r2, if_true); |
| 3812 __ LoadP(r3, FieldMemOperand(r3, HeapObject::kMapOffset)); | 3745 __ LoadP(r2, FieldMemOperand(r2, HeapObject::kMapOffset)); |
| 3813 __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex); | 3746 __ CompareRoot(r2, Heap::kHeapNumberMapRootIndex); |
| 3814 __ cmp(r3, ip); | |
| 3815 Split(eq, if_true, if_false, fall_through); | 3747 Split(eq, if_true, if_false, fall_through); |
| 3816 } else if (String::Equals(check, factory->string_string())) { | 3748 } else if (String::Equals(check, factory->string_string())) { |
| 3817 __ JumpIfSmi(r3, if_false); | 3749 __ JumpIfSmi(r2, if_false); |
| 3818 __ CompareObjectType(r3, r3, r4, FIRST_NONSTRING_TYPE); | 3750 __ CompareObjectType(r2, r2, r3, FIRST_NONSTRING_TYPE); |
| 3819 Split(lt, if_true, if_false, fall_through); | 3751 Split(lt, if_true, if_false, fall_through); |
| 3820 } else if (String::Equals(check, factory->symbol_string())) { | 3752 } else if (String::Equals(check, factory->symbol_string())) { |
| 3821 __ JumpIfSmi(r3, if_false); | 3753 __ JumpIfSmi(r2, if_false); |
| 3822 __ CompareObjectType(r3, r3, r4, SYMBOL_TYPE); | 3754 __ CompareObjectType(r2, r2, r3, SYMBOL_TYPE); |
| 3823 Split(eq, if_true, if_false, fall_through); | 3755 Split(eq, if_true, if_false, fall_through); |
| 3824 } else if (String::Equals(check, factory->boolean_string())) { | 3756 } else if (String::Equals(check, factory->boolean_string())) { |
| 3825 __ CompareRoot(r3, Heap::kTrueValueRootIndex); | 3757 __ CompareRoot(r2, Heap::kTrueValueRootIndex); |
| 3826 __ beq(if_true); | 3758 __ beq(if_true); |
| 3827 __ CompareRoot(r3, Heap::kFalseValueRootIndex); | 3759 __ CompareRoot(r2, Heap::kFalseValueRootIndex); |
| 3828 Split(eq, if_true, if_false, fall_through); | 3760 Split(eq, if_true, if_false, fall_through); |
| 3829 } else if (String::Equals(check, factory->undefined_string())) { | 3761 } else if (String::Equals(check, factory->undefined_string())) { |
| 3830 __ CompareRoot(r3, Heap::kNullValueRootIndex); | 3762 __ CompareRoot(r2, Heap::kNullValueRootIndex); |
| 3831 __ beq(if_false); | 3763 __ beq(if_false); |
| 3832 __ JumpIfSmi(r3, if_false); | 3764 __ JumpIfSmi(r2, if_false); |
| 3833 // Check for undetectable objects => true. | 3765 // Check for undetectable objects => true. |
| 3834 __ LoadP(r3, FieldMemOperand(r3, HeapObject::kMapOffset)); | 3766 __ LoadP(r2, FieldMemOperand(r2, HeapObject::kMapOffset)); |
| 3835 __ lbz(r4, FieldMemOperand(r3, Map::kBitFieldOffset)); | 3767 __ tm(FieldMemOperand(r2, Map::kBitFieldOffset), |
| 3836 __ andi(r0, r4, Operand(1 << Map::kIsUndetectable)); | 3768 Operand(1 << Map::kIsUndetectable)); |
| 3837 Split(ne, if_true, if_false, fall_through, cr0); | 3769 Split(ne, if_true, if_false, fall_through); |
| 3838 | 3770 |
| 3839 } else if (String::Equals(check, factory->function_string())) { | 3771 } else if (String::Equals(check, factory->function_string())) { |
| 3840 __ JumpIfSmi(r3, if_false); | 3772 __ JumpIfSmi(r2, if_false); |
| 3841 __ LoadP(r3, FieldMemOperand(r3, HeapObject::kMapOffset)); | 3773 __ LoadP(r2, FieldMemOperand(r2, HeapObject::kMapOffset)); |
| 3842 __ lbz(r4, FieldMemOperand(r3, Map::kBitFieldOffset)); | 3774 __ LoadlB(r3, FieldMemOperand(r2, Map::kBitFieldOffset)); |
| 3843 __ andi(r4, r4, | 3775 __ AndP(r3, r3, |
| 3844 Operand((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable))); | 3776 Operand((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable))); |
| 3845 __ cmpi(r4, Operand(1 << Map::kIsCallable)); | 3777 __ CmpP(r3, Operand(1 << Map::kIsCallable)); |
| 3846 Split(eq, if_true, if_false, fall_through); | 3778 Split(eq, if_true, if_false, fall_through); |
| 3847 } else if (String::Equals(check, factory->object_string())) { | 3779 } else if (String::Equals(check, factory->object_string())) { |
| 3848 __ JumpIfSmi(r3, if_false); | 3780 __ JumpIfSmi(r2, if_false); |
| 3849 __ CompareRoot(r3, Heap::kNullValueRootIndex); | 3781 __ CompareRoot(r2, Heap::kNullValueRootIndex); |
| 3850 __ beq(if_true); | 3782 __ beq(if_true); |
| 3851 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE); | 3783 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE); |
| 3852 __ CompareObjectType(r3, r3, r4, FIRST_JS_RECEIVER_TYPE); | 3784 __ CompareObjectType(r2, r2, r3, FIRST_JS_RECEIVER_TYPE); |
| 3853 __ blt(if_false); | 3785 __ blt(if_false); |
| 3854 // Check for callable or undetectable objects => false. | 3786 __ tm(FieldMemOperand(r2, Map::kBitFieldOffset), |
| 3855 __ lbz(r4, FieldMemOperand(r3, Map::kBitFieldOffset)); | 3787 Operand((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable))); |
| 3856 __ andi(r0, r4, | 3788 Split(eq, if_true, if_false, fall_through); |
| 3857 Operand((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable))); | |
| 3858 Split(eq, if_true, if_false, fall_through, cr0); | |
| 3859 // clang-format off | 3789 // clang-format off |
| 3860 #define SIMD128_TYPE(TYPE, Type, type, lane_count, lane_type) \ | 3790 #define SIMD128_TYPE(TYPE, Type, type, lane_count, lane_type) \ |
| 3861 } else if (String::Equals(check, factory->type##_string())) { \ | 3791 } else if (String::Equals(check, factory->type##_string())) { \ |
| 3862 __ JumpIfSmi(r3, if_false); \ | 3792 __ JumpIfSmi(r2, if_false); \ |
| 3863 __ LoadP(r3, FieldMemOperand(r3, HeapObject::kMapOffset)); \ | 3793 __ LoadP(r2, FieldMemOperand(r2, HeapObject::kMapOffset)); \ |
| 3864 __ CompareRoot(r3, Heap::k##Type##MapRootIndex); \ | 3794 __ CompareRoot(r2, Heap::k##Type##MapRootIndex); \ |
| 3865 Split(eq, if_true, if_false, fall_through); | 3795 Split(eq, if_true, if_false, fall_through); |
| 3866 SIMD128_TYPES(SIMD128_TYPE) | 3796 SIMD128_TYPES(SIMD128_TYPE) |
| 3867 #undef SIMD128_TYPE | 3797 #undef SIMD128_TYPE |
| 3868 // clang-format on | 3798 // clang-format on |
| 3869 } else { | 3799 } else { |
| 3870 if (if_false != fall_through) __ b(if_false); | 3800 if (if_false != fall_through) __ b(if_false); |
| 3871 } | 3801 } |
| 3872 context()->Plug(if_true, if_false); | 3802 context()->Plug(if_true, if_false); |
| 3873 } | 3803 } |
| 3874 | 3804 |
| 3875 | |
| 3876 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) { | 3805 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) { |
| 3877 Comment cmnt(masm_, "[ CompareOperation"); | 3806 Comment cmnt(masm_, "[ CompareOperation"); |
| 3878 SetExpressionPosition(expr); | 3807 SetExpressionPosition(expr); |
| 3879 | 3808 |
| 3880 // First we try a fast inlined version of the compare when one of | 3809 // First we try a fast inlined version of the compare when one of |
| 3881 // the operands is a literal. | 3810 // the operands is a literal. |
| 3882 if (TryLiteralCompare(expr)) return; | 3811 if (TryLiteralCompare(expr)) return; |
| 3883 | 3812 |
| 3884 // Always perform the comparison for its control flow. Pack the result | 3813 // Always perform the comparison for its control flow. Pack the result |
| 3885 // into the expression's context after the comparison is performed. | 3814 // into the expression's context after the comparison is performed. |
| 3886 Label materialize_true, materialize_false; | 3815 Label materialize_true, materialize_false; |
| 3887 Label* if_true = NULL; | 3816 Label* if_true = NULL; |
| 3888 Label* if_false = NULL; | 3817 Label* if_false = NULL; |
| 3889 Label* fall_through = NULL; | 3818 Label* fall_through = NULL; |
| 3890 context()->PrepareTest(&materialize_true, &materialize_false, &if_true, | 3819 context()->PrepareTest(&materialize_true, &materialize_false, &if_true, |
| 3891 &if_false, &fall_through); | 3820 &if_false, &fall_through); |
| 3892 | 3821 |
| 3893 Token::Value op = expr->op(); | 3822 Token::Value op = expr->op(); |
| 3894 VisitForStackValue(expr->left()); | 3823 VisitForStackValue(expr->left()); |
| 3895 switch (op) { | 3824 switch (op) { |
| 3896 case Token::IN: | 3825 case Token::IN: |
| 3897 VisitForStackValue(expr->right()); | 3826 VisitForStackValue(expr->right()); |
| 3898 CallRuntimeWithOperands(Runtime::kHasProperty); | 3827 CallRuntimeWithOperands(Runtime::kHasProperty); |
| 3899 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL); | 3828 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL); |
| 3900 __ CompareRoot(r3, Heap::kTrueValueRootIndex); | 3829 __ CompareRoot(r2, Heap::kTrueValueRootIndex); |
| 3901 Split(eq, if_true, if_false, fall_through); | 3830 Split(eq, if_true, if_false, fall_through); |
| 3902 break; | 3831 break; |
| 3903 | 3832 |
| 3904 case Token::INSTANCEOF: { | 3833 case Token::INSTANCEOF: { |
| 3905 VisitForAccumulatorValue(expr->right()); | 3834 VisitForAccumulatorValue(expr->right()); |
| 3906 PopOperand(r4); | 3835 PopOperand(r3); |
| 3907 InstanceOfStub stub(isolate()); | 3836 InstanceOfStub stub(isolate()); |
| 3908 __ CallStub(&stub); | 3837 __ CallStub(&stub); |
| 3909 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL); | 3838 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL); |
| 3910 __ CompareRoot(r3, Heap::kTrueValueRootIndex); | 3839 __ CompareRoot(r2, Heap::kTrueValueRootIndex); |
| 3911 Split(eq, if_true, if_false, fall_through); | 3840 Split(eq, if_true, if_false, fall_through); |
| 3912 break; | 3841 break; |
| 3913 } | 3842 } |
| 3914 | 3843 |
| 3915 default: { | 3844 default: { |
| 3916 VisitForAccumulatorValue(expr->right()); | 3845 VisitForAccumulatorValue(expr->right()); |
| 3917 Condition cond = CompareIC::ComputeCondition(op); | 3846 Condition cond = CompareIC::ComputeCondition(op); |
| 3918 PopOperand(r4); | 3847 PopOperand(r3); |
| 3919 | 3848 |
| 3920 bool inline_smi_code = ShouldInlineSmiCase(op); | 3849 bool inline_smi_code = ShouldInlineSmiCase(op); |
| 3921 JumpPatchSite patch_site(masm_); | 3850 JumpPatchSite patch_site(masm_); |
| 3922 if (inline_smi_code) { | 3851 if (inline_smi_code) { |
| 3923 Label slow_case; | 3852 Label slow_case; |
| 3924 __ orx(r5, r3, r4); | 3853 __ LoadRR(r4, r3); |
| 3925 patch_site.EmitJumpIfNotSmi(r5, &slow_case); | 3854 __ OrP(r4, r2); |
| 3926 __ cmp(r4, r3); | 3855 patch_site.EmitJumpIfNotSmi(r4, &slow_case); |
| 3856 __ CmpP(r3, r2); |
| 3927 Split(cond, if_true, if_false, NULL); | 3857 Split(cond, if_true, if_false, NULL); |
| 3928 __ bind(&slow_case); | 3858 __ bind(&slow_case); |
| 3929 } | 3859 } |
| 3930 | 3860 |
| 3931 Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code(); | 3861 Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code(); |
| 3932 CallIC(ic, expr->CompareOperationFeedbackId()); | 3862 CallIC(ic, expr->CompareOperationFeedbackId()); |
| 3933 patch_site.EmitPatchInfo(); | 3863 patch_site.EmitPatchInfo(); |
| 3934 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); | 3864 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); |
| 3935 __ cmpi(r3, Operand::Zero()); | 3865 __ CmpP(r2, Operand::Zero()); |
| 3936 Split(cond, if_true, if_false, fall_through); | 3866 Split(cond, if_true, if_false, fall_through); |
| 3937 } | 3867 } |
| 3938 } | 3868 } |
| 3939 | 3869 |
| 3940 // Convert the result of the comparison into one expected for this | 3870 // Convert the result of the comparison into one expected for this |
| 3941 // expression's context. | 3871 // expression's context. |
| 3942 context()->Plug(if_true, if_false); | 3872 context()->Plug(if_true, if_false); |
| 3943 } | 3873 } |
| 3944 | 3874 |
| 3945 | |
| 3946 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr, | 3875 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr, |
| 3947 Expression* sub_expr, | 3876 Expression* sub_expr, |
| 3948 NilValue nil) { | 3877 NilValue nil) { |
| 3949 Label materialize_true, materialize_false; | 3878 Label materialize_true, materialize_false; |
| 3950 Label* if_true = NULL; | 3879 Label* if_true = NULL; |
| 3951 Label* if_false = NULL; | 3880 Label* if_false = NULL; |
| 3952 Label* fall_through = NULL; | 3881 Label* fall_through = NULL; |
| 3953 context()->PrepareTest(&materialize_true, &materialize_false, &if_true, | 3882 context()->PrepareTest(&materialize_true, &materialize_false, &if_true, |
| 3954 &if_false, &fall_through); | 3883 &if_false, &fall_through); |
| 3955 | 3884 |
| 3956 VisitForAccumulatorValue(sub_expr); | 3885 VisitForAccumulatorValue(sub_expr); |
| 3957 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); | 3886 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); |
| 3958 if (expr->op() == Token::EQ_STRICT) { | 3887 if (expr->op() == Token::EQ_STRICT) { |
| 3959 Heap::RootListIndex nil_value = nil == kNullValue | 3888 Heap::RootListIndex nil_value = nil == kNullValue |
| 3960 ? Heap::kNullValueRootIndex | 3889 ? Heap::kNullValueRootIndex |
| 3961 : Heap::kUndefinedValueRootIndex; | 3890 : Heap::kUndefinedValueRootIndex; |
| 3962 __ LoadRoot(r4, nil_value); | 3891 __ CompareRoot(r2, nil_value); |
| 3963 __ cmp(r3, r4); | |
| 3964 Split(eq, if_true, if_false, fall_through); | 3892 Split(eq, if_true, if_false, fall_through); |
| 3965 } else { | 3893 } else { |
| 3966 __ JumpIfSmi(r3, if_false); | 3894 Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil); |
| 3967 __ LoadP(r3, FieldMemOperand(r3, HeapObject::kMapOffset)); | 3895 CallIC(ic, expr->CompareOperationFeedbackId()); |
| 3968 __ lbz(r4, FieldMemOperand(r3, Map::kBitFieldOffset)); | 3896 __ CompareRoot(r2, Heap::kTrueValueRootIndex); |
| 3969 __ andi(r0, r4, Operand(1 << Map::kIsUndetectable)); | 3897 Split(eq, if_true, if_false, fall_through); |
| 3970 Split(ne, if_true, if_false, fall_through, cr0); | |
| 3971 } | 3898 } |
| 3972 context()->Plug(if_true, if_false); | 3899 context()->Plug(if_true, if_false); |
| 3973 } | 3900 } |
| 3974 | 3901 |
| 3975 | |
| 3976 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) { | 3902 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) { |
| 3977 __ LoadP(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); | 3903 __ LoadP(r2, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); |
| 3978 context()->Plug(r3); | 3904 context()->Plug(r2); |
| 3979 } | 3905 } |
| 3980 | 3906 |
| 3981 | 3907 Register FullCodeGenerator::result_register() { return r2; } |
| 3982 Register FullCodeGenerator::result_register() { return r3; } | |
| 3983 | |
| 3984 | 3908 |
| 3985 Register FullCodeGenerator::context_register() { return cp; } | 3909 Register FullCodeGenerator::context_register() { return cp; } |
| 3986 | 3910 |
| 3987 | |
| 3988 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) { | 3911 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) { |
| 3989 DCHECK_EQ(static_cast<int>(POINTER_SIZE_ALIGN(frame_offset)), frame_offset); | 3912 DCHECK_EQ(static_cast<int>(POINTER_SIZE_ALIGN(frame_offset)), frame_offset); |
| 3990 __ StoreP(value, MemOperand(fp, frame_offset), r0); | 3913 __ StoreP(value, MemOperand(fp, frame_offset)); |
| 3991 } | 3914 } |
| 3992 | 3915 |
| 3993 | |
| 3994 void FullCodeGenerator::LoadContextField(Register dst, int context_index) { | 3916 void FullCodeGenerator::LoadContextField(Register dst, int context_index) { |
| 3995 __ LoadP(dst, ContextMemOperand(cp, context_index), r0); | 3917 __ LoadP(dst, ContextMemOperand(cp, context_index), r0); |
| 3996 } | 3918 } |
| 3997 | 3919 |
| 3998 | |
| 3999 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() { | 3920 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() { |
| 4000 Scope* closure_scope = scope()->ClosureScope(); | 3921 Scope* closure_scope = scope()->ClosureScope(); |
| 4001 if (closure_scope->is_script_scope() || | 3922 if (closure_scope->is_script_scope() || closure_scope->is_module_scope()) { |
| 4002 closure_scope->is_module_scope()) { | |
| 4003 // Contexts nested in the native context have a canonical empty function | 3923 // Contexts nested in the native context have a canonical empty function |
| 4004 // as their closure, not the anonymous closure containing the global | 3924 // as their closure, not the anonymous closure containing the global |
| 4005 // code. | 3925 // code. |
| 4006 __ LoadNativeContextSlot(Context::CLOSURE_INDEX, ip); | 3926 __ LoadNativeContextSlot(Context::CLOSURE_INDEX, ip); |
| 4007 } else if (closure_scope->is_eval_scope()) { | 3927 } else if (closure_scope->is_eval_scope()) { |
| 4008 // Contexts created by a call to eval have the same closure as the | 3928 // Contexts created by a call to eval have the same closure as the |
| 4009 // context calling eval, not the anonymous closure containing the eval | 3929 // context calling eval, not the anonymous closure containing the eval |
| 4010 // code. Fetch it from the context. | 3930 // code. Fetch it from the context. |
| 4011 __ LoadP(ip, ContextMemOperand(cp, Context::CLOSURE_INDEX)); | 3931 __ LoadP(ip, ContextMemOperand(cp, Context::CLOSURE_INDEX)); |
| 4012 } else { | 3932 } else { |
| 4013 DCHECK(closure_scope->is_function_scope()); | 3933 DCHECK(closure_scope->is_function_scope()); |
| 4014 __ LoadP(ip, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); | 3934 __ LoadP(ip, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); |
| 4015 } | 3935 } |
| 4016 PushOperand(ip); | 3936 PushOperand(ip); |
| 4017 } | 3937 } |
| 4018 | 3938 |
| 4019 | |
| 4020 // ---------------------------------------------------------------------------- | 3939 // ---------------------------------------------------------------------------- |
| 4021 // Non-local control flow support. | 3940 // Non-local control flow support. |
| 4022 | 3941 |
| 4023 void FullCodeGenerator::EnterFinallyBlock() { | 3942 void FullCodeGenerator::EnterFinallyBlock() { |
| 4024 DCHECK(!result_register().is(r4)); | 3943 DCHECK(!result_register().is(r3)); |
| 4025 // Store pending message while executing finally block. | 3944 // Store pending message while executing finally block. |
| 4026 ExternalReference pending_message_obj = | 3945 ExternalReference pending_message_obj = |
| 4027 ExternalReference::address_of_pending_message_obj(isolate()); | 3946 ExternalReference::address_of_pending_message_obj(isolate()); |
| 4028 __ mov(ip, Operand(pending_message_obj)); | 3947 __ mov(ip, Operand(pending_message_obj)); |
| 4029 __ LoadP(r4, MemOperand(ip)); | 3948 __ LoadP(r3, MemOperand(ip)); |
| 4030 PushOperand(r4); | 3949 PushOperand(r3); |
| 4031 | 3950 |
| 4032 ClearPendingMessage(); | 3951 ClearPendingMessage(); |
| 4033 } | 3952 } |
| 4034 | 3953 |
| 4035 | |
| 4036 void FullCodeGenerator::ExitFinallyBlock() { | 3954 void FullCodeGenerator::ExitFinallyBlock() { |
| 4037 DCHECK(!result_register().is(r4)); | 3955 DCHECK(!result_register().is(r3)); |
| 4038 // Restore pending message from stack. | 3956 // Restore pending message from stack. |
| 4039 PopOperand(r4); | 3957 PopOperand(r3); |
| 4040 ExternalReference pending_message_obj = | 3958 ExternalReference pending_message_obj = |
| 4041 ExternalReference::address_of_pending_message_obj(isolate()); | 3959 ExternalReference::address_of_pending_message_obj(isolate()); |
| 4042 __ mov(ip, Operand(pending_message_obj)); | 3960 __ mov(ip, Operand(pending_message_obj)); |
| 4043 __ StoreP(r4, MemOperand(ip)); | 3961 __ StoreP(r3, MemOperand(ip)); |
| 4044 } | 3962 } |
| 4045 | 3963 |
| 4046 | |
| 4047 void FullCodeGenerator::ClearPendingMessage() { | 3964 void FullCodeGenerator::ClearPendingMessage() { |
| 4048 DCHECK(!result_register().is(r4)); | 3965 DCHECK(!result_register().is(r3)); |
| 4049 ExternalReference pending_message_obj = | 3966 ExternalReference pending_message_obj = |
| 4050 ExternalReference::address_of_pending_message_obj(isolate()); | 3967 ExternalReference::address_of_pending_message_obj(isolate()); |
| 4051 __ LoadRoot(r4, Heap::kTheHoleValueRootIndex); | 3968 __ LoadRoot(r3, Heap::kTheHoleValueRootIndex); |
| 4052 __ mov(ip, Operand(pending_message_obj)); | 3969 __ mov(ip, Operand(pending_message_obj)); |
| 4053 __ StoreP(r4, MemOperand(ip)); | 3970 __ StoreP(r3, MemOperand(ip)); |
| 4054 } | 3971 } |
| 4055 | 3972 |
| 3973 void FullCodeGenerator::EmitLoadStoreICSlot(FeedbackVectorSlot slot) { |
| 3974 DCHECK(!slot.IsInvalid()); |
| 3975 __ mov(VectorStoreICTrampolineDescriptor::SlotRegister(), |
| 3976 Operand(SmiFromSlot(slot))); |
| 3977 } |
| 4056 | 3978 |
| 4057 void FullCodeGenerator::DeferredCommands::EmitCommands() { | 3979 void FullCodeGenerator::DeferredCommands::EmitCommands() { |
| 4058 DCHECK(!result_register().is(r4)); | 3980 DCHECK(!result_register().is(r3)); |
| 4059 // Restore the accumulator (r3) and token (r4). | 3981 // Restore the accumulator (r2) and token (r3). |
| 4060 __ Pop(r4, result_register()); | 3982 __ Pop(r3, result_register()); |
| 4061 for (DeferredCommand cmd : commands_) { | 3983 for (DeferredCommand cmd : commands_) { |
| 4062 Label skip; | 3984 Label skip; |
| 4063 __ CmpSmiLiteral(r4, Smi::FromInt(cmd.token), r0); | 3985 __ CmpSmiLiteral(r3, Smi::FromInt(cmd.token), r0); |
| 4064 __ bne(&skip); | 3986 __ bne(&skip); |
| 4065 switch (cmd.command) { | 3987 switch (cmd.command) { |
| 4066 case kReturn: | 3988 case kReturn: |
| 4067 codegen_->EmitUnwindAndReturn(); | 3989 codegen_->EmitUnwindAndReturn(); |
| 4068 break; | 3990 break; |
| 4069 case kThrow: | 3991 case kThrow: |
| 4070 __ Push(result_register()); | 3992 __ Push(result_register()); |
| 4071 __ CallRuntime(Runtime::kReThrow); | 3993 __ CallRuntime(Runtime::kReThrow); |
| 4072 break; | 3994 break; |
| 4073 case kContinue: | 3995 case kContinue: |
| 4074 codegen_->EmitContinue(cmd.target); | 3996 codegen_->EmitContinue(cmd.target); |
| 4075 break; | 3997 break; |
| 4076 case kBreak: | 3998 case kBreak: |
| 4077 codegen_->EmitBreak(cmd.target); | 3999 codegen_->EmitBreak(cmd.target); |
| 4078 break; | 4000 break; |
| 4079 } | 4001 } |
| 4080 __ bind(&skip); | 4002 __ bind(&skip); |
| 4081 } | 4003 } |
| 4082 } | 4004 } |
| 4083 | 4005 |
| 4084 #undef __ | 4006 #undef __ |
| 4085 | 4007 |
| 4008 #if V8_TARGET_ARCH_S390X |
| 4009 static const FourByteInstr kInterruptBranchInstruction = 0xA7A40011; |
| 4010 static const FourByteInstr kOSRBranchInstruction = 0xA7040011; |
| 4011 static const int16_t kBackEdgeBranchOffset = 0x11 * 2; |
| 4012 #else |
| 4013 static const FourByteInstr kInterruptBranchInstruction = 0xA7A4000D; |
| 4014 static const FourByteInstr kOSRBranchInstruction = 0xA704000D; |
| 4015 static const int16_t kBackEdgeBranchOffset = 0xD * 2; |
| 4016 #endif |
| 4086 | 4017 |
| 4087 void BackEdgeTable::PatchAt(Code* unoptimized_code, Address pc, | 4018 void BackEdgeTable::PatchAt(Code* unoptimized_code, Address pc, |
| 4088 BackEdgeState target_state, | 4019 BackEdgeState target_state, |
| 4089 Code* replacement_code) { | 4020 Code* replacement_code) { |
| 4090 Address mov_address = Assembler::target_address_from_return_address(pc); | 4021 Address call_address = Assembler::target_address_from_return_address(pc); |
| 4091 Address cmp_address = mov_address - 2 * Assembler::kInstrSize; | 4022 Address branch_address = call_address - 4; |
| 4092 Isolate* isolate = unoptimized_code->GetIsolate(); | 4023 Isolate* isolate = unoptimized_code->GetIsolate(); |
| 4093 CodePatcher patcher(isolate, cmp_address, 1); | 4024 CodePatcher patcher(isolate, branch_address, 4); |
| 4094 | 4025 |
| 4095 switch (target_state) { | 4026 switch (target_state) { |
| 4096 case INTERRUPT: { | 4027 case INTERRUPT: { |
| 4097 // <decrement profiling counter> | 4028 // <decrement profiling counter> |
| 4098 // cmpi r6, 0 | 4029 // bge <ok> ;; patched to GE BRC |
| 4099 // bge <ok> ;; not changed | 4030 // brasl r14, <interrupt stub address> |
| 4100 // mov r12, <interrupt stub address> | |
| 4101 // mtlr r12 | |
| 4102 // blrl | |
| 4103 // <reset profiling counter> | 4031 // <reset profiling counter> |
| 4104 // ok-label | 4032 // ok-label |
| 4105 patcher.masm()->cmpi(r6, Operand::Zero()); | 4033 patcher.masm()->brc(ge, Operand(kBackEdgeBranchOffset)); |
| 4106 break; | 4034 break; |
| 4107 } | 4035 } |
| 4108 case ON_STACK_REPLACEMENT: | 4036 case ON_STACK_REPLACEMENT: |
| 4109 case OSR_AFTER_STACK_CHECK: | 4037 case OSR_AFTER_STACK_CHECK: |
| 4110 // <decrement profiling counter> | 4038 // <decrement profiling counter> |
| 4111 // crset | 4039 // brc 0x0, <ok> ;; patched to NOP BRC |
| 4112 // bge <ok> ;; not changed | 4040 // brasl r14, <interrupt stub address> |
| 4113 // mov r12, <on-stack replacement address> | |
| 4114 // mtlr r12 | |
| 4115 // blrl | |
| 4116 // <reset profiling counter> | 4041 // <reset profiling counter> |
| 4117 // ok-label ----- pc_after points here | 4042 // ok-label ----- pc_after points here |
| 4118 | 4043 patcher.masm()->brc(CC_NOP, Operand(kBackEdgeBranchOffset)); |
| 4119 // Set the LT bit such that bge is a NOP | |
| 4120 patcher.masm()->crset(Assembler::encode_crbit(cr7, CR_LT)); | |
| 4121 break; | 4044 break; |
| 4122 } | 4045 } |
| 4123 | 4046 |
| 4124 // Replace the stack check address in the mov sequence with the | 4047 // Replace the stack check address in the call sequence with the |
| 4125 // entry address of the replacement code. | 4048 // entry address of the replacement code. |
| 4126 Assembler::set_target_address_at(isolate, mov_address, unoptimized_code, | 4049 Assembler::set_target_address_at(isolate, call_address, unoptimized_code, |
| 4127 replacement_code->entry()); | 4050 replacement_code->entry()); |
| 4128 | 4051 |
| 4129 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch( | 4052 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch( |
| 4130 unoptimized_code, mov_address, replacement_code); | 4053 unoptimized_code, call_address, replacement_code); |
| 4131 } | 4054 } |
| 4132 | 4055 |
| 4133 | |
| 4134 BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState( | 4056 BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState( |
| 4135 Isolate* isolate, Code* unoptimized_code, Address pc) { | 4057 Isolate* isolate, Code* unoptimized_code, Address pc) { |
| 4136 Address mov_address = Assembler::target_address_from_return_address(pc); | 4058 Address call_address = Assembler::target_address_from_return_address(pc); |
| 4137 Address cmp_address = mov_address - 2 * Assembler::kInstrSize; | 4059 Address branch_address = call_address - 4; |
| 4138 Address interrupt_address = | 4060 Address interrupt_address = |
| 4139 Assembler::target_address_at(mov_address, unoptimized_code); | 4061 Assembler::target_address_at(call_address, unoptimized_code); |
| 4140 | 4062 |
| 4141 if (Assembler::IsCmpImmediate(Assembler::instr_at(cmp_address))) { | 4063 DCHECK(BRC == Instruction::S390OpcodeValue(branch_address)); |
| 4064 // For interrupt, we expect a branch greater than or equal |
| 4065 // i.e. BRC 0xa, +XXXX (0xA7A4XXXX) |
| 4066 FourByteInstr br_instr = Instruction::InstructionBits( |
| 4067 reinterpret_cast<const byte*>(branch_address)); |
| 4068 if (kInterruptBranchInstruction == br_instr) { |
| 4142 DCHECK(interrupt_address == isolate->builtins()->InterruptCheck()->entry()); | 4069 DCHECK(interrupt_address == isolate->builtins()->InterruptCheck()->entry()); |
| 4143 return INTERRUPT; | 4070 return INTERRUPT; |
| 4144 } | 4071 } |
| 4145 | 4072 |
| 4146 DCHECK(Assembler::IsCrSet(Assembler::instr_at(cmp_address))); | 4073 // Expect BRC to be patched to NOP branch. |
| 4074 // i.e. BRC 0x0, +XXXX (0xA704XXXX) |
| 4075 USE(kOSRBranchInstruction); |
| 4076 DCHECK(kOSRBranchInstruction == br_instr); |
| 4147 | 4077 |
| 4148 if (interrupt_address == isolate->builtins()->OnStackReplacement()->entry()) { | 4078 if (interrupt_address == isolate->builtins()->OnStackReplacement()->entry()) { |
| 4149 return ON_STACK_REPLACEMENT; | 4079 return ON_STACK_REPLACEMENT; |
| 4150 } | 4080 } |
| 4151 | 4081 |
| 4152 DCHECK(interrupt_address == | 4082 DCHECK(interrupt_address == |
| 4153 isolate->builtins()->OsrAfterStackCheck()->entry()); | 4083 isolate->builtins()->OsrAfterStackCheck()->entry()); |
| 4154 return OSR_AFTER_STACK_CHECK; | 4084 return OSR_AFTER_STACK_CHECK; |
| 4155 } | 4085 } |
| 4086 |
| 4156 } // namespace internal | 4087 } // namespace internal |
| 4157 } // namespace v8 | 4088 } // namespace v8 |
| 4158 #endif // V8_TARGET_ARCH_PPC | 4089 #endif // V8_TARGET_ARCH_S390 |
| OLD | NEW |