| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2011-2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| 11 // with the distribution. | 11 // with the distribution. |
| 12 // * Neither the name of Google Inc. nor the names of its | 12 // * Neither the name of Google Inc. nor the names of its |
| 13 // contributors may be used to endorse or promote products derived | 13 // contributors may be used to endorse or promote products derived |
| 14 // from this software without specific prior written permission. | 14 // from this software without specific prior written permission. |
| 15 // | 15 // |
| 16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS | 16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS |
| 17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT | 17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT |
| 18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR | 18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR |
| 19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT | 19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT |
| 20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, | 20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, |
| 21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT | 21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT |
| 22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, | 22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, |
| 23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | 23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY |
| 24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | 24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
| 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE |
| 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
| 27 | 27 |
| 28 #include "v8.h" | 28 #include "v8.h" |
| 29 | 29 |
| 30 #if defined(V8_TARGET_ARCH_ARM) | 30 #if defined(V8_TARGET_ARCH_SH4) |
| 31 | 31 |
| 32 #include "code-stubs.h" | 32 #include "code-stubs.h" |
| 33 #include "codegen.h" | 33 #include "codegen.h" |
| 34 #include "compiler.h" | 34 #include "compiler.h" |
| 35 #include "debug.h" | 35 #include "debug.h" |
| 36 #include "full-codegen.h" | 36 #include "full-codegen.h" |
| 37 #include "isolate-inl.h" | 37 #include "isolate-inl.h" |
| 38 #include "parser.h" | 38 #include "parser.h" |
| 39 #include "scopes.h" | 39 #include "scopes.h" |
| 40 #include "stub-cache.h" | 40 #include "stub-cache.h" |
| 41 | 41 |
| 42 #include "arm/code-stubs-arm.h" | 42 #include "sh4/code-stubs-sh4.h" |
| 43 #include "arm/macro-assembler-arm.h" | 43 #include "sh4/macro-assembler-sh4.h" |
| 44 | 44 |
| 45 namespace v8 { | 45 namespace v8 { |
| 46 namespace internal { | 46 namespace internal { |
| 47 | 47 |
| 48 #define __ ACCESS_MASM(masm_) | 48 #define __ ACCESS_MASM(masm_) |
| 49 | 49 |
| 50 #include "map-sh4.h" // For ARM -> SH4 register mapping |
| 51 |
| 50 | 52 |
| 51 // A patch site is a location in the code which it is possible to patch. This | 53 // A patch site is a location in the code which it is possible to patch. This |
| 52 // class has a number of methods to emit the code which is patchable and the | 54 // class has a number of methods to emit the code which is patchable and the |
| 53 // method EmitPatchInfo to record a marker back to the patchable code. This | 55 // method EmitPatchInfo to record a marker back to the patchable code. |
| 54 // marker is a cmp rx, #yyy instruction, and x * 0x00000fff + yyy (raw 12 bit | 56 // On SH4 this marker is a cmp #ii, r0 operation, which limits the range |
| 55 // immediate value is used) is the delta from the pc to the first instruction of | 57 // of #ii to -128..+127 instructions for the distance between the patch and |
| 56 // the patchable code. | 58 // the label. |
| 59 // The #ii (8-bit signed value) is the delta from the pc to |
| 60 // the first instruction of the patchable code. |
| 57 class JumpPatchSite BASE_EMBEDDED { | 61 class JumpPatchSite BASE_EMBEDDED { |
| 58 public: | 62 public: |
| 59 explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) { | 63 explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) { |
| 60 #ifdef DEBUG | 64 #ifdef DEBUG |
| 61 info_emitted_ = false; | 65 info_emitted_ = false; |
| 66 ASSERT(!patch_site_.is_bound()); |
| 62 #endif | 67 #endif |
| 63 } | 68 } |
| 64 | 69 |
| 65 ~JumpPatchSite() { | 70 ~JumpPatchSite() { |
| 66 ASSERT(patch_site_.is_bound() == info_emitted_); | 71 ASSERT(patch_site_.is_bound() == info_emitted_); |
| 67 } | 72 } |
| 68 | 73 |
| 69 // When initially emitting this ensure that a jump is always generated to skip | 74 // When initially emitting this ensure that a jump is always generated to skip |
| 70 // the inlined smi code. | 75 // the inlined smi code. |
| 71 void EmitJumpIfNotSmi(Register reg, Label* target) { | 76 void EmitJumpIfNotSmi(Register reg, Label* target) { |
| 72 ASSERT(!patch_site_.is_bound() && !info_emitted_); | 77 ASSERT(!patch_site_.is_bound() && !info_emitted_); |
| 73 Assembler::BlockConstPoolScope block_const_pool(masm_); | 78 // For the current unbound branch sequence (in assembler_sh4.cc) |
| 79 // to be simple to patch, we force the alignment now, such that the |
| 80 // first instruction of the sequence after the cmp is a branch. |
| 81 __ align(); |
| 82 __ mov(sh4_ip, Operand(kSmiTagMask)); |
| 74 __ bind(&patch_site_); | 83 __ bind(&patch_site_); |
| 75 __ cmp(reg, Operand(reg)); | 84 __ cmp(reg, reg); |
| 85 // Don't use b(al, ...) as that might emit the constant pool right after the |
| 86 // branch. After patching when the branch is no longer unconditional |
| 87 // execution can continue into the constant pool. |
| 88 // Also for the later patch in PatchInlinedSmiCode, we require |
| 89 // that the target is not bound yet. |
| 90 ASSERT(!target->is_bound()); |
| 91 ASSERT(masm_->pc_offset() % 4 == 0); |
| 76 __ b(eq, target); // Always taken before patched. | 92 __ b(eq, target); // Always taken before patched. |
| 77 } | 93 } |
| 78 | 94 |
| 79 // When initially emitting this ensure that a jump is never generated to skip | 95 // When initially emitting this ensure that a jump is never generated to skip |
| 80 // the inlined smi code. | 96 // the inlined smi code. |
| 81 void EmitJumpIfSmi(Register reg, Label* target) { | 97 void EmitJumpIfSmi(Register reg, Label* target) { |
| 82 ASSERT(!patch_site_.is_bound() && !info_emitted_); | 98 ASSERT(!patch_site_.is_bound() && !info_emitted_); |
| 83 Assembler::BlockConstPoolScope block_const_pool(masm_); | 99 __ align(); |
| 100 __ mov(sh4_ip, Operand(kSmiTagMask)); |
| 84 __ bind(&patch_site_); | 101 __ bind(&patch_site_); |
| 85 __ cmp(reg, Operand(reg)); | 102 __ cmp(reg, reg); |
| 86 __ b(ne, target); // Never taken before patched. | 103 ASSERT(!target->is_bound()); |
| 104 ASSERT(masm_->pc_offset() % 4 == 0); |
| 105 __ bf(target); // Never taken before patched. |
| 87 } | 106 } |
| 88 | 107 |
| 89 void EmitPatchInfo() { | 108 void EmitPatchInfo() { |
| 90 // Block literal pool emission whilst recording patch site information. | |
| 91 Assembler::BlockConstPoolScope block_const_pool(masm_); | |
| 92 if (patch_site_.is_bound()) { | 109 if (patch_site_.is_bound()) { |
| 93 int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_); | 110 int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_); |
| 94 Register reg; | 111 ASSERT(delta_to_patch_site >= 0); |
| 95 reg.set_code(delta_to_patch_site / kOff12Mask); | 112 // Ensure that the delta fits into the raw immediate. |
| 96 __ cmp_raw_immediate(reg, delta_to_patch_site % kOff12Mask); | 113 ASSERT(masm_->fits_cmp_unsigned_imm(delta_to_patch_site)); |
| 114 __ cmpeq_r0_unsigned_imm(delta_to_patch_site); |
| 97 #ifdef DEBUG | 115 #ifdef DEBUG |
| 98 info_emitted_ = true; | 116 info_emitted_ = true; |
| 99 #endif | 117 #endif |
| 100 } else { | 118 } else { |
| 101 __ nop(); // Signals no inlined code. | |
| 102 } | 119 } |
| 103 } | 120 } |
| 104 | 121 |
| 122 bool is_bound() const { return patch_site_.is_bound(); } |
| 123 |
| 105 private: | 124 private: |
| 106 MacroAssembler* masm_; | 125 MacroAssembler* masm_; |
| 107 Label patch_site_; | 126 Label patch_site_; |
| 108 #ifdef DEBUG | 127 #ifdef DEBUG |
| 109 bool info_emitted_; | 128 bool info_emitted_; |
| 110 #endif | 129 #endif |
| 111 }; | 130 }; |
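
A note for readers following the patching protocol: `EmitPatchInfo()` leaves behind a `cmp/eq #imm8, r0` whose immediate is the instruction count back to `patch_site_` (treated as unsigned here, per `fits_cmp_unsigned_imm`). Below is a minimal sketch of how a patcher could walk back from that marker, assuming the standard SH-4 `cmp/eq #imm,R0` encoding (`0x88ii`) and 16-bit instructions; `DecodePatchSite` and its constants are illustrative, not part of this patch:

```cpp
#include <stdint.h>

// Hypothetical helper mirroring what PatchInlinedSmiCode needs to do:
// read the cmp/eq #imm8,r0 marker and recover the patch-site address.
static uint8_t* DecodePatchSite(uint8_t* marker_pc) {
  const int kInstrSize = 2;  // assumption: SH-4 instructions are 16 bits
  uint16_t instr = *reinterpret_cast<uint16_t*>(marker_pc);
  if ((instr & 0xFF00u) != 0x8800u) return nullptr;  // not cmp/eq #imm8,r0
  int delta = instr & 0xFFu;      // instructions from patch site to marker
  return marker_pc - delta * kInstrSize;
}
```
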
| 112 | 131 |
| 113 | 132 |
| 114 // Generate code for a JS function. On entry to the function the receiver | 133 // Generate code for a JS function. On entry to the function the receiver |
| 115 // and arguments have been pushed on the stack left to right. The actual | 134 // and arguments have been pushed on the stack left to right. The actual |
| 116 // argument count matches the formal parameter count expected by the | 135 // argument count matches the formal parameter count expected by the |
| 117 // function. | 136 // function. |
| 118 // | 137 // |
| 119 // The live registers are: | 138 // The live registers are: |
| 120 // o r1: the JS function object being called (i.e., ourselves) | 139 // o r1: the JS function object being called (i.e., ourselves) |
| 121 // o cp: our context | 140 // o cp: our context |
| 122 // o fp: our caller's frame pointer | 141 // o fp: our caller's frame pointer |
| 123 // o sp: stack pointer | 142 // o sp: stack pointer |
| 124 // o lr: return address | 143 // o pr: return address |
| 125 // | 144 // |
| 126 // The function builds a JS frame. Please see JavaScriptFrameConstants in | 145 // The function builds a JS frame. Please see JavaScriptFrameConstants in |
| 127 // frames-arm.h for its layout. | 146 // frames-sh4.h for its layout. |
| 128 void FullCodeGenerator::Generate() { | 147 void FullCodeGenerator::Generate() { |
| 129 CompilationInfo* info = info_; | 148 CompilationInfo* info = info_; |
| 130 handler_table_ = | 149 handler_table_ = |
| 131 isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED); | 150 isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED); |
| 132 profiling_counter_ = isolate()->factory()->NewJSGlobalPropertyCell( | 151 profiling_counter_ = isolate()->factory()->NewJSGlobalPropertyCell( |
| 133 Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget))); | 152 Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget))); |
| 134 SetFunctionPosition(function()); | 153 SetFunctionPosition(function()); |
| 135 Comment cmnt(masm_, "[ function compiled by full code generator"); | 154 Comment cmnt(masm_, "[ function compiled by full code generator"); |
| 136 | 155 |
| 137 ProfileEntryHookStub::MaybeCallEntryHook(masm_); | 156 ProfileEntryHookStub::MaybeCallEntryHook(masm_); |
| 138 | 157 |
| 139 #ifdef DEBUG | 158 #ifdef DEBUG |
| 140 if (strlen(FLAG_stop_at) > 0 && | 159 if (strlen(FLAG_stop_at) > 0 && |
| 141 info->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) { | 160 info->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) { |
| 142 __ stop("stop-at"); | 161 __ stop("stop-at"); |
| 143 } | 162 } |
| 144 #endif | 163 #endif |
| 145 | 164 |
| 146 // Strict mode functions and builtins need to replace the receiver | 165 // Strict mode functions and builtins need to replace the receiver |
| 147 // with undefined when called as functions (without an explicit | 166 // with undefined when called as functions (without an explicit |
| 148 // receiver object). r5 is zero for method calls and non-zero for | 167 // receiver object). r5 is zero for method calls and non-zero for |
| 149 // function calls. | 168 // function calls. |
| 150 if (!info->is_classic_mode() || info->is_native()) { | 169 if (!info->is_classic_mode() || info->is_native()) { |
| 151 Label ok; | 170 Label ok; |
| 152 __ cmp(r5, Operand(0)); | 171 __ cmp(r5, Operand(0)); |
| 153 __ b(eq, &ok); | 172 __ b(eq, &ok, Label::kNear); |
| 154 int receiver_offset = info->scope()->num_parameters() * kPointerSize; | 173 int receiver_offset = info->scope()->num_parameters() * kPointerSize; |
| 155 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex); | 174 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex); |
| 156 __ str(r2, MemOperand(sp, receiver_offset)); | 175 __ str(r2, MemOperand(sp, receiver_offset)); |
| 157 __ bind(&ok); | 176 __ bind(&ok); |
| 158 } | 177 } |
| 159 | 178 |
| 160 // Open a frame scope to indicate that there is a frame on the stack. The | 179 // Open a frame scope to indicate that there is a frame on the stack. The |
| 161 // MANUAL indicates that the scope shouldn't actually generate code to set up | 180 // MANUAL indicates that the scope shouldn't actually generate code to set up |
| 162 // the frame (that is done below). | 181 // the frame (that is done below). |
| 163 FrameScope frame_scope(masm_, StackFrame::MANUAL); | 182 FrameScope frame_scope(masm_, StackFrame::MANUAL); |
| 164 | 183 |
| 165 int locals_count = info->scope()->num_stack_slots(); | 184 int locals_count = info->scope()->num_stack_slots(); |
| 166 | 185 |
| 167 __ Push(lr, fp, cp, r1); | 186 __ Push(pr, fp, cp, r1); |
| 168 if (locals_count > 0) { | 187 if (locals_count > 0) { |
| 169 // Load undefined value here, so the value is ready for the loop | 188 // Load undefined value here, so the value is ready for the loop |
| 170 // below. | 189 // below. |
| 171 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex); | 190 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex); |
| 172 } | 191 } |
| 173 // Adjust fp to point to caller's fp. | 192 // Adjust fp to point to caller's fp. |
| 174 __ add(fp, sp, Operand(2 * kPointerSize)); | 193 __ add(fp, sp, Operand(2 * kPointerSize)); |
| 175 | 194 |
| 176 { Comment cmnt(masm_, "[ Allocate locals"); | 195 { Comment cmnt(masm_, "[ Allocate locals"); |
| 177 for (int i = 0; i < locals_count; i++) { | 196 for (int i = 0; i < locals_count; i++) { |
| (...skipping 100 matching lines...) |
| 278 ASSERT(function->proxy()->var()->location() != Variable::UNALLOCATED); | 297 ASSERT(function->proxy()->var()->location() != Variable::UNALLOCATED); |
| 279 VisitVariableDeclaration(function); | 298 VisitVariableDeclaration(function); |
| 280 } | 299 } |
| 281 VisitDeclarations(scope()->declarations()); | 300 VisitDeclarations(scope()->declarations()); |
| 282 } | 301 } |
| 283 | 302 |
| 284 { Comment cmnt(masm_, "[ Stack check"); | 303 { Comment cmnt(masm_, "[ Stack check"); |
| 285 PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS); | 304 PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS); |
| 286 Label ok; | 305 Label ok; |
| 287 __ LoadRoot(ip, Heap::kStackLimitRootIndex); | 306 __ LoadRoot(ip, Heap::kStackLimitRootIndex); |
| 288 __ cmp(sp, Operand(ip)); | 307 __ cmphs(sp, ip); |
| 289 __ b(hs, &ok); | 308 __ bt_near(&ok); |
| 290 PredictableCodeSizeScope predictable(masm_); | |
| 291 StackCheckStub stub; | 309 StackCheckStub stub; |
| 292 __ CallStub(&stub); | 310 __ CallStub(&stub); |
| 293 __ bind(&ok); | 311 __ bind(&ok); |
| 294 } | 312 } |
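
The `cmphs`/`bt_near` pair above is the recurring SH4 shape of ARM's `cmp` + `b(hs, ...)`: the compare writes the T bit, the branch tests it. The guard's semantics, as a small sketch (not V8 code):

```cpp
#include <stdint.h>

// T after "cmphs(sp, limit)" is (sp >= limit), unsigned; bt_near(&ok)
// skips the StackCheckStub call in that common case.
static bool NeedsStackCheckStub(uintptr_t sp, uintptr_t stack_limit) {
  return sp < stack_limit;  // only then fall through into the stub call
}
```
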
| 295 | 313 |
| 296 { Comment cmnt(masm_, "[ Body"); | 314 { Comment cmnt(masm_, "[ Body"); |
| 297 ASSERT(loop_depth() == 0); | 315 ASSERT(loop_depth() == 0); |
| 298 VisitStatements(function()->body()); | 316 VisitStatements(function()->body()); |
| 299 ASSERT(loop_depth() == 0); | 317 ASSERT(loop_depth() == 0); |
| 300 } | 318 } |
| 301 } | 319 } |
| 302 | 320 |
| 303 // Always emit a 'return undefined' in case control fell off the end of | 321 // Always emit a 'return undefined' in case control fell off the end of |
| 304 // the body. | 322 // the body. |
| 305 { Comment cmnt(masm_, "[ return <undefined>;"); | 323 { Comment cmnt(masm_, "[ return <undefined>;"); |
| 306 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex); | 324 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex); |
| 307 } | 325 } |
| 308 EmitReturnSequence(); | 326 EmitReturnSequence(); |
| 309 | 327 |
| 328 // TODO(stm): implement this when const pools are active |
| 310 // Force emit the constant pool, so it doesn't get emitted in the middle | 329 // Force emit the constant pool, so it doesn't get emitted in the middle |
| 311 // of the stack check table. | 330 // of the stack check table. |
| 312 masm()->CheckConstPool(true, false); | 331 // masm()->CheckConstPool(true, false); |
| 313 } | 332 } |
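
For orientation, the frame that `Generate()` builds with `Push(pr, fp, cp, r1)` followed by `add(fp, sp, Operand(2 * kPointerSize))` should look as sketched below; this assumes the SH4 port keeps the layout of `JavaScriptFrameConstants` from the ARM port (offsets inferred, not quoted from the port's frames header):

```cpp
// Stack after the prologue, higher addresses first:
//
//   fp + kPointerSize      : saved pr (return address)
//   fp                     : caller's fp
//   fp - kPointerSize      : context (cp)
//   fp - 2*kPointerSize    : JS function (r1)
//   down to sp             : stack-allocated locals, filled with
//                            undefined by the "Allocate locals" loop
```
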
| 314 | 333 |
| 315 | 334 |
| 316 void FullCodeGenerator::ClearAccumulator() { | 335 void FullCodeGenerator::ClearAccumulator() { |
| 317 __ mov(r0, Operand(Smi::FromInt(0))); | 336 __ mov(r0, Operand(Smi::FromInt(0))); |
| 318 } | 337 } |
| 319 | 338 |
| 320 | 339 |
| 321 void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) { | 340 void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) { |
| 322 __ mov(r2, Operand(profiling_counter_)); | 341 __ mov(r2, Operand(profiling_counter_)); |
| 323 __ ldr(r3, FieldMemOperand(r2, JSGlobalPropertyCell::kValueOffset)); | 342 __ ldr(r3, FieldMemOperand(r2, JSGlobalPropertyCell::kValueOffset)); |
| 324 __ sub(r3, r3, Operand(Smi::FromInt(delta)), SetCC); | 343 __ sub(r3, r3, Operand(Smi::FromInt(delta))); |
| 344 __ cmpge(r3, Operand(0)); |
| 325 __ str(r3, FieldMemOperand(r2, JSGlobalPropertyCell::kValueOffset)); | 345 __ str(r3, FieldMemOperand(r2, JSGlobalPropertyCell::kValueOffset)); |
| 326 } | 346 } |
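
SH4 has no per-instruction flag setting, so ARM's `sub(..., SetCC)` + `b(pl, ...)` is split into the plain arithmetic op plus an explicit T-bit compare; callers then branch with `bt`. The decrement's semantics, sketched as plain C++ (an illustration, not patch code):

```cpp
#include <stdint.h>

// The counter is exhausted when the decremented value goes negative;
// cmpge + bt on SH4 stands in for ARM's N flag + bpl.
static bool CounterExhausted(int32_t counter, int32_t delta) {
  int32_t updated = counter - delta;  // __ sub(r3, r3, Operand(...))
  bool t_bit = (updated >= 0);        // __ cmpge(r3, Operand(0))
  return !t_bit;                      // callers: __ bt(&ok) skips the stub
}
```
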
| 327 | 347 |
| 328 | 348 |
| 329 void FullCodeGenerator::EmitProfilingCounterReset() { | 349 void FullCodeGenerator::EmitProfilingCounterReset() { |
| 330 int reset_value = FLAG_interrupt_budget; | 350 int reset_value = FLAG_interrupt_budget; |
| 331 if (info_->ShouldSelfOptimize() && !FLAG_retry_self_opt) { | 351 if (info_->ShouldSelfOptimize() && !FLAG_retry_self_opt) { |
| 332 // Self-optimization is a one-off thing: if it fails, don't try again. | 352 // Self-optimization is a one-off thing: if it fails, don't try again. |
| 333 reset_value = Smi::kMaxValue; | 353 reset_value = Smi::kMaxValue; |
| 334 } | 354 } |
| 335 if (isolate()->IsDebuggerActive()) { | 355 if (isolate()->IsDebuggerActive()) { |
| 336 // Detect debug break requests as soon as possible. | 356 // Detect debug break requests as soon as possible. |
| 337 reset_value = FLAG_interrupt_budget >> 4; | 357 reset_value = FLAG_interrupt_budget >> 4; |
| 338 } | 358 } |
| 339 __ mov(r2, Operand(profiling_counter_)); | 359 __ mov(r2, Operand(profiling_counter_)); |
| 340 __ mov(r3, Operand(Smi::FromInt(reset_value))); | 360 __ mov(r3, Operand(Smi::FromInt(reset_value))); |
| 341 __ str(r3, FieldMemOperand(r2, JSGlobalPropertyCell::kValueOffset)); | 361 __ str(r3, FieldMemOperand(r2, JSGlobalPropertyCell::kValueOffset)); |
| 342 } | 362 } |
| 343 | 363 |
| 344 | 364 |
| 345 void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt, | 365 void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt, |
| 346 Label* back_edge_target) { | 366 Label* back_edge_target) { |
| 347 Comment cmnt(masm_, "[ Stack check"); | 367 Comment cmnt(masm_, "[ Stack check"); |
| 348 // Block literal pools whilst emitting stack check code. | |
| 349 Assembler::BlockConstPoolScope block_const_pool(masm_); | |
| 350 Label ok; | 368 Label ok; |
| 351 | 369 |
| 352 if (FLAG_count_based_interrupts) { | 370 if (FLAG_count_based_interrupts) { |
| 353 int weight = 1; | 371 int weight = 1; |
| 354 if (FLAG_weighted_back_edges) { | 372 if (FLAG_weighted_back_edges) { |
| 355 ASSERT(back_edge_target->is_bound()); | 373 ASSERT(back_edge_target->is_bound()); |
| 356 int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target); | 374 int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target); |
| 357 weight = Min(kMaxBackEdgeWeight, | 375 weight = Min(kMaxBackEdgeWeight, |
| 358 Max(1, distance / kBackEdgeDistanceUnit)); | 376 Max(1, distance / kBackEdgeDistanceUnit)); |
| 359 } | 377 } |
| 360 EmitProfilingCounterDecrement(weight); | 378 EmitProfilingCounterDecrement(weight); |
| 361 __ b(pl, &ok); | 379 __ bt(&ok); // TODO(STM): ?? |
| 362 InterruptStub stub; | 380 InterruptStub stub; |
| 363 __ CallStub(&stub); | 381 __ CallStub(&stub); |
| 364 } else { | 382 } else { |
| 365 __ LoadRoot(ip, Heap::kStackLimitRootIndex); | 383 __ LoadRoot(ip, Heap::kStackLimitRootIndex); |
| 366 __ cmp(sp, Operand(ip)); | 384 __ cmphs(sp, ip); |
| 367 __ b(hs, &ok); | 385 __ bt(&ok); |
| 368 PredictableCodeSizeScope predictable(masm_); | |
| 369 StackCheckStub stub; | 386 StackCheckStub stub; |
| 370 __ CallStub(&stub); | 387 __ CallStub(&stub); |
| 371 } | 388 } |
| 372 | 389 |
| 373 // Record a mapping of this PC offset to the OSR id. This is used to find | 390 // Record a mapping of this PC offset to the OSR id. This is used to find |
| 374 // the AST id from the unoptimized code in order to use it as a key into | 391 // the AST id from the unoptimized code in order to use it as a key into |
| 375 // the deoptimization input data found in the optimized code. | 392 // the deoptimization input data found in the optimized code. |
| 376 RecordStackCheck(stmt->OsrEntryId()); | 393 RecordStackCheck(stmt->OsrEntryId()); |
| 377 | 394 |
| 378 if (FLAG_count_based_interrupts) { | 395 if (FLAG_count_based_interrupts) { |
| (...skipping 26 matching lines...) |
| 405 int weight = 1; | 422 int weight = 1; |
| 406 if (info_->ShouldSelfOptimize()) { | 423 if (info_->ShouldSelfOptimize()) { |
| 407 weight = FLAG_interrupt_budget / FLAG_self_opt_count; | 424 weight = FLAG_interrupt_budget / FLAG_self_opt_count; |
| 408 } else if (FLAG_weighted_back_edges) { | 425 } else if (FLAG_weighted_back_edges) { |
| 409 int distance = masm_->pc_offset(); | 426 int distance = masm_->pc_offset(); |
| 410 weight = Min(kMaxBackEdgeWeight, | 427 weight = Min(kMaxBackEdgeWeight, |
| 411 Max(1, distance / kBackEdgeDistanceUnit)); | 428 Max(1, distance / kBackEdgeDistanceUnit)); |
| 412 } | 429 } |
| 413 EmitProfilingCounterDecrement(weight); | 430 EmitProfilingCounterDecrement(weight); |
| 414 Label ok; | 431 Label ok; |
| 415 __ b(pl, &ok); | 432 __ bt(&ok); // TODO(STM): ?? |
| 416 __ push(r0); | 433 __ push(r0); |
| 417 if (info_->ShouldSelfOptimize() && FLAG_direct_self_opt) { | 434 if (info_->ShouldSelfOptimize() && FLAG_direct_self_opt) { |
| 418 __ ldr(r2, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); | 435 __ ldr(r2, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); |
| 419 __ push(r2); | 436 __ push(r2); |
| 420 __ CallRuntime(Runtime::kOptimizeFunctionOnNextCall, 1); | 437 __ CallRuntime(Runtime::kOptimizeFunctionOnNextCall, 1); |
| 421 } else { | 438 } else { |
| 422 InterruptStub stub; | 439 InterruptStub stub; |
| 423 __ CallStub(&stub); | 440 __ CallStub(&stub); |
| 424 } | 441 } |
| 425 __ pop(r0); | 442 __ pop(r0); |
| 426 EmitProfilingCounterReset(); | 443 EmitProfilingCounterReset(); |
| 427 __ bind(&ok); | 444 __ bind(&ok); |
| 428 } | 445 } |
| 429 | 446 |
| 430 #ifdef DEBUG | 447 #ifdef DEBUG |
| 431 // Add a label for checking the size of the code used for returning. | 448 // Add a label for checking the size of the code used for returning. |
| 432 Label check_exit_codesize; | 449 Label check_exit_codesize; |
| 433 masm_->bind(&check_exit_codesize); | 450 masm_->bind(&check_exit_codesize); |
| 434 #endif | 451 #endif |
| 435 // Make sure that the constant pool is not emitted inside of the return | 452 // Make sure that the constant pool is not emitted inside of the return |
| 436 // sequence. | 453 // sequence. |
| 437 { Assembler::BlockConstPoolScope block_const_pool(masm_); | 454 { |
| 455 // SH4: removed |
| 456 // Assembler::BlockConstPoolScope block_const_pool(masm_); |
| 438 // Here we use masm_-> instead of the __ macro to avoid the code coverage | 457 // Here we use masm_-> instead of the __ macro to avoid the code coverage |
| 439 // tool from instrumenting as we rely on the code size here. | 458 // tool from instrumenting as we rely on the code size here. |
| 440 int32_t sp_delta = (info_->scope()->num_parameters() + 1) * kPointerSize; | 459 int32_t sp_delta = (info_->scope()->num_parameters() + 1) * kPointerSize; |
| 441 CodeGenerator::RecordPositions(masm_, function()->end_position() - 1); | 460 CodeGenerator::RecordPositions(masm_, function()->end_position() - 1); |
| 442 PredictableCodeSizeScope predictable(masm_); | |
| 443 __ RecordJSReturn(); | 461 __ RecordJSReturn(); |
| 444 masm_->mov(sp, fp); | 462 masm_->mov(sp, fp); |
| 445 masm_->ldm(ia_w, sp, fp.bit() | lr.bit()); | 463 masm_->Pop(lr, fp); |
| 446 masm_->add(sp, sp, Operand(sp_delta)); | 464 masm_->add(sp, sp, Operand(sp_delta)); |
| 447 masm_->Jump(lr); | 465 masm_->Ret(); |
| 448 } | 466 } |
| 449 | 467 |
| 450 #ifdef DEBUG | 468 // #ifdef DEBUG |
| 451 // Check that the size of the code used for returning is large enough | 469 // // Check that the size of the code used for returning is large enough |
| 452 // for the debugger's requirements. | 470 // // for the debugger's requirements. |
| 453 ASSERT(Assembler::kJSReturnSequenceInstructions <= | 471 // ASSERT(Assembler::kJSReturnSequenceInstructions <= |
| 454 masm_->InstructionsGeneratedSince(&check_exit_codesize)); | 472 // masm_->InstructionsGeneratedSince(&check_exit_codesize)); |
| 455 #endif | 473 // #endif |
| 456 } | 474 } |
| 457 } | 475 } |
| 458 | 476 |
| 459 | 477 |
| 460 void FullCodeGenerator::EffectContext::Plug(Variable* var) const { | 478 void FullCodeGenerator::EffectContext::Plug(Variable* var) const { |
| 461 ASSERT(var->IsStackAllocated() || var->IsContextSlot()); | 479 ASSERT(var->IsStackAllocated() || var->IsContextSlot()); |
| 462 } | 480 } |
| 463 | 481 |
| 464 | 482 |
| 465 void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const { | 483 void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const { |
| (...skipping 141 matching lines...) |
| 607 __ bind(materialize_true); | 625 __ bind(materialize_true); |
| 608 } | 626 } |
| 609 | 627 |
| 610 | 628 |
| 611 void FullCodeGenerator::AccumulatorValueContext::Plug( | 629 void FullCodeGenerator::AccumulatorValueContext::Plug( |
| 612 Label* materialize_true, | 630 Label* materialize_true, |
| 613 Label* materialize_false) const { | 631 Label* materialize_false) const { |
| 614 Label done; | 632 Label done; |
| 615 __ bind(materialize_true); | 633 __ bind(materialize_true); |
| 616 __ LoadRoot(result_register(), Heap::kTrueValueRootIndex); | 634 __ LoadRoot(result_register(), Heap::kTrueValueRootIndex); |
| 617 __ jmp(&done); | 635 __ jmp_near(&done); |
| 618 __ bind(materialize_false); | 636 __ bind(materialize_false); |
| 619 __ LoadRoot(result_register(), Heap::kFalseValueRootIndex); | 637 __ LoadRoot(result_register(), Heap::kFalseValueRootIndex); |
| 620 __ bind(&done); | 638 __ bind(&done); |
| 621 } | 639 } |
| 622 | 640 |
| 623 | 641 |
| 624 void FullCodeGenerator::StackValueContext::Plug( | 642 void FullCodeGenerator::StackValueContext::Plug( |
| 625 Label* materialize_true, | 643 Label* materialize_true, |
| 626 Label* materialize_false) const { | 644 Label* materialize_false) const { |
| 627 Label done; | 645 Label done; |
| 628 __ bind(materialize_true); | 646 __ bind(materialize_true); |
| 629 __ LoadRoot(ip, Heap::kTrueValueRootIndex); | 647 __ LoadRoot(ip, Heap::kTrueValueRootIndex); |
| 630 __ push(ip); | 648 __ push(ip); |
| 631 __ jmp(&done); | 649 __ jmp_near(&done); |
| 632 __ bind(materialize_false); | 650 __ bind(materialize_false); |
| 633 __ LoadRoot(ip, Heap::kFalseValueRootIndex); | 651 __ LoadRoot(ip, Heap::kFalseValueRootIndex); |
| 634 __ push(ip); | 652 __ push(ip); |
| 635 __ bind(&done); | 653 __ bind(&done); |
| 636 } | 654 } |
| 637 | 655 |
| 638 | 656 |
| 639 void FullCodeGenerator::TestContext::Plug(Label* materialize_true, | 657 void FullCodeGenerator::TestContext::Plug(Label* materialize_true, |
| 640 Label* materialize_false) const { | 658 Label* materialize_false) const { |
| 641 ASSERT(materialize_true == true_label_); | 659 ASSERT(materialize_true == true_label_); |
| (...skipping 30 matching lines...) |
| 672 } else { | 690 } else { |
| 673 if (false_label_ != fall_through_) __ b(false_label_); | 691 if (false_label_ != fall_through_) __ b(false_label_); |
| 674 } | 692 } |
| 675 } | 693 } |
| 676 | 694 |
| 677 | 695 |
| 678 void FullCodeGenerator::DoTest(Expression* condition, | 696 void FullCodeGenerator::DoTest(Expression* condition, |
| 679 Label* if_true, | 697 Label* if_true, |
| 680 Label* if_false, | 698 Label* if_false, |
| 681 Label* fall_through) { | 699 Label* fall_through) { |
| 682 ToBooleanStub stub(result_register()); | 700 // TODO(STM): can be removed! |
| 683 __ CallStub(&stub); | 701 if (CpuFeatures::IsSupported(FPU)) { |
| 684 __ tst(result_register(), result_register()); | 702 ToBooleanStub stub(result_register()); |
| 703 __ CallStub(&stub); |
| 704 __ tst(result_register(), result_register()); |
| 705 } else { |
| 706 // Call the runtime to find the boolean value of the source and then |
| 707 // translate it into control flow to the pair of labels. |
| 708 __ push(result_register()); |
| 709 __ CallRuntime(Runtime::kToBool, 1); |
| 710 __ LoadRoot(ip, Heap::kFalseValueRootIndex); |
| 711 __ cmp(r0, ip); |
| 712 } |
| 685 Split(ne, if_true, if_false, fall_through); | 713 Split(ne, if_true, if_false, fall_through); |
| 686 } | 714 } |
| 687 | 715 |
| 688 | 716 |
| 689 void FullCodeGenerator::Split(Condition cond, | 717 void FullCodeGenerator::Split(Condition cond, |
| 690 Label* if_true, | 718 Label* if_true, |
| 691 Label* if_false, | 719 Label* if_false, |
| 692 Label* fall_through) { | 720 Label* fall_through) { |
| 721 // We use ne for inverting conditions. |
| 722 ASSERT(cond == ne || cond == eq); |
| 693 if (if_false == fall_through) { | 723 if (if_false == fall_through) { |
| 694 __ b(cond, if_true); | 724 __ b(cond, if_true); |
| 695 } else if (if_true == fall_through) { | 725 } else if (if_true == fall_through) { |
| 696 __ b(NegateCondition(cond), if_false); | 726 __ b(NegateCondition(cond), if_false); |
| 697 } else { | 727 } else { |
| 728 // TODO(stm): add a special case for two jumps in a row |
| 698 __ b(cond, if_true); | 729 __ b(cond, if_true); |
| 699 __ b(if_false); | 730 __ b(if_false); |
| 700 } | 731 } |
| 701 } | 732 } |
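
A usage sketch tying `DoTest` and `Split` together on SH4: every comparison leaves its result in the T bit, so `Split` only ever sees `eq`/`ne` (hence the new ASSERT) and compiles them to `bt`/`bf`. The call site below is hypothetical, for illustration only:

```cpp
// Test "r0 is not the false value" and dispatch on the outcome.
//   __ LoadRoot(ip, Heap::kFalseValueRootIndex);
//   __ cmp(r0, ip);                             // T = (r0 == false)
//   Split(ne, if_true, if_false, fall_through); // bf/bt as layout requires
```
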
| 702 | 733 |
| 703 | 734 |
| 704 MemOperand FullCodeGenerator::StackOperand(Variable* var) { | 735 MemOperand FullCodeGenerator::StackOperand(Variable* var) { |
| 705 ASSERT(var->IsStackAllocated()); | 736 ASSERT(var->IsStackAllocated()); |
| 706 // Offset is negative because higher indexes are at lower addresses. | 737 // Offset is negative because higher indexes are at lower addresses. |
| 707 int offset = -var->index() * kPointerSize; | 738 int offset = -var->index() * kPointerSize; |
| (...skipping 29 matching lines...) |
| 737 void FullCodeGenerator::SetVar(Variable* var, | 768 void FullCodeGenerator::SetVar(Variable* var, |
| 738 Register src, | 769 Register src, |
| 739 Register scratch0, | 770 Register scratch0, |
| 740 Register scratch1) { | 771 Register scratch1) { |
| 741 ASSERT(var->IsContextSlot() || var->IsStackAllocated()); | 772 ASSERT(var->IsContextSlot() || var->IsStackAllocated()); |
| 742 ASSERT(!scratch0.is(src)); | 773 ASSERT(!scratch0.is(src)); |
| 743 ASSERT(!scratch0.is(scratch1)); | 774 ASSERT(!scratch0.is(scratch1)); |
| 744 ASSERT(!scratch1.is(src)); | 775 ASSERT(!scratch1.is(src)); |
| 745 MemOperand location = VarOperand(var, scratch0); | 776 MemOperand location = VarOperand(var, scratch0); |
| 746 __ str(src, location); | 777 __ str(src, location); |
| 747 | |
| 748 // Emit the write barrier code if the location is in the heap. | 778 // Emit the write barrier code if the location is in the heap. |
| 749 if (var->IsContextSlot()) { | 779 if (var->IsContextSlot()) { |
| 750 __ RecordWriteContextSlot(scratch0, | 780 __ RecordWriteContextSlot(scratch0, |
| 751 location.offset(), | 781 location.offset(), |
| 752 src, | 782 src, |
| 753 scratch1, | 783 scratch1, |
| 754 kLRHasBeenSaved, | 784 kLRHasBeenSaved, |
| 755 kDontSaveFPRegs); | 785 kDontSaveFPRegs); |
| 756 } | 786 } |
| 757 } | 787 } |
| (...skipping 325 matching lines...) |
| 1083 __ cmp(r0, ip); | 1113 __ cmp(r0, ip); |
| 1084 __ b(eq, &exit); | 1114 __ b(eq, &exit); |
| 1085 Register null_value = r5; | 1115 Register null_value = r5; |
| 1086 __ LoadRoot(null_value, Heap::kNullValueRootIndex); | 1116 __ LoadRoot(null_value, Heap::kNullValueRootIndex); |
| 1087 __ cmp(r0, null_value); | 1117 __ cmp(r0, null_value); |
| 1088 __ b(eq, &exit); | 1118 __ b(eq, &exit); |
| 1089 | 1119 |
| 1090 PrepareForBailoutForId(stmt->PrepareId(), TOS_REG); | 1120 PrepareForBailoutForId(stmt->PrepareId(), TOS_REG); |
| 1091 | 1121 |
| 1092 // Convert the object to a JS object. | 1122 // Convert the object to a JS object. |
| 1093 Label convert, done_convert; | 1123 Label convert; |
| 1094 __ JumpIfSmi(r0, &convert); | 1124 Label done_convert; |
| 1095 __ CompareObjectType(r0, r1, r1, FIRST_SPEC_OBJECT_TYPE); | 1125 __ JumpIfSmi(r0, &convert, Label::kNear); |
| 1096 __ b(ge, &done_convert); | 1126 __ CompareObjectType(r0, r1, r1, FIRST_SPEC_OBJECT_TYPE, ge); |
| 1127 __ bt_near(&done_convert); |
| 1097 __ bind(&convert); | 1128 __ bind(&convert); |
| 1098 __ push(r0); | 1129 __ push(r0); |
| 1099 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); | 1130 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); |
| 1100 __ bind(&done_convert); | 1131 __ bind(&done_convert); |
| 1101 __ push(r0); | 1132 __ push(r0); |
| 1102 | 1133 |
| 1103 // Check for proxies. | 1134 // Check for proxies. |
| 1104 Label call_runtime; | 1135 Label call_runtime; |
| 1105 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE); | 1136 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE); |
| 1106 __ CompareObjectType(r0, r1, r1, LAST_JS_PROXY_TYPE); | 1137 __ CompareObjectType(r0, r1, r1, LAST_JS_PROXY_TYPE, gt); |
| 1107 __ b(le, &call_runtime); | 1138 __ bf(&call_runtime); |
| 1108 | 1139 |
| 1109 // Check cache validity in generated code. This is a fast case for | 1140 // Check cache validity in generated code. This is a fast case for |
| 1110 // the JSObject::IsSimpleEnum cache validity checks. If we cannot | 1141 // the JSObject::IsSimpleEnum cache validity checks. If we cannot |
| 1111 // guarantee cache validity, call the runtime system to check cache | 1142 // guarantee cache validity, call the runtime system to check cache |
| 1112 // validity or get the property names in a fixed array. | 1143 // validity or get the property names in a fixed array. |
| 1113 __ CheckEnumCache(null_value, &call_runtime); | 1144 __ CheckEnumCache(null_value, &call_runtime); |
| 1114 | 1145 |
| 1115 // The enum cache is valid. Load the map of the object being | 1146 // The enum cache is valid. Load the map of the object being |
| 1116 // iterated over and use the cache for the iteration. | 1147 // iterated over and use the cache for the iteration. |
| 1117 Label use_cache; | 1148 Label use_cache; |
| 1118 __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset)); | 1149 __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset)); |
| 1119 __ b(&use_cache); | 1150 __ b_near(&use_cache); |
| 1120 | 1151 |
| 1121 // Get the set of properties to enumerate. | 1152 // Get the set of properties to enumerate. |
| 1122 __ bind(&call_runtime); | 1153 __ bind(&call_runtime); |
| 1123 __ push(r0); // Duplicate the enumerable object on the stack. | 1154 __ push(r0); // Duplicate the enumerable object on the stack. |
| 1124 __ CallRuntime(Runtime::kGetPropertyNamesFast, 1); | 1155 __ CallRuntime(Runtime::kGetPropertyNamesFast, 1); |
| 1125 | 1156 |
| 1126 // If we got a map from the runtime call, we can do a fast | 1157 // If we got a map from the runtime call, we can do a fast |
| 1127 // modification check. Otherwise, we got a fixed array, and we have | 1158 // modification check. Otherwise, we got a fixed array, and we have |
| 1128 // to do a slow check. | 1159 // to do a slow check. |
| 1129 Label fixed_array; | 1160 Label fixed_array; |
| 1130 __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset)); | 1161 __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset)); |
| 1131 __ LoadRoot(ip, Heap::kMetaMapRootIndex); | 1162 __ LoadRoot(ip, Heap::kMetaMapRootIndex); |
| 1132 __ cmp(r2, ip); | 1163 __ cmpeq(r2, ip); |
| 1133 __ b(ne, &fixed_array); | 1164 __ bf_near(&fixed_array); |
| 1134 | 1165 |
| 1135 // We got a map in register r0. Get the enumeration cache from it. | 1166 // We got a map in register r0. Get the enumeration cache from it. |
| 1136 Label no_descriptors; | 1167 Label no_descriptors; |
| 1137 __ bind(&use_cache); | 1168 __ bind(&use_cache); |
| 1138 | 1169 |
| 1139 __ EnumLength(r1, r0); | 1170 __ EnumLength(r1, r0); |
| 1140 __ cmp(r1, Operand(Smi::FromInt(0))); | 1171 __ cmp(r1, Operand(Smi::FromInt(0))); |
| 1141 __ b(eq, &no_descriptors); | 1172 __ b(eq, &no_descriptors); |
| 1142 | 1173 |
| 1143 __ LoadInstanceDescriptors(r0, r2); | 1174 __ LoadInstanceDescriptors(r0, r2, r4); |
| 1144 __ ldr(r2, FieldMemOperand(r2, DescriptorArray::kEnumCacheOffset)); | 1175 __ ldr(r2, FieldMemOperand(r2, DescriptorArray::kEnumCacheOffset)); |
| 1145 __ ldr(r2, FieldMemOperand(r2, DescriptorArray::kEnumCacheBridgeCacheOffset)); | 1176 __ ldr(r2, FieldMemOperand(r2, DescriptorArray::kEnumCacheBridgeCacheOffset)); |
| 1146 | 1177 |
| 1147 // Set up the four remaining stack slots. | 1178 // Set up the four remaining stack slots. |
| 1148 __ push(r0); // Map. | 1179 __ push(r0); // Map. |
| 1149 __ mov(r0, Operand(Smi::FromInt(0))); | 1180 __ mov(r0, Operand(Smi::FromInt(0))); |
| 1150 // Push enumeration cache, enumeration cache length (as smi) and zero. | 1181 // Push enumeration cache, enumeration cache length (as smi) and zero. |
| 1151 __ Push(r2, r1, r0); | 1182 __ Push(r2, r1, r0); |
| 1152 __ jmp(&loop); | 1183 __ jmp_near(&loop); |
| 1153 | 1184 |
| 1154 __ bind(&no_descriptors); | 1185 __ bind(&no_descriptors); |
| 1155 __ Drop(1); | 1186 __ Drop(1); |
| 1156 __ jmp(&exit); | 1187 __ jmp(&exit); |
| 1157 | 1188 |
| 1158 // We got a fixed array in register r0. Iterate through that. | 1189 // We got a fixed array in register r0. Iterate through that. |
| 1159 Label non_proxy; | 1190 Label non_proxy; |
| 1160 __ bind(&fixed_array); | 1191 __ bind(&fixed_array); |
| 1161 | 1192 |
| 1162 Handle<JSGlobalPropertyCell> cell = | 1193 Handle<JSGlobalPropertyCell> cell = |
| 1163 isolate()->factory()->NewJSGlobalPropertyCell( | 1194 isolate()->factory()->NewJSGlobalPropertyCell( |
| 1164 Handle<Object>( | 1195 Handle<Object>( |
| 1165 Smi::FromInt(TypeFeedbackCells::kForInFastCaseMarker))); | 1196 Smi::FromInt(TypeFeedbackCells::kForInFastCaseMarker))); |
| 1166 RecordTypeFeedbackCell(stmt->ForInFeedbackId(), cell); | 1197 RecordTypeFeedbackCell(stmt->ForInFeedbackId(), cell); |
| 1167 __ LoadHeapObject(r1, cell); | 1198 __ LoadHeapObject(r1, cell); |
| 1168 __ mov(r2, Operand(Smi::FromInt(TypeFeedbackCells::kForInSlowCaseMarker))); | 1199 __ mov(r2, Operand(Smi::FromInt(TypeFeedbackCells::kForInSlowCaseMarker))); |
| 1169 __ str(r2, FieldMemOperand(r1, JSGlobalPropertyCell::kValueOffset)); | 1200 __ str(r2, FieldMemOperand(r1, JSGlobalPropertyCell::kValueOffset)); |
| 1170 | 1201 |
| 1171 __ mov(r1, Operand(Smi::FromInt(1))); // Smi indicates slow check | 1202 __ mov(r1, Operand(Smi::FromInt(1))); // Smi indicates slow check |
| 1172 __ ldr(r2, MemOperand(sp, 0 * kPointerSize)); // Get enumerated object | 1203 __ ldr(r2, MemOperand(sp, 0 * kPointerSize)); // Get enumerated object |
| 1173 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE); | 1204 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE); |
| 1174 __ CompareObjectType(r2, r3, r3, LAST_JS_PROXY_TYPE); | 1205 __ CompareObjectType(r2, r3, r3, LAST_JS_PROXY_TYPE, gt); |
| 1175 __ b(gt, &non_proxy); | 1206 __ bt(&non_proxy); |
| 1176 __ mov(r1, Operand(Smi::FromInt(0))); // Zero indicates proxy | 1207 __ mov(r1, Operand(Smi::FromInt(0))); // Zero indicates proxy |
| 1177 __ bind(&non_proxy); | 1208 __ bind(&non_proxy); |
| 1178 __ Push(r1, r0); // Smi and array | 1209 __ Push(r1, r0); // Smi and array |
| 1179 __ ldr(r1, FieldMemOperand(r0, FixedArray::kLengthOffset)); | 1210 __ ldr(r1, FieldMemOperand(r0, FixedArray::kLengthOffset)); |
| 1180 __ mov(r0, Operand(Smi::FromInt(0))); | 1211 __ mov(r0, Operand(Smi::FromInt(0))); |
| 1181 __ Push(r1, r0); // Fixed array length (as smi) and initial index. | 1212 __ Push(r1, r0); // Fixed array length (as smi) and initial index. |
| 1182 | 1213 |
| 1183 // Generate code for doing the condition check. | 1214 // Generate code for doing the condition check. |
| 1184 PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS); | 1215 PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS); |
| 1185 __ bind(&loop); | 1216 __ bind(&loop); |
| 1186 // Load the current count to r0, load the length to r1. | 1217 // Load the current count to r0, load the length to r1. |
| 1187 __ Ldrd(r0, r1, MemOperand(sp, 0 * kPointerSize)); | 1218 __ Ldrd(r0, r1, MemOperand(sp, 0 * kPointerSize)); |
| 1188 __ cmp(r0, r1); // Compare to the array length. | 1219 __ cmphs(r0, r1); // Compare to the array length. |
| 1189 __ b(hs, loop_statement.break_label()); | 1220 __ bt(loop_statement.break_label()); |
| 1190 | 1221 |
| 1191 // Get the current entry of the array into register r3. | 1222 // Get the current entry of the array into register r3. |
| 1192 __ ldr(r2, MemOperand(sp, 2 * kPointerSize)); | 1223 __ ldr(r2, MemOperand(sp, 2 * kPointerSize)); |
| 1193 __ add(r2, r2, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); | 1224 __ add(r2, r2, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); |
| 1194 __ ldr(r3, MemOperand(r2, r0, LSL, kPointerSizeLog2 - kSmiTagSize)); | 1225 __ lsl(r3, r0, Operand(kPointerSizeLog2 - kSmiTagSize)); |
| 1226 __ ldr(r3, MemOperand(r2, r3)); |
| 1195 | 1227 |
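
ARM's scaled-index addressing (`ldr r3, [r2, r0, LSL #n]`) has no SH4 counterpart, so the port materializes the scaled offset first and then uses two-register addressing. The load's semantics, as a sketch:

```cpp
#include <stdint.h>

// Equivalent of "ldr rd, [base, index, LSL #shift]" on SH4: shift into a
// scratch register, then load from base + scratch.
static int32_t LoadScaled(const uint8_t* base, int32_t index, int shift) {
  int32_t offset = index << shift;  // __ lsl(r3, r0, Operand(shift))
  return *reinterpret_cast<const int32_t*>(base + offset);  // __ ldr(r3, MemOperand(r2, r3))
}
```
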
| 1196 // Get the expected map from the stack or a smi in the | 1228 // Get the expected map from the stack or a zero map in the |
| 1197 // permanent slow case into register r2. | 1229 // permanent slow case into register r2. |
| 1198 __ ldr(r2, MemOperand(sp, 3 * kPointerSize)); | 1230 __ ldr(r2, MemOperand(sp, 3 * kPointerSize)); |
| 1199 | 1231 |
| 1200 // Check if the expected map still matches that of the enumerable. | 1232 // Check if the expected map still matches that of the enumerable. |
| 1201 // If not, we may have to filter the key. | 1233 // If not, we may have to filter the key. |
| 1202 Label update_each; | 1234 Label update_each; |
| 1203 __ ldr(r1, MemOperand(sp, 4 * kPointerSize)); | 1235 __ ldr(r1, MemOperand(sp, 4 * kPointerSize)); |
| 1204 __ ldr(r4, FieldMemOperand(r1, HeapObject::kMapOffset)); | 1236 __ ldr(r4, FieldMemOperand(r1, HeapObject::kMapOffset)); |
| 1205 __ cmp(r4, Operand(r2)); | 1237 __ cmpeq(r4, r2); |
| 1206 __ b(eq, &update_each); | 1238 __ bt_near(&update_each); |
| 1207 | 1239 |
| 1208 // For proxies, no filtering is done. | 1240 // For proxies, no filtering is done. |
| 1209 // TODO(rossberg): What if only a prototype is a proxy? Not specified yet. | 1241 // TODO(rossberg): What if only a prototype is a proxy? Not specified yet. |
| 1210 __ cmp(r2, Operand(Smi::FromInt(0))); | 1242 __ cmp(r2, Operand(Smi::FromInt(0))); |
| 1211 __ b(eq, &update_each); | 1243 __ b(eq, &update_each); |
| 1212 | 1244 |
| 1213 // Convert the entry to a string or (smi) 0 if it isn't a property | 1245 // Convert the entry to a string or (smi) 0 if it isn't a property |
| 1214 // any more. If the property has been removed while iterating, we | 1246 // any more. If the property has been removed while iterating, we |
| 1215 // just skip it. | 1247 // just skip it. |
| 1216 __ push(r1); // Enumerable. | 1248 __ push(r1); // Enumerable. |
| 1217 __ push(r3); // Current entry. | 1249 __ push(r3); // Current entry. |
| 1218 __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION); | 1250 __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION); |
| 1219 __ mov(r3, Operand(r0), SetCC); | 1251 __ mov(r3, r0); |
| 1252 __ tst(r3, r3); |
| 1220 __ b(eq, loop_statement.continue_label()); | 1253 __ b(eq, loop_statement.continue_label()); |
| 1221 | 1254 |
| 1222 // Update the 'each' property or variable from the possibly filtered | 1255 // Update the 'each' property or variable from the possibly filtered |
| 1223 // entry in register r3. | 1256 // entry in register r3. |
| 1224 __ bind(&update_each); | 1257 __ bind(&update_each); |
| 1225 __ mov(result_register(), r3); | 1258 __ mov(result_register(), r3); |
| 1226 // Perform the assignment as if via '='. | 1259 // Perform the assignment as if via '='. |
| 1227 { EffectContext context(this); | 1260 { EffectContext context(this); |
| 1228 EmitAssignment(stmt->each()); | 1261 EmitAssignment(stmt->each()); |
| 1229 } | 1262 } |
| (...skipping 86 matching lines...) |
| 1316 if (s->is_eval_scope()) { | 1349 if (s->is_eval_scope()) { |
| 1317 Label loop, fast; | 1350 Label loop, fast; |
| 1318 if (!current.is(next)) { | 1351 if (!current.is(next)) { |
| 1319 __ Move(next, current); | 1352 __ Move(next, current); |
| 1320 } | 1353 } |
| 1321 __ bind(&loop); | 1354 __ bind(&loop); |
| 1322 // Terminate at native context. | 1355 // Terminate at native context. |
| 1323 __ ldr(temp, FieldMemOperand(next, HeapObject::kMapOffset)); | 1356 __ ldr(temp, FieldMemOperand(next, HeapObject::kMapOffset)); |
| 1324 __ LoadRoot(ip, Heap::kNativeContextMapRootIndex); | 1357 __ LoadRoot(ip, Heap::kNativeContextMapRootIndex); |
| 1325 __ cmp(temp, ip); | 1358 __ cmp(temp, ip); |
| 1326 __ b(eq, &fast); | 1359 __ b(eq, &fast, Label::kNear); |
| 1327 // Check that extension is NULL. | 1360 // Check that extension is NULL. |
| 1328 __ ldr(temp, ContextOperand(next, Context::EXTENSION_INDEX)); | 1361 __ ldr(temp, ContextOperand(next, Context::EXTENSION_INDEX)); |
| 1329 __ tst(temp, temp); | 1362 __ tst(temp, temp); |
| 1330 __ b(ne, slow); | 1363 __ b(ne, slow); |
| 1331 // Load next context in chain. | 1364 // Load next context in chain. |
| 1332 __ ldr(next, ContextOperand(next, Context::PREVIOUS_INDEX)); | 1365 __ ldr(next, ContextOperand(next, Context::PREVIOUS_INDEX)); |
| 1333 __ b(&loop); | 1366 __ b(&loop); |
| 1334 __ bind(&fast); | 1367 __ bind(&fast); |
| 1335 } | 1368 } |
| 1336 | 1369 |
| (...skipping 52 matching lines...) |
| 1389 EmitLoadGlobalCheckExtensions(var, typeof_state, slow); | 1422 EmitLoadGlobalCheckExtensions(var, typeof_state, slow); |
| 1390 __ jmp(done); | 1423 __ jmp(done); |
| 1391 } else if (var->mode() == DYNAMIC_LOCAL) { | 1424 } else if (var->mode() == DYNAMIC_LOCAL) { |
| 1392 Variable* local = var->local_if_not_shadowed(); | 1425 Variable* local = var->local_if_not_shadowed(); |
| 1393 __ ldr(r0, ContextSlotOperandCheckExtensions(local, slow)); | 1426 __ ldr(r0, ContextSlotOperandCheckExtensions(local, slow)); |
| 1394 if (local->mode() == CONST || | 1427 if (local->mode() == CONST || |
| 1395 local->mode() == CONST_HARMONY || | 1428 local->mode() == CONST_HARMONY || |
| 1396 local->mode() == LET) { | 1429 local->mode() == LET) { |
| 1397 __ CompareRoot(r0, Heap::kTheHoleValueRootIndex); | 1430 __ CompareRoot(r0, Heap::kTheHoleValueRootIndex); |
| 1398 if (local->mode() == CONST) { | 1431 if (local->mode() == CONST) { |
| 1399 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq); | 1432 Label skip; |
| 1433 __ bf_near(&skip); |
| 1434 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex); |
| 1435 __ bind(&skip); |
| 1400 } else { // LET || CONST_HARMONY | 1436 } else { // LET || CONST_HARMONY |
| 1401 __ b(ne, done); | 1437 __ b(ne, done); |
| 1402 __ mov(r0, Operand(var->name())); | 1438 __ mov(r0, Operand(var->name())); |
| 1403 __ push(r0); | 1439 __ push(r0); |
| 1404 __ CallRuntime(Runtime::kThrowReferenceError, 1); | 1440 __ CallRuntime(Runtime::kThrowReferenceError, 1); |
| 1405 } | 1441 } |
| 1406 } | 1442 } |
| 1407 __ jmp(done); | 1443 __ jmp(done); |
| 1408 } | 1444 } |
| 1409 } | 1445 } |
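
ARM's predicated `LoadRoot(r0, ..., eq)` has no SH4 analogue, so the port branches around the load instead; the rewrite used in both CONST paths has this shape (a restatement of the code above, not new patch code):

```cpp
//   ARM:  __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);  // predicated
//
//   SH4:  Label skip;            // T was set by CompareRoot (r0 == hole)
//         __ bf(&skip);          // not the hole: keep r0 as is
//         __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
//         __ bind(&skip);
```
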
| 1410 | 1446 |
| 1411 | 1447 |
| 1448 // clobbers: r0, r1, r3 |
| 1449 // live-in: fp, sp, cp |
| 1450 // live-out: fp, sp, cp |
| 1412 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) { | 1451 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) { |
| 1413 // Record position before possible IC call. | 1452 // Record position before possible IC call. |
| 1414 SetSourcePosition(proxy->position()); | 1453 SetSourcePosition(proxy->position()); |
| 1415 Variable* var = proxy->var(); | 1454 Variable* var = proxy->var(); |
| 1416 | 1455 |
| 1417 // Three cases: global variables, lookup variables, and all other types of | 1456 // Three cases: global variables, lookup variables, and all other types of |
| 1418 // variables. | 1457 // variables. |
| 1419 switch (var->location()) { | 1458 switch (var->location()) { |
| 1420 case Variable::UNALLOCATED: { | 1459 case Variable::UNALLOCATED: { |
| 1421 Comment cmnt(masm_, "Global variable"); | 1460 Comment cmnt(masm_, "Global variable"); |
| (...skipping 57 matching lines...) |
| 1479 // binding in harmony mode. | 1518 // binding in harmony mode. |
| 1480 Label done; | 1519 Label done; |
| 1481 __ b(ne, &done); | 1520 __ b(ne, &done); |
| 1482 __ mov(r0, Operand(var->name())); | 1521 __ mov(r0, Operand(var->name())); |
| 1483 __ push(r0); | 1522 __ push(r0); |
| 1484 __ CallRuntime(Runtime::kThrowReferenceError, 1); | 1523 __ CallRuntime(Runtime::kThrowReferenceError, 1); |
| 1485 __ bind(&done); | 1524 __ bind(&done); |
| 1486 } else { | 1525 } else { |
| 1487 // Uninitalized const bindings outside of harmony mode are unholed. | 1526 // Uninitalized const bindings outside of harmony mode are unholed. |
| 1488 ASSERT(var->mode() == CONST); | 1527 ASSERT(var->mode() == CONST); |
| 1489 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq); | 1528 Label skip; |
| 1529 __ bf(&skip); |
| 1530 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex); |
| 1531 __ bind(&skip); |
| 1490 } | 1532 } |
| 1491 context()->Plug(r0); | 1533 context()->Plug(r0); |
| 1492 break; | 1534 break; |
| 1493 } | 1535 } |
| 1494 } | 1536 } |
| 1495 context()->Plug(var); | 1537 context()->Plug(var); |
| 1496 break; | 1538 break; |
| 1497 } | 1539 } |
| 1498 | 1540 |
| 1499 case Variable::LOOKUP: { | 1541 case Variable::LOOKUP: { |
| (...skipping 22 matching lines...) |
| 1522 // r3 = literal index | 1564 // r3 = literal index |
| 1523 // r2 = RegExp pattern | 1565 // r2 = RegExp pattern |
| 1524 // r1 = RegExp flags | 1566 // r1 = RegExp flags |
| 1525 // r0 = RegExp literal clone | 1567 // r0 = RegExp literal clone |
| 1526 __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); | 1568 __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); |
| 1527 __ ldr(r4, FieldMemOperand(r0, JSFunction::kLiteralsOffset)); | 1569 __ ldr(r4, FieldMemOperand(r0, JSFunction::kLiteralsOffset)); |
| 1528 int literal_offset = | 1570 int literal_offset = |
| 1529 FixedArray::kHeaderSize + expr->literal_index() * kPointerSize; | 1571 FixedArray::kHeaderSize + expr->literal_index() * kPointerSize; |
| 1530 __ ldr(r5, FieldMemOperand(r4, literal_offset)); | 1572 __ ldr(r5, FieldMemOperand(r4, literal_offset)); |
| 1531 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex); | 1573 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex); |
| 1532 __ cmp(r5, ip); | 1574 __ cmpeq(r5, ip); |
| 1533 __ b(ne, &materialized); | 1575 __ bf_near(&materialized); |
| 1534 | 1576 |
| 1535 // Create regexp literal using runtime function. | 1577 // Create regexp literal using runtime function. |
| 1536 // Result will be in r0. | 1578 // Result will be in r0. |
| 1537 __ mov(r3, Operand(Smi::FromInt(expr->literal_index()))); | 1579 __ mov(r3, Operand(Smi::FromInt(expr->literal_index()))); |
| 1538 __ mov(r2, Operand(expr->pattern())); | 1580 __ mov(r2, Operand(expr->pattern())); |
| 1539 __ mov(r1, Operand(expr->flags())); | 1581 __ mov(r1, Operand(expr->flags())); |
| 1540 __ Push(r4, r3, r2, r1); | 1582 __ Push(r4, r3, r2, r1); |
| 1541 __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4); | 1583 __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4); |
| 1542 __ mov(r5, r0); | 1584 __ mov(r5, r0); |
| 1543 | 1585 |
| 1544 __ bind(&materialized); | 1586 __ bind(&materialized); |
| 1545 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize; | 1587 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize; |
| 1546 Label allocated, runtime_allocate; | 1588 Label allocated, runtime_allocate; |
| 1547 __ AllocateInNewSpace(size, r0, r2, r3, &runtime_allocate, TAG_OBJECT); | 1589 __ AllocateInNewSpace(size, r0, r2, r3, &runtime_allocate, TAG_OBJECT); |
| 1548 __ jmp(&allocated); | 1590 __ jmp_near(&allocated); |
| 1549 | 1591 |
| 1550 __ bind(&runtime_allocate); | 1592 __ bind(&runtime_allocate); |
| 1551 __ push(r5); | 1593 __ push(r5); |
| 1552 __ mov(r0, Operand(Smi::FromInt(size))); | 1594 __ mov(r0, Operand(Smi::FromInt(size))); |
| 1553 __ push(r0); | 1595 __ push(r0); |
| 1554 __ CallRuntime(Runtime::kAllocateInNewSpace, 1); | 1596 __ CallRuntime(Runtime::kAllocateInNewSpace, 1); |
| 1555 __ pop(r5); | 1597 __ pop(r5); |
| 1556 | 1598 |
| 1557 __ bind(&allocated); | 1599 __ bind(&allocated); |
| 1558 // After this, registers are used as follows: | 1600 // After this, registers are used as follows: |
| (...skipping 364 matching lines...) |
| 1923 | 1965 |
| 1924 Register scratch1 = r2; | 1966 Register scratch1 = r2; |
| 1925 Register scratch2 = r3; | 1967 Register scratch2 = r3; |
| 1926 | 1968 |
| 1927 // Get the arguments. | 1969 // Get the arguments. |
| 1928 Register left = r1; | 1970 Register left = r1; |
| 1929 Register right = r0; | 1971 Register right = r0; |
| 1930 __ pop(left); | 1972 __ pop(left); |
| 1931 | 1973 |
| 1932 // Perform combined smi check on both operands. | 1974 // Perform combined smi check on both operands. |
| 1933 __ orr(scratch1, left, Operand(right)); | 1975 __ orr(scratch1, left, right); |
| 1934 STATIC_ASSERT(kSmiTag == 0); | 1976 STATIC_ASSERT(kSmiTag == 0); |
| 1935 JumpPatchSite patch_site(masm_); | 1977 JumpPatchSite patch_site(masm_); |
| 1936 patch_site.EmitJumpIfSmi(scratch1, &smi_case); | 1978 patch_site.EmitJumpIfSmi(scratch1, &smi_case); |
| 1937 | 1979 |
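
The `orr` above implements the usual combined smi check: with `kSmiTag == 0` in the low bit, OR-ing both operands and testing the tag bit classifies them in one test. Its semantics in plain C++ (a sketch assuming 32-bit words and a 1-bit tag):

```cpp
#include <stdint.h>

// Both values are smis iff neither has the tag bit set, i.e. the OR of
// the two words has a clear low bit.
static bool BothAreSmis(int32_t left, int32_t right) {
  const int32_t kSmiTagMask = 1;               // assumption: 1-bit smi tag
  return ((left | right) & kSmiTagMask) == 0;  // __ orr + EmitJumpIfSmi
}
```
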
| 1938 __ bind(&stub_call); | 1980 __ bind(&stub_call); |
| 1939 BinaryOpStub stub(op, mode); | 1981 BinaryOpStub stub(op, mode); |
| 1940 CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, | 1982 CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, |
| 1941 expr->BinaryOperationFeedbackId()); | 1983 expr->BinaryOperationFeedbackId()); |
| 1942 patch_site.EmitPatchInfo(); | 1984 patch_site.EmitPatchInfo(); |
| 1943 __ jmp(&done); | 1985 __ jmp(&done); |
| 1944 | 1986 |
| 1945 __ bind(&smi_case); | 1987 __ bind(&smi_case); |
| 1946 // Smi case. This code works the same way as the smi-smi case in the type | 1988 // Smi case. This code works the same way as the smi-smi case in the type |
| 1947 // recording binary operation stub, see | 1989 // recording binary operation stub, see |
| 1948 // BinaryOpStub::GenerateSmiSmiOperation for comments. | 1990 // BinaryOpStub::GenerateSmiSmiOperation for comments. |
| 1949 switch (op) { | 1991 switch (op) { |
| 1950 case Token::SAR: | 1992 case Token::SAR: |
| 1951 __ b(&stub_call); | 1993 __ b(&stub_call); |
| 1952 __ GetLeastBitsFromSmi(scratch1, right, 5); | 1994 __ GetLeastBitsFromSmi(scratch1, right, 5); |
| 1953 __ mov(right, Operand(left, ASR, scratch1)); | 1995 __ asr(right, left, scratch1); |
| 1954 __ bic(right, right, Operand(kSmiTagMask)); | 1996 __ bic(right, right, Operand(kSmiTagMask)); |
| 1955 break; | 1997 break; |
| 1956 case Token::SHL: { | 1998 case Token::SHL: { |
| 1957 __ b(&stub_call); | 1999 __ b(&stub_call); |
| 1958 __ SmiUntag(scratch1, left); | 2000 __ SmiUntag(scratch1, left); |
| 1959 __ GetLeastBitsFromSmi(scratch2, right, 5); | 2001 __ GetLeastBitsFromSmi(scratch2, right, 5); |
| 1960 __ mov(scratch1, Operand(scratch1, LSL, scratch2)); | 2002 __ lsl(scratch1, scratch1, scratch2); |
| 1961 __ add(scratch2, scratch1, Operand(0x40000000), SetCC); | 2003 __ add(scratch2, scratch1, Operand(0x40000000)); |
| 1962 __ b(mi, &stub_call); | 2004 __ cmpge(scratch2, Operand(0)); |
| 2005 __ b(f, &stub_call); |
| 1963 __ SmiTag(right, scratch1); | 2006 __ SmiTag(right, scratch1); |
| 1964 break; | 2007 break; |
| 1965 } | 2008 } |
| 1966 case Token::SHR: { | 2009 case Token::SHR: { |
| 1967 __ b(&stub_call); | 2010 __ b(&stub_call); |
| 1968 __ SmiUntag(scratch1, left); | 2011 __ SmiUntag(scratch1, left); |
| 1969 __ GetLeastBitsFromSmi(scratch2, right, 5); | 2012 __ GetLeastBitsFromSmi(scratch2, right, 5); |
| 1970 __ mov(scratch1, Operand(scratch1, LSR, scratch2)); | 2013 __ lsr(scratch1, scratch1, scratch2); |
| 1971 __ tst(scratch1, Operand(0xc0000000)); | 2014 __ tst(scratch1, Operand(0xc0000000)); |
| 1972 __ b(ne, &stub_call); | 2015 __ b(ne, &stub_call); |
| 1973 __ SmiTag(right, scratch1); | 2016 __ SmiTag(right, scratch1); |
| 1974 break; | 2017 break; |
| 1975 } | 2018 } |
| 1976 case Token::ADD: | 2019 case Token::ADD: |
| 1977 __ add(scratch1, left, Operand(right), SetCC); | 2020 __ addv(scratch1, left, right); |
| 1978 __ b(vs, &stub_call); | 2021 __ b(t, &stub_call); |
| 1979 __ mov(right, scratch1); | 2022 __ mov(right, scratch1); |
| 1980 break; | 2023 break; |
| 1981 case Token::SUB: | 2024 case Token::SUB: |
| 1982 __ sub(scratch1, left, Operand(right), SetCC); | 2025 __ subv(scratch1, left, right); |
| 1983 __ b(vs, &stub_call); | 2026 __ b(t, &stub_call); |
| 1984 __ mov(right, scratch1); | 2027 __ mov(right, scratch1); |
| 1985 break; | 2028 break; |
| 1986 case Token::MUL: { | 2029 case Token::MUL: { |
| 1987 __ SmiUntag(ip, right); | 2030 __ SmiUntag(ip, right); |
| 1988 __ smull(scratch1, scratch2, left, ip); | 2031 __ dmuls(scratch1, scratch2, left, ip); |
| 1989 __ mov(ip, Operand(scratch1, ASR, 31)); | 2032 __ asr(ip, scratch1, Operand(31)); |
| 1990 __ cmp(ip, Operand(scratch2)); | 2033 __ cmp(ip, scratch2); |
| 1991 __ b(ne, &stub_call); | 2034 __ b(ne, &stub_call); |
| 1992 __ cmp(scratch1, Operand(0)); | 2035 __ tst(scratch1, scratch1); |
| 1993 __ mov(right, Operand(scratch1), LeaveCC, ne); | 2036 __ mov(right, scratch1, ne); |
| 1994 __ b(ne, &done); | 2037 __ b(ne, &done); |
| 1995 __ add(scratch2, right, Operand(left), SetCC); | 2038 __ add(scratch2, right, left); |
| 1996 __ mov(right, Operand(Smi::FromInt(0)), LeaveCC, pl); | 2039 __ cmpge(scratch2, Operand(0)); |
| 1997 __ b(mi, &stub_call); | 2040 __ mov(right, Operand(Smi::FromInt(0)), t); |
| 2041 __ bf(&stub_call); |
| 1998 break; | 2042 break; |
| 1999 } | 2043 } |
| 2000 case Token::BIT_OR: | 2044 case Token::BIT_OR: |
| 2001 __ orr(right, left, Operand(right)); | 2045 __ orr(right, left, right); |
| 2002 break; | 2046 break; |
| 2003 case Token::BIT_AND: | 2047 case Token::BIT_AND: |
| 2004 __ and_(right, left, Operand(right)); | 2048 __ land(right, left, right); |
| 2005 break; | 2049 break; |
| 2006 case Token::BIT_XOR: | 2050 case Token::BIT_XOR: |
| 2007 __ eor(right, left, Operand(right)); | 2051 __ eor(right, left, right); |
| 2008 break; | 2052 break; |
| 2009 default: | 2053 default: |
| 2010 UNREACHABLE(); | 2054 UNREACHABLE(); |
| 2011 } | 2055 } |
| 2012 | 2056 |
| 2013 __ bind(&done); | 2057 __ bind(&done); |
| 2014 context()->Plug(r0); | 2058 context()->Plug(r0); |
| 2015 } | 2059 } |
| 2016 | 2060 |
| 2017 | 2061 |
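Two idioms in EmitInlineSmiBinaryOp above are worth spelling out. First, the combined smi check: smis carry a zero tag bit, so OR-ing both operands and testing the tag classifies the pair in one branch. Second, overflow detection: where ARM pairs flag-setting arithmetic (add ..., SetCC) with b(vs), the SH4 port uses addv/subv, which the following b(t, &stub_call) suggests set the T bit on signed overflow. A minimal sketch of both, using the names from this file:

    // (left | right) has a zero tag bit iff both operands are smis.
    __ orr(scratch1, left, right);
    patch_site.EmitJumpIfSmi(scratch1, &smi_case);
    ...
    // addv: add, setting T on signed overflow (assumed SH4 semantics).
    __ addv(scratch1, left, right);
    __ b(t, &stub_call);               // overflow -> generic stub
    __ mov(right, scratch1);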
| (...skipping 77 matching lines...) |
| 2095 : isolate()->builtins()->StoreIC_Initialize_Strict(); | 2139 : isolate()->builtins()->StoreIC_Initialize_Strict(); |
| 2096 CallIC(ic, RelocInfo::CODE_TARGET_CONTEXT); | 2140 CallIC(ic, RelocInfo::CODE_TARGET_CONTEXT); |
| 2097 | 2141 |
| 2098 } else if (op == Token::INIT_CONST) { | 2142 } else if (op == Token::INIT_CONST) { |
| 2099 // Const initializers need a write barrier. | 2143 // Const initializers need a write barrier. |
| 2100 ASSERT(!var->IsParameter()); // No const parameters. | 2144 ASSERT(!var->IsParameter()); // No const parameters. |
| 2101 if (var->IsStackLocal()) { | 2145 if (var->IsStackLocal()) { |
| 2102 Label skip; | 2146 Label skip; |
| 2103 __ ldr(r1, StackOperand(var)); | 2147 __ ldr(r1, StackOperand(var)); |
| 2104 __ CompareRoot(r1, Heap::kTheHoleValueRootIndex); | 2148 __ CompareRoot(r1, Heap::kTheHoleValueRootIndex); |
| 2105 __ b(ne, &skip); | 2149 __ b(ne, &skip, Label::kNear); |
| 2106 __ str(result_register(), StackOperand(var)); | 2150 __ str(result_register(), StackOperand(var)); |
| 2107 __ bind(&skip); | 2151 __ bind(&skip); |
| 2108 } else { | 2152 } else { |
| 2109 ASSERT(var->IsContextSlot() || var->IsLookupSlot()); | 2153 ASSERT(var->IsContextSlot() || var->IsLookupSlot()); |
| 2110 // Like var declarations, const declarations are hoisted to function | 2154 // Like var declarations, const declarations are hoisted to function |
| 2111 // scope. However, unlike var initializers, const initializers are | 2155 // scope. However, unlike var initializers, const initializers are |
| 2112 // able to drill a hole to that function context, even from inside a | 2156 // able to drill a hole to that function context, even from inside a |
| 2113 // 'with' context. We thus bypass the normal static scope lookup for | 2157 // 'with' context. We thus bypass the normal static scope lookup for |
| 2114 // var->IsContextSlot(). | 2158 // var->IsContextSlot(). |
| 2115 __ push(r0); | 2159 __ push(r0); |
| 2116 __ mov(r0, Operand(var->name())); | 2160 __ mov(r0, Operand(var->name())); |
| 2117 __ Push(cp, r0); // Context and name. | 2161 __ Push(cp, r0); // Context and name. |
| 2118 __ CallRuntime(Runtime::kInitializeConstContextSlot, 3); | 2162 __ CallRuntime(Runtime::kInitializeConstContextSlot, 3); |
| 2119 } | 2163 } |
| 2120 | 2164 |
| 2121 } else if (var->mode() == LET && op != Token::INIT_LET) { | 2165 } else if (var->mode() == LET && op != Token::INIT_LET) { |
| 2122 // Non-initializing assignment to let variable needs a write barrier. | 2166 // Non-initializing assignment to let variable needs a write barrier. |
| 2123 if (var->IsLookupSlot()) { | 2167 if (var->IsLookupSlot()) { |
| 2124 __ push(r0); // Value. | 2168 __ push(r0); // Value. |
| 2125 __ mov(r1, Operand(var->name())); | 2169 __ mov(r1, Operand(var->name())); |
| 2126 __ mov(r0, Operand(Smi::FromInt(language_mode()))); | 2170 __ mov(r0, Operand(Smi::FromInt(language_mode()))); |
| 2127 __ Push(cp, r1, r0); // Context, name, strict mode. | 2171 __ Push(cp, r1, r0); // Context, name, strict mode. |
| 2128 __ CallRuntime(Runtime::kStoreContextSlot, 4); | 2172 __ CallRuntime(Runtime::kStoreContextSlot, 4); |
| 2129 } else { | 2173 } else { |
| 2130 ASSERT(var->IsStackAllocated() || var->IsContextSlot()); | 2174 ASSERT(var->IsStackAllocated() || var->IsContextSlot()); |
| 2131 Label assign; | 2175 Label assign; |
| 2132 MemOperand location = VarOperand(var, r1); | 2176 MemOperand location = VarOperand(var, r1); |
| 2133 __ ldr(r3, location); | 2177 __ ldr(r3, location); |
| 2134 __ CompareRoot(r3, Heap::kTheHoleValueRootIndex); | 2178 __ CompareRoot(r3, Heap::kTheHoleValueRootIndex); |
| 2135 __ b(ne, &assign); | 2179 __ b(ne, &assign, Label::kNear); |
| 2136 __ mov(r3, Operand(var->name())); | 2180 __ mov(r3, Operand(var->name())); |
| 2137 __ push(r3); | 2181 __ push(r3); |
| 2138 __ CallRuntime(Runtime::kThrowReferenceError, 1); | 2182 __ CallRuntime(Runtime::kThrowReferenceError, 1); |
| 2139 // Perform the assignment. | 2183 // Perform the assignment. |
| 2140 __ bind(&assign); | 2184 __ bind(&assign); |
| 2141 __ str(result_register(), location); | 2185 __ str(result_register(), location); |
| 2142 if (var->IsContextSlot()) { | 2186 if (var->IsContextSlot()) { |
| 2143 // RecordWrite may destroy all its register arguments. | 2187 // RecordWrite may destroy all its register arguments. |
| 2144 __ mov(r3, result_register()); | 2188 __ mov(r3, result_register()); |
| 2145 int offset = Context::SlotOffset(var->index()); | 2189 int offset = Context::SlotOffset(var->index()); |
| (...skipping 89 matching lines...) |
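A change that recurs throughout this port: branches to close labels gain a Label::kNear hint, as in __ b(ne, &skip, Label::kNear) above. SH4 conditional branches (bt/bf) encode only a short 8-bit displacement (about ±256 bytes), so the hint presumably lets the assembler emit the compact form directly instead of the general branch-around sequence. The hint is an optimization, not a semantic change:

    __ cmp(r1, r2);                    // comparison sets the T bit
    __ b(ne, &skip, Label::kNear);     // known close: one short bf
    __ b(ne, &far_away);               // unhinted: may branch around a bra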
| 2235 EmitKeyedPropertyLoad(expr); | 2279 EmitKeyedPropertyLoad(expr); |
| 2236 context()->Plug(r0); | 2280 context()->Plug(r0); |
| 2237 } | 2281 } |
| 2238 } | 2282 } |
| 2239 | 2283 |
| 2240 | 2284 |
| 2241 void FullCodeGenerator::CallIC(Handle<Code> code, | 2285 void FullCodeGenerator::CallIC(Handle<Code> code, |
| 2242 RelocInfo::Mode rmode, | 2286 RelocInfo::Mode rmode, |
| 2243 TypeFeedbackId ast_id) { | 2287 TypeFeedbackId ast_id) { |
| 2244 ic_total_count_++; | 2288 ic_total_count_++; |
| 2245 // All calls must have a predictable size in full-codegen code to ensure that | 2289 __ Call(code, rmode, ast_id); |
| 2246 // the debugger can patch them correctly. | |
| 2247 __ Call(code, rmode, ast_id, al, NEVER_INLINE_TARGET_ADDRESS); | |
| 2248 } | 2290 } |
| 2249 | 2291 |
| 2250 void FullCodeGenerator::EmitCallWithIC(Call* expr, | 2292 void FullCodeGenerator::EmitCallWithIC(Call* expr, |
| 2251 Handle<Object> name, | 2293 Handle<Object> name, |
| 2252 RelocInfo::Mode mode) { | 2294 RelocInfo::Mode mode) { |
| 2253 // Code common for calls using the IC. | 2295 // Code common for calls using the IC. |
| 2254 ZoneList<Expression*>* args = expr->arguments(); | 2296 ZoneList<Expression*>* args = expr->arguments(); |
| 2255 int arg_count = args->length(); | 2297 int arg_count = args->length(); |
| 2256 { PreservePositionScope scope(masm()->positions_recorder()); | 2298 { PreservePositionScope scope(masm()->positions_recorder()); |
| 2257 for (int i = 0; i < arg_count; i++) { | 2299 for (int i = 0; i < arg_count; i++) { |
| (...skipping 177 matching lines...) |
| 2435 __ mov(r2, Operand(proxy->name())); | 2477 __ mov(r2, Operand(proxy->name())); |
| 2436 __ push(r2); | 2478 __ push(r2); |
| 2437 __ CallRuntime(Runtime::kLoadContextSlot, 2); | 2479 __ CallRuntime(Runtime::kLoadContextSlot, 2); |
| 2438 __ Push(r0, r1); // Function, receiver. | 2480 __ Push(r0, r1); // Function, receiver. |
| 2439 | 2481 |
| 2440 // If fast case code has been generated, emit code to push the | 2482 // If fast case code has been generated, emit code to push the |
| 2441 // function and receiver and have the slow path jump around this | 2483 // function and receiver and have the slow path jump around this |
| 2442 // code. | 2484 // code. |
| 2443 if (done.is_linked()) { | 2485 if (done.is_linked()) { |
| 2444 Label call; | 2486 Label call; |
| 2445 __ b(&call); | 2487 __ b_near(&call); |
| 2446 __ bind(&done); | 2488 __ bind(&done); |
| 2447 // Push function. | 2489 // Push function. |
| 2448 __ push(r0); | 2490 __ push(r0); |
| 2449 // The receiver is implicitly the global receiver. Indicate this | 2491 // The receiver is implicitly the global receiver. Indicate this |
| 2450 // by passing the hole to the call function stub. | 2492 // by passing the hole to the call function stub. |
| 2451 __ LoadRoot(r1, Heap::kTheHoleValueRootIndex); | 2493 __ LoadRoot(r1, Heap::kTheHoleValueRootIndex); |
| 2452 __ push(r1); | 2494 __ push(r1); |
| 2453 __ bind(&call); | 2495 __ bind(&call); |
| 2454 } | 2496 } |
| 2455 | 2497 |
| (...skipping 131 matching lines...) |
| 2587 __ JumpIfSmi(r0, if_false); | 2629 __ JumpIfSmi(r0, if_false); |
| 2588 __ LoadRoot(ip, Heap::kNullValueRootIndex); | 2630 __ LoadRoot(ip, Heap::kNullValueRootIndex); |
| 2589 __ cmp(r0, ip); | 2631 __ cmp(r0, ip); |
| 2590 __ b(eq, if_true); | 2632 __ b(eq, if_true); |
| 2591 __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset)); | 2633 __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset)); |
| 2592 // Undetectable objects behave like undefined when tested with typeof. | 2634 // Undetectable objects behave like undefined when tested with typeof. |
| 2593 __ ldrb(r1, FieldMemOperand(r2, Map::kBitFieldOffset)); | 2635 __ ldrb(r1, FieldMemOperand(r2, Map::kBitFieldOffset)); |
| 2594 __ tst(r1, Operand(1 << Map::kIsUndetectable)); | 2636 __ tst(r1, Operand(1 << Map::kIsUndetectable)); |
| 2595 __ b(ne, if_false); | 2637 __ b(ne, if_false); |
| 2596 __ ldrb(r1, FieldMemOperand(r2, Map::kInstanceTypeOffset)); | 2638 __ ldrb(r1, FieldMemOperand(r2, Map::kInstanceTypeOffset)); |
| 2597 __ cmp(r1, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE)); | 2639 __ cmpge(r1, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE)); |
| 2598 __ b(lt, if_false); | 2640 __ bf(if_false); |
| 2599 __ cmp(r1, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE)); | 2641 __ cmpgt(r1, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE)); |
| 2600 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); | 2642 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); |
| 2601 Split(le, if_true, if_false, fall_through); | 2643 Split(ne, if_true, if_false, fall_through); |
| 2602 | 2644 |
| 2603 context()->Plug(if_true, if_false); | 2645 context()->Plug(if_true, if_false); |
| 2604 } | 2646 } |
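The rewritten range check above distills the core ARM-to-SH4 translation problem: SH4 comparisons produce a single T bit rather than NZCV flags, so each relation needs its own compare instruction (cmpge, cmpgt, ...) followed by bt/bf, and one cmp cannot feed several differently-conditioned branches. Hence the mechanical rewrite used in this function:

    // ARM: one flag-setting cmp, relation chosen at the branch.
    //   __ cmp(r1, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
    //   __ b(lt, if_false);
    // SH4: the relation is baked into the compare; T holds the answer.
    __ cmpge(r1, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
    __ bf(if_false);                   // T clear means r1 < FIRST_...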
| 2605 | 2647 |
| 2606 | 2648 |
| 2607 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) { | 2649 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) { |
| 2608 ZoneList<Expression*>* args = expr->arguments(); | 2650 ZoneList<Expression*>* args = expr->arguments(); |
| 2609 ASSERT(args->length() == 1); | 2651 ASSERT(args->length() == 1); |
| 2610 | 2652 |
| 2611 VisitForAccumulatorValue(args->at(0)); | 2653 VisitForAccumulatorValue(args->at(0)); |
| 2612 | 2654 |
| 2613 Label materialize_true, materialize_false; | 2655 Label materialize_true, materialize_false; |
| 2614 Label* if_true = NULL; | 2656 Label* if_true = NULL; |
| 2615 Label* if_false = NULL; | 2657 Label* if_false = NULL; |
| 2616 Label* fall_through = NULL; | 2658 Label* fall_through = NULL; |
| 2617 context()->PrepareTest(&materialize_true, &materialize_false, | 2659 context()->PrepareTest(&materialize_true, &materialize_false, |
| 2618 &if_true, &if_false, &fall_through); | 2660 &if_true, &if_false, &fall_through); |
| 2619 | 2661 |
| 2620 __ JumpIfSmi(r0, if_false); | 2662 __ JumpIfSmi(r0, if_false); |
| 2621 __ CompareObjectType(r0, r1, r1, FIRST_SPEC_OBJECT_TYPE); | 2663 __ CompareObjectType(r0, r1, r1, FIRST_SPEC_OBJECT_TYPE, ge); |
| 2622 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); | 2664 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); |
| 2623 Split(ge, if_true, if_false, fall_through); | 2665 Split(t, if_true, if_false, fall_through); |
| 2624 | 2666 |
| 2625 context()->Plug(if_true, if_false); | 2667 context()->Plug(if_true, if_false); |
| 2626 } | 2668 } |
| 2627 | 2669 |
| 2628 | 2670 |
| 2629 void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) { | 2671 void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) { |
| 2630 ZoneList<Expression*>* args = expr->arguments(); | 2672 ZoneList<Expression*>* args = expr->arguments(); |
| 2631 ASSERT(args->length() == 1); | 2673 ASSERT(args->length() == 1); |
| 2632 | 2674 |
| 2633 VisitForAccumulatorValue(args->at(0)); | 2675 VisitForAccumulatorValue(args->at(0)); |
| (...skipping 23 matching lines...) |
| 2657 | 2699 |
| 2658 VisitForAccumulatorValue(args->at(0)); | 2700 VisitForAccumulatorValue(args->at(0)); |
| 2659 | 2701 |
| 2660 Label materialize_true, materialize_false; | 2702 Label materialize_true, materialize_false; |
| 2661 Label* if_true = NULL; | 2703 Label* if_true = NULL; |
| 2662 Label* if_false = NULL; | 2704 Label* if_false = NULL; |
| 2663 Label* fall_through = NULL; | 2705 Label* fall_through = NULL; |
| 2664 context()->PrepareTest(&materialize_true, &materialize_false, | 2706 context()->PrepareTest(&materialize_true, &materialize_false, |
| 2665 &if_true, &if_false, &fall_through); | 2707 &if_true, &if_false, &fall_through); |
| 2666 | 2708 |
| 2667 __ AssertNotSmi(r0); | 2709 if (generate_debug_code_) __ AbortIfSmi(r0); |
| 2668 | 2710 |
| 2669 __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset)); | 2711 __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset)); |
| 2670 __ ldrb(ip, FieldMemOperand(r1, Map::kBitField2Offset)); | 2712 __ ldrb(ip, FieldMemOperand(r1, Map::kBitField2Offset)); |
| 2671 __ tst(ip, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf)); | 2713 __ tst(ip, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf)); |
| 2672 __ b(ne, if_true); | 2714 __ b(ne, if_true); |
| 2673 | 2715 |
| 2674 // Check for fast case object. Generate false result for slow case object. | 2716 // Check for fast case object. Generate false result for slow case object. |
| 2675 __ ldr(r2, FieldMemOperand(r0, JSObject::kPropertiesOffset)); | 2717 __ ldr(r2, FieldMemOperand(r0, JSObject::kPropertiesOffset)); |
| 2676 __ ldr(r2, FieldMemOperand(r2, HeapObject::kMapOffset)); | 2718 __ ldr(r2, FieldMemOperand(r2, HeapObject::kMapOffset)); |
| 2677 __ LoadRoot(ip, Heap::kHashTableMapRootIndex); | 2719 __ LoadRoot(ip, Heap::kHashTableMapRootIndex); |
| 2678 __ cmp(r2, ip); | 2720 __ cmp(r2, ip); |
| 2679 __ b(eq, if_false); | 2721 __ b(eq, if_false); |
| 2680 | 2722 |
| 2681 // Look for valueOf symbol in the descriptor array, and indicate false if | 2723 // Look for valueOf symbol in the descriptor array, and indicate false if |
| 2682 // found. Since we omit an enumeration index check, if it is added via a | 2724 // found. Since we omit an enumeration index check, if it is added via a |
| 2683 // transition that shares its descriptor array, this is a false positive. | 2725 // transition that shares its descriptor array, this is a false positive. |
| 2684 Label entry, loop, done; | 2726 Label entry, loop, done; |
| 2685 | 2727 |
| 2686 // Skip loop if no descriptors are valid. | 2728 // Skip loop if no descriptors are valid. |
| 2687 __ NumberOfOwnDescriptors(r3, r1); | 2729 __ NumberOfOwnDescriptors(r3, r1); |
| 2688 __ cmp(r3, Operand(0)); | 2730 __ cmp(r3, Operand(0)); |
| 2689 __ b(eq, &done); | 2731 __ b(eq, &done); |
| 2690 | 2732 |
| 2691 __ LoadInstanceDescriptors(r1, r4); | 2733 __ LoadInstanceDescriptors(r1, r4, r2); |
| 2692 // r4: descriptor array. | 2734 // r4: descriptor array. |
| 2693 // r3: valid entries in the descriptor array. | 2735 // r3: valid entries in the descriptor array. |
| 2694 STATIC_ASSERT(kSmiTag == 0); | 2736 STATIC_ASSERT(kSmiTag == 0); |
| 2695 STATIC_ASSERT(kSmiTagSize == 1); | 2737 STATIC_ASSERT(kSmiTagSize == 1); |
| 2696 STATIC_ASSERT(kPointerSize == 4); | 2738 STATIC_ASSERT(kPointerSize == 4); |
| 2697 __ mov(ip, Operand(DescriptorArray::kDescriptorSize)); | 2739 __ mov(ip, Operand(DescriptorArray::kDescriptorSize)); |
| 2698 __ mul(r3, r3, ip); | 2740 __ mul(r3, r3, ip); |
| 2699 // Calculate location of the first key name. | 2741 // Calculate location of the first key name. |
| 2700 __ add(r4, r4, Operand(DescriptorArray::kFirstOffset - kHeapObjectTag)); | 2742 __ add(r4, |
| 2701 // Calculate the end of the descriptor array. | 2743 r4, |
| 2702 __ mov(r2, r4); | 2744 Operand(FixedArray::kHeaderSize - kHeapObjectTag + |
| 2703 __ add(r2, r2, Operand(r3, LSL, kPointerSizeLog2 - kSmiTagSize)); | 2745 DescriptorArray::kFirstIndex * kPointerSize)); |
| 2704 | |
| 2705 // Loop through all the keys in the descriptor array. If one of these is the | 2746 // Loop through all the keys in the descriptor array. If one of these is the |
| 2706 // symbol valueOf the result is false. | 2747 // symbol valueOf the result is false. |
| 2707 // The use of ip to store the valueOf symbol assumes that it is not otherwise | 2748 // The use of ip to store the valueOf symbol assumes that it is not otherwise |
| 2708 // used in the loop below. | 2749 // used in the loop below. |
| 2709 __ mov(ip, Operand(FACTORY->value_of_symbol())); | 2750 __ mov(ip, Operand(FACTORY->value_of_symbol())); |
| 2710 __ jmp(&entry); | 2751 __ jmp_near(&entry); |
| 2711 __ bind(&loop); | 2752 __ bind(&loop); |
| 2712 __ ldr(r3, MemOperand(r4, 0)); | 2753 __ ldr(r3, MemOperand(r4, 0)); |
| 2713 __ cmp(r3, ip); | 2754 __ cmp(r3, ip); |
| 2714 __ b(eq, if_false); | 2755 __ b(eq, if_false); |
| 2715 __ add(r4, r4, Operand(DescriptorArray::kDescriptorSize * kPointerSize)); | 2756 __ add(r4, r4, Operand(DescriptorArray::kDescriptorSize * kPointerSize)); |
| 2716 __ bind(&entry); | 2757 __ bind(&entry); |
| 2717 __ cmp(r4, Operand(r2)); | 2758 __ cmp(r4, r2); |
| 2718 __ b(ne, &loop); | 2759 __ b(ne, &loop); |
| 2719 | 2760 |
| 2720 __ bind(&done); | 2761 __ bind(&done); |
| 2721 // If a valueOf property is not found on the object, check that its | 2762 // If a valueOf property is not found on the object, check that its |
| 2722 // prototype is the unmodified String prototype. If not, the result is false. | 2763 // prototype is the unmodified String prototype. If not, the result is false. |
| 2723 __ ldr(r2, FieldMemOperand(r1, Map::kPrototypeOffset)); | 2764 __ ldr(r2, FieldMemOperand(r1, Map::kPrototypeOffset)); |
| 2724 __ JumpIfSmi(r2, if_false); | 2765 __ JumpIfSmi(r2, if_false); |
| 2725 __ ldr(r2, FieldMemOperand(r2, HeapObject::kMapOffset)); | 2766 __ ldr(r2, FieldMemOperand(r2, HeapObject::kMapOffset)); |
| 2726 __ ldr(r3, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX)); | 2767 __ ldr(r3, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX)); |
| 2727 __ ldr(r3, FieldMemOperand(r3, GlobalObject::kNativeContextOffset)); | 2768 __ ldr(r3, FieldMemOperand(r3, GlobalObject::kNativeContextOffset)); |
| (...skipping 20 matching lines...) |
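In outline, the emitted scan above walks the key column of the descriptor array (a sketch of the logic, not code from the patch):

    // r4 = first key slot; r2 = end of the scanned region;
    // each descriptor entry is kDescriptorSize pointers wide.
    for (Object** slot = first; slot != end;
         slot += DescriptorArray::kDescriptorSize) {
      if (*slot == value_of_symbol) goto found_value_of;  // -> if_false
    }

One reviewer-level observation: the SH4 hunk computes the first key slot into r4, but the visible lines no longer compute the loop bound into r2 as the ARM side did (old lines 2702-2703), while the loop still terminates on cmp(r4, r2); presumably r2 is produced elsewhere, e.g. by the three-register LoadInstanceDescriptors.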
| 2748 VisitForAccumulatorValue(args->at(0)); | 2789 VisitForAccumulatorValue(args->at(0)); |
| 2749 | 2790 |
| 2750 Label materialize_true, materialize_false; | 2791 Label materialize_true, materialize_false; |
| 2751 Label* if_true = NULL; | 2792 Label* if_true = NULL; |
| 2752 Label* if_false = NULL; | 2793 Label* if_false = NULL; |
| 2753 Label* fall_through = NULL; | 2794 Label* fall_through = NULL; |
| 2754 context()->PrepareTest(&materialize_true, &materialize_false, | 2795 context()->PrepareTest(&materialize_true, &materialize_false, |
| 2755 &if_true, &if_false, &fall_through); | 2796 &if_true, &if_false, &fall_through); |
| 2756 | 2797 |
| 2757 __ JumpIfSmi(r0, if_false); | 2798 __ JumpIfSmi(r0, if_false); |
| 2758 __ CompareObjectType(r0, r1, r2, JS_FUNCTION_TYPE); | 2799 __ CompareObjectType(r0, r1, r2, JS_FUNCTION_TYPE, eq); |
| 2759 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); | 2800 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); |
| 2760 Split(eq, if_true, if_false, fall_through); | 2801 Split(eq, if_true, if_false, fall_through); |
| 2761 | 2802 |
| 2762 context()->Plug(if_true, if_false); | 2803 context()->Plug(if_true, if_false); |
| 2763 } | 2804 } |
| 2764 | 2805 |
| 2765 | 2806 |
| 2766 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) { | 2807 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) { |
| 2767 ZoneList<Expression*>* args = expr->arguments(); | 2808 ZoneList<Expression*>* args = expr->arguments(); |
| 2768 ASSERT(args->length() == 1); | 2809 ASSERT(args->length() == 1); |
| 2769 | 2810 |
| 2770 VisitForAccumulatorValue(args->at(0)); | 2811 VisitForAccumulatorValue(args->at(0)); |
| 2771 | 2812 |
| 2772 Label materialize_true, materialize_false; | 2813 Label materialize_true, materialize_false; |
| 2773 Label* if_true = NULL; | 2814 Label* if_true = NULL; |
| 2774 Label* if_false = NULL; | 2815 Label* if_false = NULL; |
| 2775 Label* fall_through = NULL; | 2816 Label* fall_through = NULL; |
| 2776 context()->PrepareTest(&materialize_true, &materialize_false, | 2817 context()->PrepareTest(&materialize_true, &materialize_false, |
| 2777 &if_true, &if_false, &fall_through); | 2818 &if_true, &if_false, &fall_through); |
| 2778 | 2819 |
| 2779 __ JumpIfSmi(r0, if_false); | 2820 __ JumpIfSmi(r0, if_false); |
| 2780 __ CompareObjectType(r0, r1, r1, JS_ARRAY_TYPE); | 2821 __ CompareObjectType(r0, r1, r1, JS_ARRAY_TYPE, eq); |
| 2781 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); | 2822 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); |
| 2782 Split(eq, if_true, if_false, fall_through); | 2823 Split(eq, if_true, if_false, fall_through); |
| 2783 | 2824 |
| 2784 context()->Plug(if_true, if_false); | 2825 context()->Plug(if_true, if_false); |
| 2785 } | 2826 } |
| 2786 | 2827 |
| 2787 | 2828 |
| 2788 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) { | 2829 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) { |
| 2789 ZoneList<Expression*>* args = expr->arguments(); | 2830 ZoneList<Expression*>* args = expr->arguments(); |
| 2790 ASSERT(args->length() == 1); | 2831 ASSERT(args->length() == 1); |
| 2791 | 2832 |
| 2792 VisitForAccumulatorValue(args->at(0)); | 2833 VisitForAccumulatorValue(args->at(0)); |
| 2793 | 2834 |
| 2794 Label materialize_true, materialize_false; | 2835 Label materialize_true, materialize_false; |
| 2795 Label* if_true = NULL; | 2836 Label* if_true = NULL; |
| 2796 Label* if_false = NULL; | 2837 Label* if_false = NULL; |
| 2797 Label* fall_through = NULL; | 2838 Label* fall_through = NULL; |
| 2798 context()->PrepareTest(&materialize_true, &materialize_false, | 2839 context()->PrepareTest(&materialize_true, &materialize_false, |
| 2799 &if_true, &if_false, &fall_through); | 2840 &if_true, &if_false, &fall_through); |
| 2800 | 2841 |
| 2801 __ JumpIfSmi(r0, if_false); | 2842 __ JumpIfSmi(r0, if_false); |
| 2802 __ CompareObjectType(r0, r1, r1, JS_REGEXP_TYPE); | 2843 __ CompareObjectType(r0, r1, r1, JS_REGEXP_TYPE, eq); |
| 2803 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); | 2844 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); |
| 2804 Split(eq, if_true, if_false, fall_through); | 2845 Split(eq, if_true, if_false, fall_through); |
| 2805 | 2846 |
| 2806 context()->Plug(if_true, if_false); | 2847 context()->Plug(if_true, if_false); |
| 2807 } | 2848 } |
| 2808 | 2849 |
| 2809 | 2850 |
| 2810 | 2851 |
| 2811 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) { | 2852 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) { |
| 2812 ASSERT(expr->arguments()->length() == 0); | 2853 ASSERT(expr->arguments()->length() == 0); |
| 2813 | 2854 |
| 2814 Label materialize_true, materialize_false; | 2855 Label materialize_true, materialize_false; |
| 2815 Label* if_true = NULL; | 2856 Label* if_true = NULL; |
| 2816 Label* if_false = NULL; | 2857 Label* if_false = NULL; |
| 2817 Label* fall_through = NULL; | 2858 Label* fall_through = NULL; |
| 2818 context()->PrepareTest(&materialize_true, &materialize_false, | 2859 context()->PrepareTest(&materialize_true, &materialize_false, |
| 2819 &if_true, &if_false, &fall_through); | 2860 &if_true, &if_false, &fall_through); |
| 2820 | 2861 |
| 2821 // Get the frame pointer for the calling frame. | 2862 // Get the frame pointer for the calling frame. |
| 2822 __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); | 2863 __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); |
| 2823 | 2864 |
| 2824 // Skip the arguments adaptor frame if it exists. | 2865 // Skip the arguments adaptor frame if it exists. |
| 2825 Label check_frame_marker; | 2866 Label check_frame_marker; |
| 2826 __ ldr(r1, MemOperand(r2, StandardFrameConstants::kContextOffset)); | 2867 __ ldr(r1, MemOperand(r2, StandardFrameConstants::kContextOffset)); |
| 2827 __ cmp(r1, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); | 2868 __ cmp(r1, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); |
| 2828 __ b(ne, &check_frame_marker); | 2869 __ b(ne, &check_frame_marker, Label::kNear); |
| 2829 __ ldr(r2, MemOperand(r2, StandardFrameConstants::kCallerFPOffset)); | 2870 __ ldr(r2, MemOperand(r2, StandardFrameConstants::kCallerFPOffset)); |
| 2830 | 2871 |
| 2831 // Check the marker in the calling frame. | 2872 // Check the marker in the calling frame. |
| 2832 __ bind(&check_frame_marker); | 2873 __ bind(&check_frame_marker); |
| 2833 __ ldr(r1, MemOperand(r2, StandardFrameConstants::kMarkerOffset)); | 2874 __ ldr(r1, MemOperand(r2, StandardFrameConstants::kMarkerOffset)); |
| 2834 __ cmp(r1, Operand(Smi::FromInt(StackFrame::CONSTRUCT))); | 2875 __ cmp(r1, Operand(Smi::FromInt(StackFrame::CONSTRUCT))); |
| 2835 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); | 2876 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); |
| 2836 Split(eq, if_true, if_false, fall_through); | 2877 Split(eq, if_true, if_false, fall_through); |
| 2837 | 2878 |
| 2838 context()->Plug(if_true, if_false); | 2879 context()->Plug(if_true, if_false); |
| (...skipping 42 matching lines...) |
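The construct-call test above walks one frame up the stack: read the caller's fp, hop over an arguments-adaptor frame if one is interposed, then compare the frame's marker slot against the CONSTRUCT marker. A C++-level sketch of what the loads compute (illustrative accessors, not code from the patch):

    Address caller_fp = Memory::Address_at(fp + kCallerFPOffset);
    if (Memory::Object_at(caller_fp + kContextOffset) ==
        Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)) {
      caller_fp = Memory::Address_at(caller_fp + kCallerFPOffset);
    }
    bool is_construct = Memory::Object_at(caller_fp + kMarkerOffset) ==
                        Smi::FromInt(StackFrame::CONSTRUCT);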
| 2881 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) { | 2922 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) { |
| 2882 ASSERT(expr->arguments()->length() == 0); | 2923 ASSERT(expr->arguments()->length() == 0); |
| 2883 Label exit; | 2924 Label exit; |
| 2884 // Get the number of formal parameters. | 2925 // Get the number of formal parameters. |
| 2885 __ mov(r0, Operand(Smi::FromInt(info_->scope()->num_parameters()))); | 2926 __ mov(r0, Operand(Smi::FromInt(info_->scope()->num_parameters()))); |
| 2886 | 2927 |
| 2887 // Check if the calling frame is an arguments adaptor frame. | 2928 // Check if the calling frame is an arguments adaptor frame. |
| 2888 __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); | 2929 __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); |
| 2889 __ ldr(r3, MemOperand(r2, StandardFrameConstants::kContextOffset)); | 2930 __ ldr(r3, MemOperand(r2, StandardFrameConstants::kContextOffset)); |
| 2890 __ cmp(r3, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); | 2931 __ cmp(r3, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); |
| 2891 __ b(ne, &exit); | 2932 __ b(ne, &exit, Label::kNear); |
| 2892 | 2933 |
| 2893 // Arguments adaptor case: Read the arguments length from the | 2934 // Arguments adaptor case: Read the arguments length from the |
| 2894 // adaptor frame. | 2935 // adaptor frame. |
| 2895 __ ldr(r0, MemOperand(r2, ArgumentsAdaptorFrameConstants::kLengthOffset)); | 2936 __ ldr(r0, MemOperand(r2, ArgumentsAdaptorFrameConstants::kLengthOffset)); |
| 2896 | 2937 |
| 2897 __ bind(&exit); | 2938 __ bind(&exit); |
| 2898 context()->Plug(r0); | 2939 context()->Plug(r0); |
| 2899 } | 2940 } |
| 2900 | 2941 |
| 2901 | 2942 |
| 2902 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) { | 2943 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) { |
| 2903 ZoneList<Expression*>* args = expr->arguments(); | 2944 ZoneList<Expression*>* args = expr->arguments(); |
| 2904 ASSERT(args->length() == 1); | 2945 ASSERT(args->length() == 1); |
| 2905 Label done, null, function, non_function_constructor; | 2946 Label done, null, function, non_function_constructor; |
| 2906 | 2947 |
| 2907 VisitForAccumulatorValue(args->at(0)); | 2948 VisitForAccumulatorValue(args->at(0)); |
| 2908 | 2949 |
| 2909 // If the object is a smi, we return null. | 2950 // If the object is a smi, we return null. |
| 2910 __ JumpIfSmi(r0, &null); | 2951 __ JumpIfSmi(r0, &null, Label::kNear); |
| 2911 | 2952 |
| 2912 // Check that the object is a JS object but take special care of JS | 2953 // Check that the object is a JS object but take special care of JS |
| 2913 // functions to make sure they have 'Function' as their class. | 2954 // functions to make sure they have 'Function' as their class. |
| 2914 // Assume that there are only two callable types, and one of them is at | 2955 // Assume that there are only two callable types, and one of them is at |
| 2915 // either end of the type range for JS object types. Saves extra comparisons. | 2956 // either end of the type range for JS object types. Saves extra comparisons. |
| 2916 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2); | 2957 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2); |
| 2917 __ CompareObjectType(r0, r0, r1, FIRST_SPEC_OBJECT_TYPE); | 2958 __ CompareObjectType(r0, r0, r1, FIRST_SPEC_OBJECT_TYPE, ge); |
| 2918 // Map is now in r0. | 2959 // Map is now in r0. |
| 2919 __ b(lt, &null); | 2960 __ bf_near(&null); |
| 2920 STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE == | 2961 STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE == |
| 2921 FIRST_SPEC_OBJECT_TYPE + 1); | 2962 FIRST_SPEC_OBJECT_TYPE + 1); |
| 2922 __ b(eq, &function); | 2963 __ b(eq, &function); |
| 2923 | 2964 |
| 2924 __ cmp(r1, Operand(LAST_SPEC_OBJECT_TYPE)); | 2965 __ cmp(r1, Operand(LAST_SPEC_OBJECT_TYPE)); |
| 2925 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == | 2966 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == |
| 2926 LAST_SPEC_OBJECT_TYPE - 1); | 2967 LAST_SPEC_OBJECT_TYPE - 1); |
| 2927 __ b(eq, &function); | 2968 __ b(eq, &function); |
| 2928 // Assume that there is no larger type. | 2969 // Assume that there is no larger type. |
| 2929 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1); | 2970 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1); |
| 2930 | 2971 |
| 2931 // Check if the constructor in the map is a JS function. | 2972 // Check if the constructor in the map is a JS function. |
| 2932 __ ldr(r0, FieldMemOperand(r0, Map::kConstructorOffset)); | 2973 __ ldr(r0, FieldMemOperand(r0, Map::kConstructorOffset)); |
| 2933 __ CompareObjectType(r0, r1, r1, JS_FUNCTION_TYPE); | 2974 __ CompareObjectType(r0, r1, r1, JS_FUNCTION_TYPE, eq); |
| 2934 __ b(ne, &non_function_constructor); | 2975 __ b(ne, &non_function_constructor, Label::kNear); |
| 2935 | 2976 |
| 2936 // r0 now contains the constructor function. Grab the | 2977 // r0 now contains the constructor function. Grab the |
| 2937 // instance class name from there. | 2978 // instance class name from there. |
| 2938 __ ldr(r0, FieldMemOperand(r0, JSFunction::kSharedFunctionInfoOffset)); | 2979 __ ldr(r0, FieldMemOperand(r0, JSFunction::kSharedFunctionInfoOffset)); |
| 2939 __ ldr(r0, FieldMemOperand(r0, SharedFunctionInfo::kInstanceClassNameOffset)); | 2980 __ ldr(r0, FieldMemOperand(r0, SharedFunctionInfo::kInstanceClassNameOffset)); |
| 2940 __ b(&done); | 2981 __ b_near(&done); |
| 2941 | 2982 |
| 2942 // Functions have class 'Function'. | 2983 // Functions have class 'Function'. |
| 2943 __ bind(&function); | 2984 __ bind(&function); |
| 2944 __ LoadRoot(r0, Heap::kfunction_class_symbolRootIndex); | 2985 __ LoadRoot(r0, Heap::kfunction_class_symbolRootIndex); |
| 2945 __ jmp(&done); | 2986 __ jmp_near(&done); |
| 2946 | 2987 |
| 2947 // Objects with a non-function constructor have class 'Object'. | 2988 // Objects with a non-function constructor have class 'Object'. |
| 2948 __ bind(&non_function_constructor); | 2989 __ bind(&non_function_constructor); |
| 2949 __ LoadRoot(r0, Heap::kObject_symbolRootIndex); | 2990 __ LoadRoot(r0, Heap::kObject_symbolRootIndex); |
| 2950 __ jmp(&done); | 2991 __ jmp_near(&done); |
| 2951 | 2992 |
| 2952 // Non-JS objects have class null. | 2993 // Non-JS objects have class null. |
| 2953 __ bind(&null); | 2994 __ bind(&null); |
| 2954 __ LoadRoot(r0, Heap::kNullValueRootIndex); | 2995 __ LoadRoot(r0, Heap::kNullValueRootIndex); |
| 2955 | 2996 |
| 2956 // All done. | 2997 // All done. |
| 2957 __ bind(&done); | 2998 __ bind(&done); |
| 2958 | 2999 |
| 2959 context()->Plug(r0); | 3000 context()->Plug(r0); |
| 2960 } | 3001 } |
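The STATIC_ASSERTs above carry the reasoning of EmitClassOf: the two callable spec-object types sit at the extreme ends of the spec-object range, so once type >= FIRST_SPEC_OBJECT_TYPE is established, two equality checks against the endpoints identify functions, and everything strictly inside the range is a non-callable object. Equivalent control flow:

    // t >= FIRST_SPEC_OBJECT_TYPE already established.
    if (t == FIRST_SPEC_OBJECT_TYPE || t == LAST_SPEC_OBJECT_TYPE) {
      // callable: class is 'Function'
    } else {
      // non-callable: class name is read from the constructor function
    }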
| (...skipping 21 matching lines...) |
| 2982 } | 3023 } |
| 2983 | 3024 |
| 2984 | 3025 |
| 2985 void FullCodeGenerator::EmitRandomHeapNumber(CallRuntime* expr) { | 3026 void FullCodeGenerator::EmitRandomHeapNumber(CallRuntime* expr) { |
| 2986 ASSERT(expr->arguments()->length() == 0); | 3027 ASSERT(expr->arguments()->length() == 0); |
| 2987 Label slow_allocate_heapnumber; | 3028 Label slow_allocate_heapnumber; |
| 2988 Label heapnumber_allocated; | 3029 Label heapnumber_allocated; |
| 2989 | 3030 |
| 2990 __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex); | 3031 __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex); |
| 2991 __ AllocateHeapNumber(r4, r1, r2, r6, &slow_allocate_heapnumber); | 3032 __ AllocateHeapNumber(r4, r1, r2, r6, &slow_allocate_heapnumber); |
| 2992 __ jmp(&heapnumber_allocated); | 3033 __ jmp_near(&heapnumber_allocated); |
| 2993 | 3034 |
| 2994 __ bind(&slow_allocate_heapnumber); | 3035 __ bind(&slow_allocate_heapnumber); |
| 2995 // Allocate a heap number. | 3036 // Allocate a heap number. |
| 2996 __ CallRuntime(Runtime::kNumberAlloc, 0); | 3037 __ CallRuntime(Runtime::kNumberAlloc, 0); |
| 2997 __ mov(r4, Operand(r0)); | 3038 __ mov(r4, r0); |
| 2998 | 3039 |
| 2999 __ bind(&heapnumber_allocated); | 3040 __ bind(&heapnumber_allocated); |
| 3000 | 3041 |
| 3001 // Convert 32 random bits in r0 to 0.(32 random bits) in a double | 3042 // Convert 32 random bits in r0 to 0.(32 random bits) in a double |
| 3002 // by computing: | 3043 // by computing: |
| 3003 // ( 1.(20 0s)(32 random bits) x 2^20 ) - (1.0 x 2^20)). | 3044 // ( 1.(20 0s)(32 random bits) x 2^20 ) - (1.0 x 2^20)). |
| 3004 if (CpuFeatures::IsSupported(VFP2)) { | 3045 if (CpuFeatures::IsSupported(FPU)) { |
| 3046 __ push(r4); |
| 3005 __ PrepareCallCFunction(1, r0); | 3047 __ PrepareCallCFunction(1, r0); |
| 3006 __ ldr(r0, | 3048 __ ldr(r4, |
| 3007 ContextOperand(context_register(), Context::GLOBAL_OBJECT_INDEX)); | 3049 ContextOperand(context_register(), Context::GLOBAL_OBJECT_INDEX)); |
| 3008 __ ldr(r0, FieldMemOperand(r0, GlobalObject::kNativeContextOffset)); | 3050 __ ldr(r4, FieldMemOperand(r4, GlobalObject::kNativeContextOffset)); |
| 3009 __ CallCFunction(ExternalReference::random_uint32_function(isolate()), 1); | 3051 __ CallCFunction(ExternalReference::random_uint32_function(isolate()), 1); |
| 3052 __ pop(r4); |
| 3010 | 3053 |
| 3011 CpuFeatures::Scope scope(VFP2); | |
| 3012 // 0x41300000 is the top half of 1.0 x 2^20 as a double. | 3054 // 0x41300000 is the top half of 1.0 x 2^20 as a double. |
| 3013 // Create this constant using mov/orr to avoid PC relative load. | 3055 // Create this constant using mov/orr to avoid PC relative load. |
| 3014 __ mov(r1, Operand(0x41000000)); | 3056 __ mov(r1, Operand(0x41000000)); |
| 3015 __ orr(r1, r1, Operand(0x300000)); | 3057 __ orr(r1, r1, Operand(0x300000)); |
| 3016 // Move 0x41300000xxxxxxxx (x = random bits) to VFP. | 3058 // Move 0x41300000xxxxxxxx (x = random bits) to VFP. |
| 3017 __ vmov(d7, r0, r1); | 3059 __ movd(dr2, r0, r1); |
| 3018 // Move 0x4130000000000000 to VFP. | 3060 // Move 0x4130000000000000 to VFP. |
| 3019 __ mov(r0, Operand(0, RelocInfo::NONE)); | 3061 __ mov(r0, Operand(0, RelocInfo::NONE)); |
| 3020 __ vmov(d8, r0, r1); | 3062 __ movd(dr4, r0, r1); |
| 3021 // Subtract and store the result in the heap number. | 3063 // Subtract and store the result in the heap number. |
| 3022 __ vsub(d7, d7, d8); | 3064 __ fsub(dr2, dr4); |
| 3023 __ sub(r0, r4, Operand(kHeapObjectTag)); | 3065 __ sub(r0, r4, Operand(kHeapObjectTag)); |
| 3024 __ vstr(d7, r0, HeapNumber::kValueOffset); | 3066 __ dstr(dr2, MemOperand(r0, HeapNumber::kValueOffset)); |
| 3025 __ mov(r0, r4); | 3067 __ mov(r0, r4); |
| 3026 } else { | 3068 } else { |
| 3027 __ PrepareCallCFunction(2, r0); | 3069 __ PrepareCallCFunction(2, r0); |
| 3028 __ ldr(r1, | 3070 __ ldr(r5, |
| 3029 ContextOperand(context_register(), Context::GLOBAL_OBJECT_INDEX)); | 3071 ContextOperand(context_register(), Context::GLOBAL_OBJECT_INDEX)); |
| 3030 __ mov(r0, Operand(r4)); | 3072 __ ldr(r5, FieldMemOperand(r5, GlobalObject::kNativeContextOffset)); |
| 3031 __ ldr(r1, FieldMemOperand(r1, GlobalObject::kNativeContextOffset)); | |
| 3032 __ CallCFunction( | 3073 __ CallCFunction( |
| 3033 ExternalReference::fill_heap_number_with_random_function(isolate()), 2); | 3074 ExternalReference::fill_heap_number_with_random_function(isolate()), 2); |
| 3034 } | 3075 } |
| 3035 | 3076 |
| 3036 context()->Plug(r0); | 3077 context()->Plug(r0); |
| 3037 } | 3078 } |
| 3038 | 3079 |
| 3039 | 3080 |
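The constant juggling above is the classic bits-to-double trick, and the arithmetic is worth writing out. With high word 0x41300000 (biased exponent 0x413, i.e. 2^20) and 32 random bits r in the low mantissa word, the double's value is

    2^20 + r * 2^-32     (the low 32 mantissa bits weigh 2^-52 each,
                          scaled by 2^20)

so subtracting the exact 2^20 (high word 0x41300000, low word 0) leaves r * 2^-32, uniform in [0, 1). The SH4 side performs the same computation with movd/fsub/dstr on dr2/dr4 in place of VFP's vmov/vsub/vstr; the added push/pop of r4 around the C call is consistent with r4 being both the heap-number register here and a C argument register on SH4 (an assumption; see the ABI note after EmitDateField below).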
| 3040 void FullCodeGenerator::EmitSubString(CallRuntime* expr) { | 3081 void FullCodeGenerator::EmitSubString(CallRuntime* expr) { |
| 3041 // Load the arguments on the stack and call the stub. | 3082 // Load the arguments on the stack and call the stub. |
| (...skipping 22 matching lines...) |
| 3064 } | 3105 } |
| 3065 | 3106 |
| 3066 | 3107 |
| 3067 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) { | 3108 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) { |
| 3068 ZoneList<Expression*>* args = expr->arguments(); | 3109 ZoneList<Expression*>* args = expr->arguments(); |
| 3069 ASSERT(args->length() == 1); | 3110 ASSERT(args->length() == 1); |
| 3070 VisitForAccumulatorValue(args->at(0)); // Load the object. | 3111 VisitForAccumulatorValue(args->at(0)); // Load the object. |
| 3071 | 3112 |
| 3072 Label done; | 3113 Label done; |
| 3073 // If the object is a smi return the object. | 3114 // If the object is a smi return the object. |
| 3074 __ JumpIfSmi(r0, &done); | 3115 __ JumpIfSmi(r0, &done, Label::kNear); |
| 3075 // If the object is not a value type, return the object. | 3116 // If the object is not a value type, return the object. |
| 3076 __ CompareObjectType(r0, r1, r1, JS_VALUE_TYPE); | 3117 __ CompareObjectType(r0, r1, r1, JS_VALUE_TYPE, eq); |
| 3077 __ b(ne, &done); | 3118 __ bf_near(&done); |
| 3078 __ ldr(r0, FieldMemOperand(r0, JSValue::kValueOffset)); | 3119 __ ldr(r0, FieldMemOperand(r0, JSValue::kValueOffset)); |
| 3079 | 3120 |
| 3080 __ bind(&done); | 3121 __ bind(&done); |
| 3081 context()->Plug(r0); | 3122 context()->Plug(r0); |
| 3082 } | 3123 } |
| 3083 | 3124 |
| 3084 | 3125 |
| 3085 void FullCodeGenerator::EmitDateField(CallRuntime* expr) { | 3126 void FullCodeGenerator::EmitDateField(CallRuntime* expr) { |
| 3086 ZoneList<Expression*>* args = expr->arguments(); | 3127 ZoneList<Expression*>* args = expr->arguments(); |
| 3087 ASSERT(args->length() == 2); | 3128 ASSERT(args->length() == 2); |
| 3088 ASSERT_NE(NULL, args->at(1)->AsLiteral()); | 3129 ASSERT_NE(NULL, args->at(1)->AsLiteral()); |
| 3089 Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->handle())); | 3130 Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->handle())); |
| 3090 | 3131 |
| 3091 VisitForAccumulatorValue(args->at(0)); // Load the object. | 3132 VisitForAccumulatorValue(args->at(0)); // Load the object. |
| 3092 | 3133 |
| 3093 Label runtime, done, not_date_object; | 3134 Label runtime, done, not_date_object; |
| 3094 Register object = r0; | 3135 Register object = r0; |
| 3095 Register result = r0; | 3136 Register result = r0; |
| 3096 Register scratch0 = r9; | 3137 Register scratch0 = r9; |
| 3097 Register scratch1 = r1; | 3138 Register scratch1 = r1; |
| 3098 | 3139 |
| 3099 __ JumpIfSmi(object, &not_date_object); | 3140 __ JumpIfSmi(object, &not_date_object); |
| 3100 __ CompareObjectType(object, scratch1, scratch1, JS_DATE_TYPE); | 3141 __ CompareObjectType(object, scratch1, scratch1, JS_DATE_TYPE, eq); |
| 3101 __ b(ne, &not_date_object); | 3142 __ bf(&not_date_object); |
| 3102 | 3143 |
| 3103 if (index->value() == 0) { | 3144 if (index->value() == 0) { |
| 3104 __ ldr(result, FieldMemOperand(object, JSDate::kValueOffset)); | 3145 __ ldr(result, FieldMemOperand(object, JSDate::kValueOffset)); |
| 3105 __ jmp(&done); | 3146 __ jmp(&done); |
| 3106 } else { | 3147 } else { |
| 3107 if (index->value() < JSDate::kFirstUncachedField) { | 3148 if (index->value() < JSDate::kFirstUncachedField) { |
| 3108 ExternalReference stamp = ExternalReference::date_cache_stamp(isolate()); | 3149 ExternalReference stamp = ExternalReference::date_cache_stamp(isolate()); |
| 3109 __ mov(scratch1, Operand(stamp)); | 3150 __ mov(scratch1, Operand(stamp)); |
| 3110 __ ldr(scratch1, MemOperand(scratch1)); | 3151 __ ldr(scratch1, MemOperand(scratch1)); |
| 3111 __ ldr(scratch0, FieldMemOperand(object, JSDate::kCacheStampOffset)); | 3152 __ ldr(scratch0, FieldMemOperand(object, JSDate::kCacheStampOffset)); |
| 3112 __ cmp(scratch1, scratch0); | 3153 __ cmp(scratch1, scratch0); |
| 3113 __ b(ne, &runtime); | 3154 __ b(ne, &runtime); |
| 3114 __ ldr(result, FieldMemOperand(object, JSDate::kValueOffset + | 3155 __ ldr(result, FieldMemOperand(object, JSDate::kValueOffset + |
| 3115 kPointerSize * index->value())); | 3156 kPointerSize * index->value())); |
| 3116 __ jmp(&done); | 3157 __ jmp(&done); |
| 3117 } | 3158 } |
| 3118 __ bind(&runtime); | 3159 __ bind(&runtime); |
| 3160 // TODO(STM): take care of the ABI |
| 3119 __ PrepareCallCFunction(2, scratch1); | 3161 __ PrepareCallCFunction(2, scratch1); |
| 3120 __ mov(r1, Operand(index)); | 3162 __ mov(r5, Operand(index)); |
| 3163 __ mov(r4, r0); |
| 3121 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2); | 3164 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2); |
| 3122 __ jmp(&done); | 3165 __ jmp(&done); |
| 3123 } | 3166 } |
| 3124 | 3167 |
| 3125 __ bind(&not_date_object); | 3168 __ bind(&not_date_object); |
| 3126 __ CallRuntime(Runtime::kThrowNotDateError, 0); | 3169 __ CallRuntime(Runtime::kThrowNotDateError, 0); |
| 3127 __ bind(&done); | 3170 __ bind(&done); |
| 3128 context()->Plug(r0); | 3171 context()->Plug(r0); |
| 3129 } | 3172 } |
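The TODO(STM) above concerns argument marshaling: the SH4 C calling convention passes the first four integer arguments in r4-r7 (return value in r0), so where the ARM code builds C arguments in r0/r1, this port moves them into r4/r5 before CallCFunction. Under that convention the runtime call reads:

    __ PrepareCallCFunction(2, scratch1);
    __ mov(r5, Operand(index));        // arg 1: the requested field index
    __ mov(r4, r0);                    // arg 0: the JSDate object
    __ CallCFunction(
        ExternalReference::get_date_field_function(isolate()), 2);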
| 3130 | 3173 |
| 3131 | 3174 |
| 3132 void FullCodeGenerator::EmitMathPow(CallRuntime* expr) { | 3175 void FullCodeGenerator::EmitMathPow(CallRuntime* expr) { |
| 3133 // Load the arguments on the stack and call the runtime function. | 3176 // Load the arguments on the stack and call the runtime function. |
| 3134 ZoneList<Expression*>* args = expr->arguments(); | 3177 ZoneList<Expression*>* args = expr->arguments(); |
| 3135 ASSERT(args->length() == 2); | 3178 ASSERT(args->length() == 2); |
| 3136 VisitForStackValue(args->at(0)); | 3179 VisitForStackValue(args->at(0)); |
| 3137 VisitForStackValue(args->at(1)); | 3180 VisitForStackValue(args->at(1)); |
| 3138 if (CpuFeatures::IsSupported(VFP2)) { | 3181 if (CpuFeatures::IsSupported(FPU)) { |
| 3139 MathPowStub stub(MathPowStub::ON_STACK); | 3182 MathPowStub stub(MathPowStub::ON_STACK); |
| 3140 __ CallStub(&stub); | 3183 __ CallStub(&stub); |
| 3141 } else { | 3184 } else { |
| 3142 __ CallRuntime(Runtime::kMath_pow, 2); | 3185 __ CallRuntime(Runtime::kMath_pow, 2); |
| 3143 } | 3186 } |
| 3144 context()->Plug(r0); | 3187 context()->Plug(r0); |
| 3145 } | 3188 } |
| 3146 | 3189 |
| 3147 | 3190 |
| 3148 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) { | 3191 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) { |
| 3149 ZoneList<Expression*>* args = expr->arguments(); | 3192 ZoneList<Expression*>* args = expr->arguments(); |
| 3150 ASSERT(args->length() == 2); | 3193 ASSERT(args->length() == 2); |
| 3151 VisitForStackValue(args->at(0)); // Load the object. | 3194 VisitForStackValue(args->at(0)); // Load the object. |
| 3152 VisitForAccumulatorValue(args->at(1)); // Load the value. | 3195 VisitForAccumulatorValue(args->at(1)); // Load the value. |
| 3153 __ pop(r1); // r0 = value. r1 = object. | 3196 __ pop(r1); // r0 = value. r1 = object. |
| 3154 | 3197 |
| 3155 Label done; | 3198 Label done; |
| 3156 // If the object is a smi, return the value. | 3199 // If the object is a smi, return the value. |
| 3157 __ JumpIfSmi(r1, &done); | 3200 __ JumpIfSmi(r1, &done); |
| 3158 | 3201 |
| 3159 // If the object is not a value type, return the value. | 3202 // If the object is not a value type, return the value. |
| 3160 __ CompareObjectType(r1, r2, r2, JS_VALUE_TYPE); | 3203 __ CompareObjectType(r1, r2, r2, JS_VALUE_TYPE, eq); |
| 3161 __ b(ne, &done); | 3204 __ b(ne, &done); |
| 3162 | 3205 |
| 3163 // Store the value. | 3206 // Store the value. |
| 3164 __ str(r0, FieldMemOperand(r1, JSValue::kValueOffset)); | 3207 __ str(r0, FieldMemOperand(r1, JSValue::kValueOffset)); |
| 3165 // Update the write barrier. Save the value as it will be | 3208 // Update the write barrier. Save the value as it will be |
| 3166 // overwritten by the write barrier code and is needed afterward. | 3209 // overwritten by the write barrier code and is needed afterward. |
| 3167 __ mov(r2, r0); | 3210 __ mov(r2, r0); |
| 3168 __ RecordWriteField( | 3211 __ RecordWriteField( |
| 3169 r1, JSValue::kValueOffset, r2, r3, kLRHasBeenSaved, kDontSaveFPRegs); | 3212 r1, JSValue::kValueOffset, r2, r3, kLRHasBeenSaved, kDontSaveFPRegs); |
| 3170 | 3213 |
| (...skipping 10 matching lines...) |
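EmitSetValueOf above ends with the canonical store-plus-write-barrier pair: store the value into the JSValue, then call RecordWriteField so the incremental marker learns about the new pointer. Because RecordWrite may clobber all of its register arguments, the value is first copied to a scratch register and r0 survives to be plugged as the result:

    __ str(r0, FieldMemOperand(r1, JSValue::kValueOffset));  // the store
    __ mov(r2, r0);                    // barrier may smash r2/r3
    __ RecordWriteField(r1, JSValue::kValueOffset, r2, r3,
                        kLRHasBeenSaved, kDontSaveFPRegs);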
| 3181 | 3224 |
| 3182 NumberToStringStub stub; | 3225 NumberToStringStub stub; |
| 3183 __ CallStub(&stub); | 3226 __ CallStub(&stub); |
| 3184 context()->Plug(r0); | 3227 context()->Plug(r0); |
| 3185 } | 3228 } |
| 3186 | 3229 |
| 3187 | 3230 |
| 3188 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) { | 3231 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) { |
| 3189 ZoneList<Expression*>* args = expr->arguments(); | 3232 ZoneList<Expression*>* args = expr->arguments(); |
| 3190 ASSERT(args->length() == 1); | 3233 ASSERT(args->length() == 1); |
| 3234 |
| 3191 VisitForAccumulatorValue(args->at(0)); | 3235 VisitForAccumulatorValue(args->at(0)); |
| 3192 | 3236 |
| 3193 Label done; | 3237 Label done; |
| 3194 StringCharFromCodeGenerator generator(r0, r1); | 3238 StringCharFromCodeGenerator generator(r0, r1); |
| 3195 generator.GenerateFast(masm_); | 3239 generator.GenerateFast(masm_); |
| 3196 __ jmp(&done); | 3240 __ jmp(&done); |
| 3197 | 3241 |
| 3198 NopRuntimeCallHelper call_helper; | 3242 NopRuntimeCallHelper call_helper; |
| 3199 generator.GenerateSlow(masm_, call_helper); | 3243 generator.GenerateSlow(masm_, call_helper); |
| 3200 | 3244 |
| (...skipping 182 matching lines...) |
| 3383 | 3427 |
| 3384 int arg_count = args->length() - 2; // 2 ~ receiver and function. | 3428 int arg_count = args->length() - 2; // 2 ~ receiver and function. |
| 3385 for (int i = 0; i < arg_count + 1; i++) { | 3429 for (int i = 0; i < arg_count + 1; i++) { |
| 3386 VisitForStackValue(args->at(i)); | 3430 VisitForStackValue(args->at(i)); |
| 3387 } | 3431 } |
| 3388 VisitForAccumulatorValue(args->last()); // Function. | 3432 VisitForAccumulatorValue(args->last()); // Function. |
| 3389 | 3433 |
| 3390 Label runtime, done; | 3434 Label runtime, done; |
| 3391 // Check for non-function argument (including proxy). | 3435 // Check for non-function argument (including proxy). |
| 3392 __ JumpIfSmi(r0, &runtime); | 3436 __ JumpIfSmi(r0, &runtime); |
| 3393 __ CompareObjectType(r0, r1, r1, JS_FUNCTION_TYPE); | 3437 __ CompareObjectType(r0, r1, r1, JS_FUNCTION_TYPE, eq); |
| 3394 __ b(ne, &runtime); | 3438 __ bf(&runtime); |
| 3395 | 3439 |
| 3396 // InvokeFunction requires the function in r1. Move it in there. | 3440 // InvokeFunction requires the function in r1. Move it in there. |
| 3397 __ mov(r1, result_register()); | 3441 __ mov(r1, result_register()); |
| 3398 ParameterCount count(arg_count); | 3442 ParameterCount count(arg_count); |
| 3399 __ InvokeFunction(r1, count, CALL_FUNCTION, | 3443 __ InvokeFunction(r1, count, CALL_FUNCTION, |
| 3400 NullCallWrapper(), CALL_AS_METHOD); | 3444 NullCallWrapper(), CALL_AS_METHOD); |
| 3401 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); | 3445 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
| 3402 __ jmp(&done); | 3446 __ jmp(&done); |
| 3403 | 3447 |
| 3404 __ bind(&runtime); | 3448 __ bind(&runtime); |
| (...skipping 43 matching lines...) |
| 3448 FieldMemOperand(cache, FixedArray::OffsetOfElementAt(cache_id))); | 3492 FieldMemOperand(cache, FixedArray::OffsetOfElementAt(cache_id))); |
| 3449 | 3493 |
| 3450 | 3494 |
| 3451 Label done, not_found; | 3495 Label done, not_found; |
| 3452 // tmp now holds finger offset as a smi. | 3496 // tmp now holds finger offset as a smi. |
| 3453 STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1); | 3497 STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1); |
| 3454 __ ldr(r2, FieldMemOperand(cache, JSFunctionResultCache::kFingerOffset)); | 3498 __ ldr(r2, FieldMemOperand(cache, JSFunctionResultCache::kFingerOffset)); |
| 3455 // r2 now holds finger offset as a smi. | 3499 // r2 now holds finger offset as a smi. |
| 3456 __ add(r3, cache, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); | 3500 __ add(r3, cache, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); |
| 3457 // r3 now points to the start of fixed array elements. | 3501 // r3 now points to the start of fixed array elements. |
| 3458 __ ldr(r2, MemOperand(r3, r2, LSL, kPointerSizeLog2 - kSmiTagSize, PreIndex)); | 3502 __ lsl(r2, r2, Operand(kPointerSizeLog2 - kSmiTagSize)); |
| 3503 __ add(r3, r3, r2); |
| 3504 __ ldr(r2, MemOperand(r3)); |
| 3459 // Note side effect of PreIndex: r3 now points to the key of the pair. | 3505 // The explicit add above replaces PreIndex: r3 now points to the key of the pair. |
| 3460 __ cmp(key, r2); | 3506 __ cmp(key, r2); |
| 3461 __ b(ne, &not_found); | 3507 __ b(ne, &not_found, Label::kNear); |
| 3462 | 3508 |
| 3463 __ ldr(r0, MemOperand(r3, kPointerSize)); | 3509 __ ldr(r0, MemOperand(r3, kPointerSize)); |
| 3464 __ b(&done); | 3510 __ b_near(&done); |
| 3465 | 3511 |
| 3466 __ bind(&not_found); | 3512 __ bind(&not_found); |
| 3467 // Call runtime to perform the lookup. | 3513 // Call runtime to perform the lookup. |
| 3468 __ Push(cache, key); | 3514 __ Push(cache, key); |
| 3469 __ CallRuntime(Runtime::kGetFromCache, 2); | 3515 __ CallRuntime(Runtime::kGetFromCache, 2); |
| 3470 | 3516 |
| 3471 __ bind(&done); | 3517 __ bind(&done); |
| 3472 context()->Plug(r0); | 3518 context()->Plug(r0); |
| 3473 } | 3519 } |
| 3474 | 3520 |
| 3475 | 3521 |
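The cache-finger load in EmitGetFromCache is the clearest example of the addressing-mode gap: ARM folds a scaled index plus pre-increment into a single ldr, while SH4 must materialize the address first, hence the lsl/add/ldr triple with the same net side effect (r3 left pointing at the key slot):

    // ARM original (one instruction, r3 pre-incremented):
    //   __ ldr(r2, MemOperand(r3, r2, LSL,
    //                         kPointerSizeLog2 - kSmiTagSize, PreIndex));
    // SH4 replacement from this patch:
    __ lsl(r2, r2, Operand(kPointerSizeLog2 - kSmiTagSize));
    __ add(r3, r3, r2);                // r3 -> key of the cache pair
    __ ldr(r2, MemOperand(r3));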
| 3476 void FullCodeGenerator::EmitIsRegExpEquivalent(CallRuntime* expr) { | 3522 void FullCodeGenerator::EmitIsRegExpEquivalent(CallRuntime* expr) { |
| 3477 ZoneList<Expression*>* args = expr->arguments(); | 3523 ZoneList<Expression*>* args = expr->arguments(); |
| 3478 ASSERT_EQ(2, args->length()); | 3524 ASSERT_EQ(2, args->length()); |
| 3479 | 3525 |
| 3480 Register right = r0; | 3526 Register right = r0; |
| 3481 Register left = r1; | 3527 Register left = r1; |
| 3482 Register tmp = r2; | 3528 Register tmp = r2; |
| 3483 Register tmp2 = r3; | 3529 Register tmp2 = r3; |
| 3484 | 3530 |
| 3485 VisitForStackValue(args->at(0)); | 3531 VisitForStackValue(args->at(0)); |
| 3486 VisitForAccumulatorValue(args->at(1)); | 3532 VisitForAccumulatorValue(args->at(1)); |
| 3487 __ pop(left); | 3533 __ pop(left); |
| 3488 | 3534 |
| 3489 Label done, fail, ok; | 3535 Label done, fail, ok; |
| 3490 __ cmp(left, Operand(right)); | 3536 __ cmp(left, right); |
| 3491 __ b(eq, &ok); | 3537 __ b(eq, &ok, Label::kNear); |
| 3492 // Fail if either is a non-HeapObject. | 3538 // Fail if either is a non-HeapObject. |
| 3493 __ and_(tmp, left, Operand(right)); | 3539 __ land(tmp, left, right); |
| 3494 __ JumpIfSmi(tmp, &fail); | 3540 __ JumpIfSmi(tmp, &fail, Label::kNear); |
| 3495 __ ldr(tmp, FieldMemOperand(left, HeapObject::kMapOffset)); | 3541 __ ldr(tmp, FieldMemOperand(left, HeapObject::kMapOffset)); |
| 3496 __ ldrb(tmp2, FieldMemOperand(tmp, Map::kInstanceTypeOffset)); | 3542 __ ldrb(tmp2, FieldMemOperand(tmp, Map::kInstanceTypeOffset)); |
| 3497 __ cmp(tmp2, Operand(JS_REGEXP_TYPE)); | 3543 __ cmp(tmp2, Operand(JS_REGEXP_TYPE)); |
| 3498 __ b(ne, &fail); | 3544 __ b(ne, &fail, Label::kNear); |
| 3499 __ ldr(tmp2, FieldMemOperand(right, HeapObject::kMapOffset)); | 3545 __ ldr(tmp2, FieldMemOperand(right, HeapObject::kMapOffset)); |
| 3500 __ cmp(tmp, Operand(tmp2)); | 3546 __ cmp(tmp, tmp2); |
| 3501 __ b(ne, &fail); | 3547 __ b(ne, &fail, Label::kNear); |
| 3502 __ ldr(tmp, FieldMemOperand(left, JSRegExp::kDataOffset)); | 3548 __ ldr(tmp, FieldMemOperand(left, JSRegExp::kDataOffset)); |
| 3503 __ ldr(tmp2, FieldMemOperand(right, JSRegExp::kDataOffset)); | 3549 __ ldr(tmp2, FieldMemOperand(right, JSRegExp::kDataOffset)); |
| 3504 __ cmp(tmp, tmp2); | 3550 __ cmp(tmp, tmp2); |
| 3505 __ b(eq, &ok); | 3551 __ b(eq, &ok, Label::kNear); |
| 3506 __ bind(&fail); | 3552 __ bind(&fail); |
| 3507 __ LoadRoot(r0, Heap::kFalseValueRootIndex); | 3553 __ LoadRoot(r0, Heap::kFalseValueRootIndex); |
| 3508 __ jmp(&done); | 3554 __ jmp_near(&done); |
| 3509 __ bind(&ok); | 3555 __ bind(&ok); |
| 3510 __ LoadRoot(r0, Heap::kTrueValueRootIndex); | 3556 __ LoadRoot(r0, Heap::kTrueValueRootIndex); |
| 3511 __ bind(&done); | 3557 __ bind(&done); |
| 3512 | 3558 |
| 3513 context()->Plug(r0); | 3559 context()->Plug(r0); |
| 3514 } | 3560 } |
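Nearly every branch in this function picks up a Label::kNear hint in the port. SH4's conditional branches bt/bf encode only a short pc-relative displacement, so the hint presumably lets the assembler emit the compact single-instruction form instead of a branch-around-jump sequence. A rough reachability model, assuming the signed 8-bit displacement scaled by the 2-byte instruction size:

    // Presumed test behind Label::kNear: can a bare bt/bf reach the target?
    static inline bool FitsNearBranch(int byte_offset) {
      if (byte_offset % 2 != 0) return false;  // targets are 2-byte aligned
      int disp = byte_offset / 2;              // hardware scales disp by 2
      return disp >= -128 && disp <= 127;      // signed 8-bit field
    }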
| 3515 | 3561 |
| 3516 | 3562 |
| 3517 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) { | 3563 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) { |
| 3518 ZoneList<Expression*>* args = expr->arguments(); | 3564 ZoneList<Expression*>* args = expr->arguments(); |
| (...skipping 13 matching lines...) |
| 3532 | 3578 |
| 3533 context()->Plug(if_true, if_false); | 3579 context()->Plug(if_true, if_false); |
| 3534 } | 3580 } |
| 3535 | 3581 |
| 3536 | 3582 |
| 3537 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) { | 3583 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) { |
| 3538 ZoneList<Expression*>* args = expr->arguments(); | 3584 ZoneList<Expression*>* args = expr->arguments(); |
| 3539 ASSERT(args->length() == 1); | 3585 ASSERT(args->length() == 1); |
| 3540 VisitForAccumulatorValue(args->at(0)); | 3586 VisitForAccumulatorValue(args->at(0)); |
| 3541 | 3587 |
| 3542 __ AssertString(r0); | 3588 __ AbortIfNotString(r0); |
| 3589 |
| 3543 | 3590 |
| 3544 __ ldr(r0, FieldMemOperand(r0, String::kHashFieldOffset)); | 3591 __ ldr(r0, FieldMemOperand(r0, String::kHashFieldOffset)); |
| 3545 __ IndexFromHash(r0, r0); | 3592 __ IndexFromHash(r0, r0); |
| 3546 | 3593 |
| 3547 context()->Plug(r0); | 3594 context()->Plug(r0); |
| 3548 } | 3595 } |
| 3549 | 3596 |
| 3550 | 3597 |
| 3551 void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) { | 3598 void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) { |
| 3552 Label bailout, done, one_char_separator, long_separator, | 3599 Label bailout, done, one_char_separator, long_separator, |
| (...skipping 17 matching lines...) |
| 3570 Register element = r5; | 3617 Register element = r5; |
| 3571 Register elements_end = r6; | 3618 Register elements_end = r6; |
| 3572 Register scratch1 = r7; | 3619 Register scratch1 = r7; |
| 3573 Register scratch2 = r9; | 3620 Register scratch2 = r9; |
| 3574 | 3621 |
| 3575 // Separator operand is on the stack. | 3622 // Separator operand is on the stack. |
| 3576 __ pop(separator); | 3623 __ pop(separator); |
| 3577 | 3624 |
| 3578 // Check that the array is a JSArray. | 3625 // Check that the array is a JSArray. |
| 3579 __ JumpIfSmi(array, &bailout); | 3626 __ JumpIfSmi(array, &bailout); |
| 3580 __ CompareObjectType(array, scratch1, scratch2, JS_ARRAY_TYPE); | 3627 __ CompareObjectType(array, scratch1, scratch2, JS_ARRAY_TYPE, eq); |
| 3581 __ b(ne, &bailout); | 3628 __ b(ne, &bailout); |
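On ARM, CompareObjectType leaves the full flags register behind, so any later b(cond) can test whichever relation it likes. SH4 compares produce a single T bit, so the relation has to be chosen at compare time; that is why the port threads a condition (eq here, ge and gt further down) into CompareObjectType and CompareInstanceType. A scalar sketch of the T bit these calls are presumed to leave behind:

    enum Cond { kEq, kGe, kGt };
    // T = (instance_type <cond> expected); the b(ne, ...)/bt/bf that
    // follows then simply tests T.
    static inline bool TBitForTypeCheck(int instance_type, int expected,
                                        Cond c) {
      switch (c) {
        case kEq: return instance_type == expected;
        case kGe: return instance_type >= expected;
        case kGt: return instance_type > expected;
      }
      return false;
    }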
| 3582 | 3629 |
| 3583 // Check that the array has fast elements. | 3630 // Check that the array has fast elements. |
| 3584 __ CheckFastElements(scratch1, scratch2, &bailout); | 3631 __ CheckFastElements(scratch1, scratch2, &bailout); |
| 3585 | 3632 |
| 3586 // If the array has length zero, return the empty string. | 3633 // If the array has length zero, return the empty string. |
| 3587 __ ldr(array_length, FieldMemOperand(array, JSArray::kLengthOffset)); | 3634 __ ldr(array_length, FieldMemOperand(array, JSArray::kLengthOffset)); |
| 3588 __ SmiUntag(array_length, SetCC); | 3635 __ SmiUntag(array_length); |
| 3589 __ b(ne, &non_trivial_array); | 3636 __ tst(array_length, array_length); |
| 3637 __ b(ne, &non_trivial_array, Label::kNear); |
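ARM's SmiUntag(array_length, SetCC) untags and sets the flags in one instruction, so b(ne) can test for a non-empty array directly. The SH4 shift leaves no usable flag, hence the explicit tst, which sets T when the AND of its operands is zero. In scalar form:

    #include <cstdint>
    static inline bool UntagIsZero(int32_t* array_length) {
      *array_length >>= 1;        // SmiUntag (one-bit smi tag)
      return *array_length == 0;  // tst rn, rn: T set iff the value is zero
    }
    // b(ne, &non_trivial_array) then branches while T is clear (non-zero).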
| 3590 __ LoadRoot(r0, Heap::kEmptyStringRootIndex); | 3638 __ LoadRoot(r0, Heap::kEmptyStringRootIndex); |
| 3591 __ b(&done); | 3639 __ b(&done); |
| 3592 | 3640 |
| 3593 __ bind(&non_trivial_array); | 3641 __ bind(&non_trivial_array); |
| 3594 | 3642 |
| 3595 // Get the FixedArray containing array's elements. | 3643 // Get the FixedArray containing array's elements. |
| 3596 elements = array; | 3644 elements = array; |
| 3597 __ ldr(elements, FieldMemOperand(array, JSArray::kElementsOffset)); | 3645 __ ldr(elements, FieldMemOperand(array, JSArray::kElementsOffset)); |
| 3598 array = no_reg; // End of array's live range. | 3646 array = no_reg; // End of array's live range. |
| 3599 | 3647 |
| 3600 // Check that all array elements are sequential ASCII strings, and | 3648 // Check that all array elements are sequential ASCII strings, and |
| 3601 // accumulate the sum of their lengths, as a smi-encoded value. | 3649 // accumulate the sum of their lengths, as a smi-encoded value. |
| 3602 __ mov(string_length, Operand(0)); | 3650 __ mov(string_length, Operand(0)); |
| 3603 __ add(element, | 3651 __ add(element, |
| 3604 elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); | 3652 elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); |
| 3605 __ add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2)); | 3653 __ lsl(elements_end, array_length, Operand(kPointerSizeLog2)); |
| 3654 __ add(elements_end, element, elements_end); |
| 3606 // Loop condition: while (element < elements_end). | 3655 // Loop condition: while (element < elements_end). |
| 3607 // Live values in registers: | 3656 // Live values in registers: |
| 3608 // elements: Fixed array of strings. | 3657 // elements: Fixed array of strings. |
| 3609 // array_length: Length of the fixed array of strings (not smi) | 3658 // array_length: Length of the fixed array of strings (not smi) |
| 3610 // separator: Separator string | 3659 // separator: Separator string |
| 3611 // string_length: Accumulated sum of string lengths (smi). | 3660 // string_length: Accumulated sum of string lengths (smi). |
| 3612 // element: Current array element. | 3661 // element: Current array element. |
| 3613 // elements_end: Array end. | 3662 // elements_end: Array end. |
| 3614 if (generate_debug_code_) { | 3663 if (generate_debug_code_) { |
| 3615 __ cmp(array_length, Operand(0)); | 3664 __ cmpgt(array_length, Operand(0)); |
| 3616 __ Assert(gt, "No empty arrays here in EmitFastAsciiArrayJoin"); | 3665 __ Assert(eq, "No empty arrays here in EmitFastAsciiArrayJoin"); |
| 3617 } | 3666 } |
| 3618 __ bind(&loop); | 3667 __ bind(&loop); |
| 3619 __ ldr(string, MemOperand(element, kPointerSize, PostIndex)); | 3668 __ ldr(string, MemOperand(element, kPointerSize, PostIndex)); |
| 3620 __ JumpIfSmi(string, &bailout); | 3669 __ JumpIfSmi(string, &bailout); |
| 3621 __ ldr(scratch1, FieldMemOperand(string, HeapObject::kMapOffset)); | 3670 __ ldr(scratch1, FieldMemOperand(string, HeapObject::kMapOffset)); |
| 3622 __ ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset)); | 3671 __ ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset)); |
| 3623 __ JumpIfInstanceTypeIsNotSequentialAscii(scratch1, scratch2, &bailout); | 3672 __ JumpIfInstanceTypeIsNotSequentialAscii(scratch1, scratch2, &bailout); |
| 3624 __ ldr(scratch1, FieldMemOperand(string, SeqAsciiString::kLengthOffset)); | 3673 __ ldr(scratch1, FieldMemOperand(string, SeqAsciiString::kLengthOffset)); |
| 3625 __ add(string_length, string_length, Operand(scratch1), SetCC); | 3674 __ addv(string_length, string_length, scratch1); |
| 3626 __ b(vs, &bailout); | 3675 __ b(t, &bailout); |
| 3627 __ cmp(element, elements_end); | 3676 __ cmpge(element, elements_end); |
| 3628 __ b(lt, &loop); | 3677 __ bf(&loop); |
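Two recurring rewrites meet in this loop tail. First, ARM's add ... SetCC / b(vs) overflow check becomes addv, which sets T on signed 32-bit overflow, followed by b(t). Second, the ARM idiom cmp; b(lt, &loop) inverts into cmpge; bf(&loop), since SH4 has a >= compare but not a < one. A scalar model of the overflow half, assuming GCC/Clang's __builtin_add_overflow:

    #include <cstdint>
    static inline bool AddvSetsT(int32_t a, int32_t b, int32_t* sum) {
      return __builtin_add_overflow(a, b, sum);  // T == signed overflow
    }
    // Loop tail: cmpge sets T when element >= elements_end; bf(&loop)
    // keeps iterating while T is clear, i.e. while element < elements_end.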
| 3629 | 3678 |
| 3630 // If array_length is 1, return elements[0], a string. | 3679 // If array_length is 1, return elements[0], a string. |
| 3631 __ cmp(array_length, Operand(1)); | 3680 __ cmp(array_length, Operand(1)); |
| 3632 __ b(ne, &not_size_one_array); | 3681 __ b(ne, &not_size_one_array, Label::kNear); |
| 3633 __ ldr(r0, FieldMemOperand(elements, FixedArray::kHeaderSize)); | 3682 __ ldr(r0, FieldMemOperand(elements, FixedArray::kHeaderSize)); |
| 3634 __ b(&done); | 3683 __ b(&done); |
| 3635 | 3684 |
| 3636 __ bind(&not_size_one_array); | 3685 __ bind(&not_size_one_array); |
| 3637 | 3686 |
| 3638 // Live values in registers: | 3687 // Live values in registers: |
| 3639 // separator: Separator string | 3688 // separator: Separator string |
| 3640 // array_length: Length of the array. | 3689 // array_length: Length of the array. |
| 3641 // string_length: Sum of string lengths (smi). | 3690 // string_length: Sum of string lengths (smi). |
| 3642 // elements: FixedArray of strings. | 3691 // elements: FixedArray of strings. |
| 3643 | 3692 |
| 3644 // Check that the separator is a flat ASCII string. | 3693 // Check that the separator is a flat ASCII string. |
| 3645 __ JumpIfSmi(separator, &bailout); | 3694 __ JumpIfSmi(separator, &bailout); |
| 3646 __ ldr(scratch1, FieldMemOperand(separator, HeapObject::kMapOffset)); | 3695 __ ldr(scratch1, FieldMemOperand(separator, HeapObject::kMapOffset)); |
| 3647 __ ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset)); | 3696 __ ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset)); |
| 3648 __ JumpIfInstanceTypeIsNotSequentialAscii(scratch1, scratch2, &bailout); | 3697 __ JumpIfInstanceTypeIsNotSequentialAscii(scratch1, scratch2, &bailout); |
| 3649 | 3698 |
| 3650 // Add (separator length times array_length) - separator length to the | 3699 // Add (separator length times array_length) - separator length to the |
| 3651 // string_length to get the length of the result string. array_length is not | 3700 // string_length to get the length of the result string. array_length is not |
| 3652 // a smi but the other values are, so the result is a smi. | 3701 // a smi but the other values are, so the result is a smi. |
| 3653 __ ldr(scratch1, FieldMemOperand(separator, SeqAsciiString::kLengthOffset)); | 3702 __ ldr(scratch1, FieldMemOperand(separator, SeqAsciiString::kLengthOffset)); |
| 3654 __ sub(string_length, string_length, Operand(scratch1)); | 3703 __ sub(string_length, string_length, scratch1); |
| 3655 __ smull(scratch2, ip, array_length, scratch1); | 3704 __ dmuls(scratch2, ip, array_length, scratch1); |
| 3656 // Check for smi overflow. No overflow if higher 33 bits of 64-bit result are | 3705 // Check for smi overflow. No overflow if higher 33 bits of 64-bit result are |
| 3657 // zero. | 3706 // zero. |
| 3658 __ cmp(ip, Operand(0)); | 3707 __ cmp(ip, Operand(0)); |
| 3659 __ b(ne, &bailout); | 3708 __ b(ne, &bailout); |
| 3660 __ tst(scratch2, Operand(0x80000000)); | 3709 __ tst(scratch2, Operand(0x80000000)); |
| 3661 __ b(ne, &bailout); | 3710 __ b(ne, &bailout); |
| 3662 __ add(string_length, string_length, Operand(scratch2), SetCC); | 3711 __ addv(string_length, string_length, scratch2); |
| 3663 __ b(vs, &bailout); | 3712 __ b(t, &bailout); |
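smull becomes dmuls: both produce the full 64-bit product of the smi-tagged separator length and the untagged array_length, split across two registers (high half in ip, low half in scratch2). The two tests that follow bail out unless the product is a valid non-negative smi, i.e. unless the upper 33 bits of the signed result are zero. A scalar model of the bailout condition:

    #include <cstdint>
    static inline bool ProductOverflowsSmi(int32_t array_length,
                                           int32_t separator_length_smi) {
      int64_t product = (int64_t)array_length * separator_length_smi;  // dmuls
      uint32_t hi = (uint32_t)((uint64_t)product >> 32);               // ip
      uint32_t lo = (uint32_t)product;                                 // scratch2
      return hi != 0 || (lo & 0x80000000u) != 0;  // either test => bailout
    }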
| 3664 __ SmiUntag(string_length); | 3713 __ SmiUntag(string_length); |
| 3665 | 3714 |
| 3666 // Get first element in the array to free up the elements register to be used | 3715 // Get first element in the array to free up the elements register to be used |
| 3667 // for the result. | 3716 // for the result. |
| 3668 __ add(element, | 3717 __ add(element, |
| 3669 elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); | 3718 elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); |
| 3670 result = elements; // End of live range for elements. | 3719 result = elements; // End of live range for elements. |
| 3671 elements = no_reg; | 3720 elements = no_reg; |
| 3672 // Live values in registers: | 3721 // Live values in registers: |
| 3673 // element: First array element | 3722 // element: First array element |
| 3674 // separator: Separator string | 3723 // separator: Separator string |
| 3675 // string_length: Length of result string (not smi) | 3724 // string_length: Length of result string (not smi) |
| 3676 // array_length: Length of the array. | 3725 // array_length: Length of the array. |
| 3677 __ AllocateAsciiString(result, | 3726 __ AllocateAsciiString(result, |
| 3678 string_length, | 3727 string_length, |
| 3679 scratch1, | 3728 scratch1, |
| 3680 scratch2, | 3729 scratch2, |
| 3681 elements_end, | 3730 elements_end, |
| 3682 &bailout); | 3731 &bailout); |
| 3683 // Prepare for looping. Set up elements_end to end of the array. Set | 3732 // Prepare for looping. Set up elements_end to end of the array. Set |
| 3684 // result_pos to the position of the result where to write the first | 3733 // result_pos to the position of the result where to write the first |
| 3685 // character. | 3734 // character. |
| 3686 __ add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2)); | 3735 __ lsl(elements_end, array_length, Operand(kPointerSizeLog2)); |
| 3736 __ add(elements_end, element, elements_end); |
| 3687 result_pos = array_length; // End of live range for array_length. | 3737 result_pos = array_length; // End of live range for array_length. |
| 3688 array_length = no_reg; | 3738 array_length = no_reg; |
| 3689 __ add(result_pos, | 3739 __ add(result_pos, |
| 3690 result, | 3740 result, |
| 3691 Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag)); | 3741 Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag)); |
| 3692 | 3742 |
| 3693 // Check the length of the separator. | 3743 // Check the length of the separator. |
| 3694 __ ldr(scratch1, FieldMemOperand(separator, SeqAsciiString::kLengthOffset)); | 3744 __ ldr(scratch1, FieldMemOperand(separator, SeqAsciiString::kLengthOffset)); |
| 3695 __ cmp(scratch1, Operand(Smi::FromInt(1))); | 3745 __ cmpeq(scratch1, Operand(Smi::FromInt(1))); |
| 3696 __ b(eq, &one_char_separator); | 3746 __ bt_near(&one_char_separator); |
| 3697 __ b(gt, &long_separator); | 3747 __ cmpgt(scratch1, Operand(Smi::FromInt(1))); |
| 3748 __ bt(&long_separator); |
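ARM compares once and dispatches on two conditions (b(eq), then b(gt)) from the same flags. SH4's lone T bit answers one question per compare, so the three-way dispatch on the separator length costs two compares. A scalar restatement:

    #include <cstdint>
    static inline int SeparatorCase(int32_t length_smi, int32_t one_as_smi) {
      if (length_smi == one_as_smi) return 1;  // cmpeq; bt_near: one char
      if (length_smi > one_as_smi) return 2;   // cmpgt; bt: long separator
      return 0;                                // fall through: empty separator
    }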
| 3698 | 3749 |
| 3699 // Empty separator case | 3750 // Empty separator case |
| 3700 __ bind(&empty_separator_loop); | 3751 __ bind(&empty_separator_loop); |
| 3701 // Live values in registers: | 3752 // Live values in registers: |
| 3702 // result_pos: the position to which we are currently copying characters. | 3753 // result_pos: the position to which we are currently copying characters. |
| 3703 // element: Current array element. | 3754 // element: Current array element. |
| 3704 // elements_end: Array end. | 3755 // elements_end: Array end. |
| 3705 | 3756 |
| 3706 // Copy next array element to the result. | 3757 // Copy next array element to the result. |
| 3707 __ ldr(string, MemOperand(element, kPointerSize, PostIndex)); | 3758 __ ldr(string, MemOperand(element, kPointerSize, PostIndex)); |
| 3708 __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset)); | 3759 __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset)); |
| 3709 __ SmiUntag(string_length); | 3760 __ SmiUntag(string_length); |
| 3710 __ add(string, string, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag)); | 3761 __ add(string, string, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag)); |
| 3711 __ CopyBytes(string, result_pos, string_length, scratch1); | 3762 __ CopyBytes(string, result_pos, string_length, scratch1); |
| 3712 __ cmp(element, elements_end); | 3763 __ cmpge(element, elements_end); |
| 3713 __ b(lt, &empty_separator_loop); // End while (element < elements_end). | 3764 __ bf(&empty_separator_loop); // End while (element < elements_end). |
| 3714 ASSERT(result.is(r0)); | 3765 ASSERT(result.is(r0)); |
| 3715 __ b(&done); | 3766 __ b(&done); |
| 3716 | 3767 |
| 3717 // One-character separator case | 3768 // One-character separator case |
| 3718 __ bind(&one_char_separator); | 3769 __ bind(&one_char_separator); |
| 3719 // Replace separator with its ASCII character value. | 3770 // Replace separator with its ASCII character value. |
| 3720 __ ldrb(separator, FieldMemOperand(separator, SeqAsciiString::kHeaderSize)); | 3771 __ ldrb(separator, FieldMemOperand(separator, SeqAsciiString::kHeaderSize)); |
| 3721 // Jump into the loop after the code that copies the separator, so the first | 3772 // Jump into the loop after the code that copies the separator, so the first |
| 3722 // element is not preceded by a separator. | 3773 // element is not preceded by a separator. |
| 3723 __ jmp(&one_char_separator_loop_entry); | 3774 __ jmp_near(&one_char_separator_loop_entry); |
| 3724 | 3775 |
| 3725 __ bind(&one_char_separator_loop); | 3776 __ bind(&one_char_separator_loop); |
| 3726 // Live values in registers: | 3777 // Live values in registers: |
| 3727 // result_pos: the position to which we are currently copying characters. | 3778 // result_pos: the position to which we are currently copying characters. |
| 3728 // element: Current array element. | 3779 // element: Current array element. |
| 3729 // elements_end: Array end. | 3780 // elements_end: Array end. |
| 3730 // separator: Single separator ASCII char (in lower byte). | 3781 // separator: Single separator ASCII char (in lower byte). |
| 3731 | 3782 |
| 3732 // Copy the separator character to the result. | 3783 // Copy the separator character to the result. |
| 3733 __ strb(separator, MemOperand(result_pos, 1, PostIndex)); | 3784 __ strb(separator, MemOperand(result_pos)); |
| 3785 __ add(result_pos, result_pos, Operand(1)); |
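ARM's post-indexed strb stores the byte and bumps result_pos in one instruction. SH4 only has a pre-decrement store form, so the port splits the operation into a plain store plus an add. The equivalent scalar form:

    #include <cstdint>
    static inline void StoreBytePostIncrement(uint8_t** result_pos,
                                              uint8_t separator_char) {
      **result_pos = separator_char;  // strb separator, @result_pos
      *result_pos += 1;               // add result_pos, result_pos, #1
    }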
| 3734 | 3786 |
| 3735 // Copy next array element to the result. | 3787 // Copy next array element to the result. |
| 3736 __ bind(&one_char_separator_loop_entry); | 3788 __ bind(&one_char_separator_loop_entry); |
| 3737 __ ldr(string, MemOperand(element, kPointerSize, PostIndex)); | 3789 __ ldr(string, MemOperand(element, kPointerSize, PostIndex)); |
| 3738 __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset)); | 3790 __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset)); |
| 3739 __ SmiUntag(string_length); | 3791 __ SmiUntag(string_length); |
| 3740 __ add(string, string, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag)); | 3792 __ add(string, string, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag)); |
| 3741 __ CopyBytes(string, result_pos, string_length, scratch1); | 3793 __ CopyBytes(string, result_pos, string_length, scratch1); |
| 3742 __ cmp(element, elements_end); | 3794 __ cmpge(element, elements_end); |
| 3743 __ b(lt, &one_char_separator_loop); // End while (element < elements_end). | 3795 __ bf(&one_char_separator_loop); // End while (element < elements_end). |
| 3744 ASSERT(result.is(r0)); | 3796 ASSERT(result.is(r0)); |
| 3745 __ b(&done); | 3797 __ b(&done); |
| 3746 | 3798 |
| 3747 // Long separator case (separator is more than one character). Entry is at the | 3799 // Long separator case (separator is more than one character). Entry is at the |
| 3748 // label long_separator below. | 3800 // label long_separator below. |
| 3749 __ bind(&long_separator_loop); | 3801 __ bind(&long_separator_loop); |
| 3750 // Live values in registers: | 3802 // Live values in registers: |
| 3751 // result_pos: the position to which we are currently copying characters. | 3803 // result_pos: the position to which we are currently copying characters. |
| 3752 // element: Current array element. | 3804 // element: Current array element. |
| 3753 // elements_end: Array end. | 3805 // elements_end: Array end. |
| 3754 // separator: Separator string. | 3806 // separator: Separator string. |
| 3755 | 3807 |
| 3756 // Copy the separator to the result. | 3808 // Copy the separator to the result. |
| 3757 __ ldr(string_length, FieldMemOperand(separator, String::kLengthOffset)); | 3809 __ ldr(string_length, FieldMemOperand(separator, String::kLengthOffset)); |
| 3758 __ SmiUntag(string_length); | 3810 __ SmiUntag(string_length); |
| 3759 __ add(string, | 3811 __ add(string, |
| 3760 separator, | 3812 separator, |
| 3761 Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag)); | 3813 Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag)); |
| 3762 __ CopyBytes(string, result_pos, string_length, scratch1); | 3814 __ CopyBytes(string, result_pos, string_length, scratch1); |
| 3763 | 3815 |
| 3764 __ bind(&long_separator); | 3816 __ bind(&long_separator); |
| 3765 __ ldr(string, MemOperand(element, kPointerSize, PostIndex)); | 3817 __ ldr(string, MemOperand(element, kPointerSize, PostIndex)); |
| 3766 __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset)); | 3818 __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset)); |
| 3767 __ SmiUntag(string_length); | 3819 __ SmiUntag(string_length); |
| 3768 __ add(string, string, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag)); | 3820 __ add(string, string, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag)); |
| 3769 __ CopyBytes(string, result_pos, string_length, scratch1); | 3821 __ CopyBytes(string, result_pos, string_length, scratch1); |
| 3770 __ cmp(element, elements_end); | 3822 __ cmpge(element, elements_end); |
| 3771 __ b(lt, &long_separator_loop); // End while (element < elements_end). | 3823 __ bf(&long_separator_loop); // End while (element < elements_end). |
| 3772 ASSERT(result.is(r0)); | 3824 ASSERT(result.is(r0)); |
| 3773 __ b(&done); | 3825 __ b(&done); |
| 3774 | 3826 |
| 3775 __ bind(&bailout); | 3827 __ bind(&bailout); |
| 3776 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex); | 3828 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex); |
| 3777 __ bind(&done); | 3829 __ bind(&done); |
| 3778 context()->Plug(r0); | 3830 context()->Plug(r0); |
| 3779 } | 3831 } |
| 3780 | 3832 |
| 3781 | 3833 |
| (...skipping 271 matching lines...) |
| 4053 } | 4105 } |
| 4054 } | 4106 } |
| 4055 | 4107 |
| 4056 | 4108 |
| 4057 // Inline smi case if we are in a loop. | 4109 // Inline smi case if we are in a loop. |
| 4058 Label stub_call, done; | 4110 Label stub_call, done; |
| 4059 JumpPatchSite patch_site(masm_); | 4111 JumpPatchSite patch_site(masm_); |
| 4060 | 4112 |
| 4061 int count_value = expr->op() == Token::INC ? 1 : -1; | 4113 int count_value = expr->op() == Token::INC ? 1 : -1; |
| 4062 if (ShouldInlineSmiCase(expr->op())) { | 4114 if (ShouldInlineSmiCase(expr->op())) { |
| 4063 __ add(r0, r0, Operand(Smi::FromInt(count_value)), SetCC); | 4115 __ addv(r0, r0, Operand(Smi::FromInt(count_value))); |
| 4064 __ b(vs, &stub_call); | 4116 __ b(t, &stub_call); |
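With a one-bit smi tag, adding Smi::FromInt(±1) to the tagged word increments or decrements the untagged value directly, and addv's T bit catches a wrap past the 31-bit smi range in the same instruction (ARM spelled this add ... SetCC / b(vs)). A minimal model, again assuming __builtin_add_overflow:

    #include <cstdint>
    static inline bool SmiCountOverflows(int32_t* tagged, int count_value) {
      int32_t delta = count_value * 2;  // Smi::FromInt(count_value)
      return __builtin_add_overflow(*tagged, delta, tagged);  // the T bit
    }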
| 4065 // We could eliminate this smi check if we split the code at | 4117 // We could eliminate this smi check if we split the code at |
| 4066 // the first smi check before calling ToNumber. | 4118 // the first smi check before calling ToNumber. |
| 4067 patch_site.EmitJumpIfSmi(r0, &done); | 4119 patch_site.EmitJumpIfSmi(r0, &done); |
| 4068 | 4120 |
| 4069 __ bind(&stub_call); | 4121 __ bind(&stub_call); |
| 4070 // Call stub. Undo operation first. | 4122 // Call stub. Undo operation first. |
| 4071 __ sub(r0, r0, Operand(Smi::FromInt(count_value))); | 4123 __ sub(r0, r0, Operand(Smi::FromInt(count_value))); |
| 4072 } | 4124 } |
| 4073 __ mov(r1, Operand(Smi::FromInt(count_value))); | 4125 __ mov(r1, Operand(Smi::FromInt(count_value))); |
| 4074 | 4126 |
| (...skipping 118 matching lines...) |
| 4193 | 4245 |
| 4194 if (check->Equals(isolate()->heap()->number_symbol())) { | 4246 if (check->Equals(isolate()->heap()->number_symbol())) { |
| 4195 __ JumpIfSmi(r0, if_true); | 4247 __ JumpIfSmi(r0, if_true); |
| 4196 __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset)); | 4248 __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset)); |
| 4197 __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex); | 4249 __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex); |
| 4198 __ cmp(r0, ip); | 4250 __ cmp(r0, ip); |
| 4199 Split(eq, if_true, if_false, fall_through); | 4251 Split(eq, if_true, if_false, fall_through); |
| 4200 } else if (check->Equals(isolate()->heap()->string_symbol())) { | 4252 } else if (check->Equals(isolate()->heap()->string_symbol())) { |
| 4201 __ JumpIfSmi(r0, if_false); | 4253 __ JumpIfSmi(r0, if_false); |
| 4202 // Check for undetectable objects => false. | 4254 // Check for undetectable objects => false. |
| 4203 __ CompareObjectType(r0, r0, r1, FIRST_NONSTRING_TYPE); | 4255 __ CompareObjectType(r0, r0, r1, FIRST_NONSTRING_TYPE, ge); |
| 4204 __ b(ge, if_false); | 4256 __ bt(if_false); |
| 4205 __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset)); | 4257 __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset)); |
| 4206 __ tst(r1, Operand(1 << Map::kIsUndetectable)); | 4258 __ tst(r1, Operand(1 << Map::kIsUndetectable)); |
| 4207 Split(eq, if_true, if_false, fall_through); | 4259 Split(eq, if_true, if_false, fall_through); |
| 4208 } else if (check->Equals(isolate()->heap()->boolean_symbol())) { | 4260 } else if (check->Equals(isolate()->heap()->boolean_symbol())) { |
| 4209 __ CompareRoot(r0, Heap::kTrueValueRootIndex); | 4261 __ CompareRoot(r0, Heap::kTrueValueRootIndex); |
| 4210 __ b(eq, if_true); | 4262 __ b(eq, if_true); |
| 4211 __ CompareRoot(r0, Heap::kFalseValueRootIndex); | 4263 __ CompareRoot(r0, Heap::kFalseValueRootIndex); |
| 4212 Split(eq, if_true, if_false, fall_through); | 4264 Split(eq, if_true, if_false, fall_through); |
| 4213 } else if (FLAG_harmony_typeof && | 4265 } else if (FLAG_harmony_typeof && |
| 4214 check->Equals(isolate()->heap()->null_symbol())) { | 4266 check->Equals(isolate()->heap()->null_symbol())) { |
| 4215 __ CompareRoot(r0, Heap::kNullValueRootIndex); | 4267 __ CompareRoot(r0, Heap::kNullValueRootIndex); |
| 4216 Split(eq, if_true, if_false, fall_through); | 4268 Split(eq, if_true, if_false, fall_through); |
| 4217 } else if (check->Equals(isolate()->heap()->undefined_symbol())) { | 4269 } else if (check->Equals(isolate()->heap()->undefined_symbol())) { |
| 4218 __ CompareRoot(r0, Heap::kUndefinedValueRootIndex); | 4270 __ CompareRoot(r0, Heap::kUndefinedValueRootIndex); |
| 4219 __ b(eq, if_true); | 4271 __ b(eq, if_true); |
| 4220 __ JumpIfSmi(r0, if_false); | 4272 __ JumpIfSmi(r0, if_false); |
| 4221 // Check for undetectable objects => true. | 4273 // Check for undetectable objects => true. |
| 4222 __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset)); | 4274 __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset)); |
| 4223 __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset)); | 4275 __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset)); |
| 4224 __ tst(r1, Operand(1 << Map::kIsUndetectable)); | 4276 __ tst(r1, Operand(1 << Map::kIsUndetectable)); |
| 4225 Split(ne, if_true, if_false, fall_through); | 4277 Split(ne, if_true, if_false, fall_through); |
| 4226 | 4278 |
| 4227 } else if (check->Equals(isolate()->heap()->function_symbol())) { | 4279 } else if (check->Equals(isolate()->heap()->function_symbol())) { |
| 4228 __ JumpIfSmi(r0, if_false); | 4280 __ JumpIfSmi(r0, if_false); |
| 4229 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2); | 4281 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2); |
| 4230 __ CompareObjectType(r0, r0, r1, JS_FUNCTION_TYPE); | 4282 __ CompareObjectType(r0, r0, r1, JS_FUNCTION_TYPE, eq); |
| 4231 __ b(eq, if_true); | 4283 __ b(eq, if_true); |
| 4232 __ cmp(r1, Operand(JS_FUNCTION_PROXY_TYPE)); | 4284 __ cmp(r1, Operand(JS_FUNCTION_PROXY_TYPE)); |
| 4233 Split(eq, if_true, if_false, fall_through); | 4285 Split(eq, if_true, if_false, fall_through); |
| 4234 } else if (check->Equals(isolate()->heap()->object_symbol())) { | 4286 } else if (check->Equals(isolate()->heap()->object_symbol())) { |
| 4235 __ JumpIfSmi(r0, if_false); | 4287 __ JumpIfSmi(r0, if_false); |
| 4236 if (!FLAG_harmony_typeof) { | 4288 if (!FLAG_harmony_typeof) { |
| 4237 __ CompareRoot(r0, Heap::kNullValueRootIndex); | 4289 __ CompareRoot(r0, Heap::kNullValueRootIndex); |
| 4238 __ b(eq, if_true); | 4290 __ b(eq, if_true); |
| 4239 } | 4291 } |
| 4240 // Check for JS objects => true. | 4292 // Check for JS objects => true. |
| 4241 __ CompareObjectType(r0, r0, r1, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE); | 4293 __ CompareObjectType(r0, r0, r1, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE, ge); |
| 4242 __ b(lt, if_false); | 4294 __ bf(if_false); |
| 4243 __ CompareInstanceType(r0, r1, LAST_NONCALLABLE_SPEC_OBJECT_TYPE); | 4295 __ CompareInstanceType(r0, r1, LAST_NONCALLABLE_SPEC_OBJECT_TYPE, gt); |
| 4244 __ b(gt, if_false); | 4296 __ bt(if_false); |
| 4245 // Check for undetectable objects => false. | 4297 // Check for undetectable objects => false. |
| 4246 __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset)); | 4298 __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset)); |
| 4247 __ tst(r1, Operand(1 << Map::kIsUndetectable)); | 4299 __ tst(r1, Operand(1 << Map::kIsUndetectable)); |
| 4248 Split(eq, if_true, if_false, fall_through); | 4300 Split(eq, if_true, if_false, fall_through); |
| 4249 } else { | 4301 } else { |
| 4250 if (if_false != fall_through) __ jmp(if_false); | 4302 if (if_false != fall_through) __ jmp(if_false); |
| 4251 } | 4303 } |
| 4252 context()->Plug(if_true, if_false); | 4304 context()->Plug(if_true, if_false); |
| 4253 } | 4305 } |
| 4254 | 4306 |
| (...skipping 62 matching lines...) |
| 4317 case Token::INSTANCEOF: | 4369 case Token::INSTANCEOF: |
| 4318 default: | 4370 default: |
| 4319 UNREACHABLE(); | 4371 UNREACHABLE(); |
| 4320 } | 4372 } |
| 4321 __ pop(r1); | 4373 __ pop(r1); |
| 4322 | 4374 |
| 4323 bool inline_smi_code = ShouldInlineSmiCase(op); | 4375 bool inline_smi_code = ShouldInlineSmiCase(op); |
| 4324 JumpPatchSite patch_site(masm_); | 4376 JumpPatchSite patch_site(masm_); |
| 4325 if (inline_smi_code) { | 4377 if (inline_smi_code) { |
| 4326 Label slow_case; | 4378 Label slow_case; |
| 4327 __ orr(r2, r0, Operand(r1)); | 4379 __ orr(r2, r0, r1); |
| 4328 patch_site.EmitJumpIfNotSmi(r2, &slow_case); | 4380 patch_site.EmitJumpIfNotSmi(r2, &slow_case); |
| 4329 __ cmp(r1, r0); | 4381 Condition tmp_cond = cond; |
| 4330 Split(cond, if_true, if_false, NULL); | 4382 __ cmp(&tmp_cond, r1, r0); |
| 4383 Split(tmp_cond, if_true, if_false, NULL); |
| 4331 __ bind(&slow_case); | 4384 __ bind(&slow_case); |
| 4332 } | 4385 } |
| 4333 | 4386 |
| 4334 // Record position and call the compare IC. | 4387 // Record position and call the compare IC. |
| 4335 SetSourcePosition(expr->position()); | 4388 SetSourcePosition(expr->position()); |
| 4336 Handle<Code> ic = CompareIC::GetUninitialized(op); | 4389 Handle<Code> ic = CompareIC::GetUninitialized(op); |
| 4337 CallIC(ic, RelocInfo::CODE_TARGET, expr->CompareOperationFeedbackId()); | 4390 CallIC(ic, RelocInfo::CODE_TARGET, expr->CompareOperationFeedbackId()); |
| 4338 patch_site.EmitPatchInfo(); | 4391 patch_site.EmitPatchInfo(); |
| 4339 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); | 4392 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); |
| 4340 __ cmp(r0, Operand(0)); | 4393 __ cmp(&cond, r0, Operand(0)); |
| 4341 Split(cond, if_true, if_false, fall_through); | 4394 Split(cond, if_true, if_false, fall_through); |
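The cmp here takes the condition by pointer, and the inline smi path above copies cond into tmp_cond before comparing. A plausible reason: SH4 only has eq/ge/gt (plus unsigned) compares, so the macro may need to swap operands or flip the sense, handing the caller back whichever condition now describes the T bit for Split. A hypothetical sketch of such a canonicalization; the real logic lives in the port's macro-assembler:

    enum Condition { eq, ne, lt, le, ge, gt };
    // Hypothetical: rewrite lt/le by swapping operands, since only
    // eq/ge/gt compares exist in the instruction set.
    static Condition CanonicalizeForSh4(Condition c, bool* swap_operands) {
      switch (c) {
        case lt: *swap_operands = true; return gt;  // a < b  ==  b > a
        case le: *swap_operands = true; return ge;  // a <= b ==  b >= a
        default: *swap_operands = false; return c;
      }
    }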
| 4342 } | 4395 } |
| 4343 } | 4396 } |
| 4344 | 4397 |
| 4345 // Convert the result of the comparison into one expected for this | 4398 // Convert the result of the comparison into one expected for this |
| 4346 // expression's context. | 4399 // expression's context. |
| 4347 context()->Plug(if_true, if_false); | 4400 context()->Plug(if_true, if_false); |
| 4348 } | 4401 } |
| 4349 | 4402 |
| 4350 | 4403 |
| (...skipping 21 matching lines...) |
| 4372 Heap::kUndefinedValueRootIndex : | 4425 Heap::kUndefinedValueRootIndex : |
| 4373 Heap::kNullValueRootIndex; | 4426 Heap::kNullValueRootIndex; |
| 4374 __ b(eq, if_true); | 4427 __ b(eq, if_true); |
| 4375 __ LoadRoot(r1, other_nil_value); | 4428 __ LoadRoot(r1, other_nil_value); |
| 4376 __ cmp(r0, r1); | 4429 __ cmp(r0, r1); |
| 4377 __ b(eq, if_true); | 4430 __ b(eq, if_true); |
| 4378 __ JumpIfSmi(r0, if_false); | 4431 __ JumpIfSmi(r0, if_false); |
| 4379 // It can be an undetectable object. | 4432 // It can be an undetectable object. |
| 4380 __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset)); | 4433 __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset)); |
| 4381 __ ldrb(r1, FieldMemOperand(r1, Map::kBitFieldOffset)); | 4434 __ ldrb(r1, FieldMemOperand(r1, Map::kBitFieldOffset)); |
| 4382 __ and_(r1, r1, Operand(1 << Map::kIsUndetectable)); | 4435 __ land(r1, r1, Operand(1 << Map::kIsUndetectable)); |
| 4383 __ cmp(r1, Operand(1 << Map::kIsUndetectable)); | 4436 __ cmp(r1, Operand(1 << Map::kIsUndetectable)); |
| 4384 Split(eq, if_true, if_false, fall_through); | 4437 Split(eq, if_true, if_false, fall_through); |
| 4385 } | 4438 } |
| 4386 context()->Plug(if_true, if_false); | 4439 context()->Plug(if_true, if_false); |
| 4387 } | 4440 } |
| 4388 | 4441 |
| 4389 | 4442 |
| 4390 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) { | 4443 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) { |
| 4391 __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); | 4444 __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); |
| 4392 context()->Plug(r0); | 4445 context()->Plug(r0); |
| (...skipping 44 matching lines...) |
| 4437 | 4490 |
| 4438 | 4491 |
| 4439 // ---------------------------------------------------------------------------- | 4492 // ---------------------------------------------------------------------------- |
| 4440 // Non-local control flow support. | 4493 // Non-local control flow support. |
| 4441 | 4494 |
| 4442 void FullCodeGenerator::EnterFinallyBlock() { | 4495 void FullCodeGenerator::EnterFinallyBlock() { |
| 4443 ASSERT(!result_register().is(r1)); | 4496 ASSERT(!result_register().is(r1)); |
| 4444 // Store result register while executing finally block. | 4497 // Store result register while executing finally block. |
| 4445 __ push(result_register()); | 4498 __ push(result_register()); |
| 4446 // Cook return address in link register to stack (smi encoded Code* delta) | 4499 // Cook return address in link register to stack (smi encoded Code* delta) |
| 4447 __ sub(r1, lr, Operand(masm_->CodeObject())); | 4500 __ strpr(r1); |
| 4501 __ sub(r1, r1, Operand(masm_->CodeObject())); |
| 4448 ASSERT_EQ(1, kSmiTagSize + kSmiShiftSize); | 4502 ASSERT_EQ(1, kSmiTagSize + kSmiShiftSize); |
| 4449 STATIC_ASSERT(kSmiTag == 0); | 4503 STATIC_ASSERT(kSmiTag == 0); |
| 4450 __ add(r1, r1, Operand(r1)); // Convert to smi. | 4504 __ add(r1, r1, r1); // Convert to smi. |
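ARM reads the return address straight out of lr; SH4 keeps it in pr, so the port first copies pr into r1 (the strpr line) before rebasing it on the code object. The smi tag is then applied by add r1, r1, r1, exploiting x + x == x << 1, which matches the one-bit tag asserted just above. A scalar model of the cooked value:

    #include <cstdint>
    static inline int32_t CookReturnAddress(uintptr_t return_address,
                                            uintptr_t code_object_start) {
      int32_t delta = (int32_t)(return_address - code_object_start);
      return delta + delta;  // smi-encode: delta << 1
    }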
| 4451 | 4505 |
| 4452 // Store result register while executing finally block. | 4506 // Store result register while executing finally block. |
| 4453 __ push(r1); | 4507 __ push(r1); |
| 4454 | 4508 |
| 4455 // Store pending message while executing finally block. | 4509 // Store pending message while executing finally block. |
| 4456 ExternalReference pending_message_obj = | 4510 ExternalReference pending_message_obj = |
| 4457 ExternalReference::address_of_pending_message_obj(isolate()); | 4511 ExternalReference::address_of_pending_message_obj(isolate()); |
| 4458 __ mov(ip, Operand(pending_message_obj)); | 4512 __ mov(ip, Operand(pending_message_obj)); |
| 4459 __ ldr(r1, MemOperand(ip)); | 4513 __ ldr(r1, MemOperand(ip)); |
| 4460 __ push(r1); | 4514 __ push(r1); |
| (...skipping 30 matching lines...) |
| 4491 __ str(r1, MemOperand(ip)); | 4545 __ str(r1, MemOperand(ip)); |
| 4492 | 4546 |
| 4493 __ pop(r1); | 4547 __ pop(r1); |
| 4494 ExternalReference pending_message_obj = | 4548 ExternalReference pending_message_obj = |
| 4495 ExternalReference::address_of_pending_message_obj(isolate()); | 4549 ExternalReference::address_of_pending_message_obj(isolate()); |
| 4496 __ mov(ip, Operand(pending_message_obj)); | 4550 __ mov(ip, Operand(pending_message_obj)); |
| 4497 __ str(r1, MemOperand(ip)); | 4551 __ str(r1, MemOperand(ip)); |
| 4498 | 4552 |
| 4499 // Restore result register from stack. | 4553 // Restore result register from stack. |
| 4500 __ pop(r1); | 4554 __ pop(r1); |
| 4501 | |
| 4502 // Uncook return address and return. | 4555 // Uncook return address and return. |
| 4503 __ pop(result_register()); | 4556 __ pop(result_register()); |
| 4504 ASSERT_EQ(1, kSmiTagSize + kSmiShiftSize); | 4557 ASSERT_EQ(1, kSmiTagSize + kSmiShiftSize); |
| 4505 __ mov(r1, Operand(r1, ASR, 1)); // Un-smi-tag value. | 4558 __ asr(r1, r1, Operand(1)); // Un-smi-tag value. |
| 4506 __ add(pc, r1, Operand(masm_->CodeObject())); | 4559 __ add(r1, r1, Operand(masm_->CodeObject())); |
| 4560 __ jmp(r1); |
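Un-cooking mirrors the encoding: arithmetic shift right by one to drop the smi tag, add the code object base back, then jump. ARM can add straight into pc; SH4 has no architectural pc destination, hence the extra jmp through r1. In scalar form:

    #include <cstdint>
    static inline uintptr_t UncookReturnAddress(int32_t cooked_smi,
                                                uintptr_t code_object_start) {
      return code_object_start + (cooked_smi >> 1);  // asr #1, then add
    }
    // __ jmp(r1) then transfers control to the restored return address.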
| 4507 } | 4561 } |
| 4508 | 4562 |
| 4509 | 4563 |
| 4510 #undef __ | 4564 #undef __ |
| 4511 | 4565 |
| 4512 #define __ ACCESS_MASM(masm()) | 4566 #define __ ACCESS_MASM(masm()) |
| 4513 | 4567 |
| 4514 FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit( | 4568 FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit( |
| 4515 int* stack_depth, | 4569 int* stack_depth, |
| 4516 int* context_length) { | 4570 int* context_length) { |
| 4517 // The macros used here must preserve the result register. | 4571 // The macros used here must preserve the result register. |
| 4518 | 4572 |
| 4519 // Because the handler block contains the context of the finally | 4573 // Because the handler block contains the context of the finally |
| 4520 // code, we can restore it directly from there for the finally code | 4574 // code, we can restore it directly from there for the finally code |
| 4521 // rather than iteratively unwinding contexts via their previous | 4575 // rather than iteratively unwinding contexts via their previous |
| 4522 // links. | 4576 // links. |
| 4523 __ Drop(*stack_depth); // Down to the handler block. | 4577 __ Drop(*stack_depth); // Down to the handler block. |
| 4524 if (*context_length > 0) { | 4578 if (*context_length > 0) { |
| 4525 // Restore the context to its dedicated register and the stack. | 4579 // Restore the context to its dedicated register and the stack. |
| 4526 __ ldr(cp, MemOperand(sp, StackHandlerConstants::kContextOffset)); | 4580 __ ldr(cp, MemOperand(sp, StackHandlerConstants::kContextOffset)); |
| 4527 __ str(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); | 4581 __ str(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
| 4528 } | 4582 } |
| 4529 __ PopTryHandler(); | 4583 __ PopTryHandler(); |
| 4530 __ bl(finally_entry_); | 4584 __ jsr(finally_entry_); |
| 4531 | 4585 |
| 4532 *stack_depth = 0; | 4586 *stack_depth = 0; |
| 4533 *context_length = 0; | 4587 *context_length = 0; |
| 4534 return previous_; | 4588 return previous_; |
| 4535 } | 4589 } |
| 4536 | 4590 |
| 4537 | 4591 |
| 4538 #undef __ | 4592 #undef __ |
| 4539 | 4593 |
| 4540 } } // namespace v8::internal | 4594 } } // namespace v8::internal |
| 4541 | 4595 |
| 4542 #endif // V8_TARGET_ARCH_ARM | 4596 #endif // V8_TARGET_ARCH_SH4 |