| OLD | NEW |
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 392 matching lines...) |
| 403 | 403 |
| 404 int LCodeGen::ToInteger32(LConstantOperand* op) const { | 404 int LCodeGen::ToInteger32(LConstantOperand* op) const { |
| 405 Handle<Object> value = chunk_->LookupLiteral(op); | 405 Handle<Object> value = chunk_->LookupLiteral(op); |
| 406 ASSERT(chunk_->LookupLiteralRepresentation(op).IsInteger32()); | 406 ASSERT(chunk_->LookupLiteralRepresentation(op).IsInteger32()); |
| 407 ASSERT(static_cast<double>(static_cast<int32_t>(value->Number())) == | 407 ASSERT(static_cast<double>(static_cast<int32_t>(value->Number())) == |
| 408 value->Number()); | 408 value->Number()); |
| 409 return static_cast<int32_t>(value->Number()); | 409 return static_cast<int32_t>(value->Number()); |
| 410 } | 410 } |
| 411 | 411 |
| 412 | 412 |
| 413 double LCodeGen::ToDouble(LConstantOperand* op) const { |
| 414 Handle<Object> value = chunk_->LookupLiteral(op); |
| 415 return value->Number(); |
| 416 } |
| 417 |
| 418 |
| 413 Operand LCodeGen::ToOperand(LOperand* op) { | 419 Operand LCodeGen::ToOperand(LOperand* op) { |
| 414 if (op->IsConstantOperand()) { | 420 if (op->IsConstantOperand()) { |
| 415 LConstantOperand* const_op = LConstantOperand::cast(op); | 421 LConstantOperand* const_op = LConstantOperand::cast(op); |
| 416 Handle<Object> literal = chunk_->LookupLiteral(const_op); | 422 Handle<Object> literal = chunk_->LookupLiteral(const_op); |
| 417 Representation r = chunk_->LookupLiteralRepresentation(const_op); | 423 Representation r = chunk_->LookupLiteralRepresentation(const_op); |
| 418 if (r.IsInteger32()) { | 424 if (r.IsInteger32()) { |
| 419 ASSERT(literal->IsNumber()); | 425 ASSERT(literal->IsNumber()); |
| 420 return Operand(static_cast<int32_t>(literal->Number())); | 426 return Operand(static_cast<int32_t>(literal->Number())); |
| 421 } else if (r.IsDouble()) { | 427 } else if (r.IsDouble()) { |
| 422 Abort("ToOperand Unsupported double immediate."); | 428 Abort("ToOperand Unsupported double immediate."); |
| (...skipping 1275 matching lines...) |
| 1698 break; | 1704 break; |
| 1699 case Token::IN: | 1705 case Token::IN: |
| 1700 case Token::INSTANCEOF: | 1706 case Token::INSTANCEOF: |
| 1701 default: | 1707 default: |
| 1702 UNREACHABLE(); | 1708 UNREACHABLE(); |
| 1703 } | 1709 } |
| 1704 return cond; | 1710 return cond; |
| 1705 } | 1711 } |
| 1706 | 1712 |
| 1707 | 1713 |
| 1708 void LCodeGen::EmitCmpI(LOperand* left, LOperand* right) { | |
| 1709 __ cmp(ToRegister(left), ToRegister(right)); | |
| 1710 } | |
| 1711 | |
| 1712 | |
| 1713 void LCodeGen::DoCmpIDAndBranch(LCmpIDAndBranch* instr) { | 1714 void LCodeGen::DoCmpIDAndBranch(LCmpIDAndBranch* instr) { |
| 1714 LOperand* left = instr->InputAt(0); | 1715 LOperand* left = instr->InputAt(0); |
| 1715 LOperand* right = instr->InputAt(1); | 1716 LOperand* right = instr->InputAt(1); |
| 1716 int false_block = chunk_->LookupDestination(instr->false_block_id()); | 1717 int false_block = chunk_->LookupDestination(instr->false_block_id()); |
| 1717 int true_block = chunk_->LookupDestination(instr->true_block_id()); | 1718 int true_block = chunk_->LookupDestination(instr->true_block_id()); |
| 1719 Condition cond = TokenToCondition(instr->op(), false); |
| 1718 | 1720 |
| 1719 if (instr->is_double()) { | 1721 if (left->IsConstantOperand() && right->IsConstantOperand()) { |
| 1720 // Compare left and right as doubles and load the | 1722 // We can statically evaluate the comparison. |
| 1721 // resulting flags into the normal status register. | 1723 double left_val = ToDouble(LConstantOperand::cast(left)); |
| 1722 __ VFPCompareAndSetFlags(ToDoubleRegister(left), ToDoubleRegister(right)); | 1724 double right_val = ToDouble(LConstantOperand::cast(right)); |
| 1723 // If a NaN is involved, i.e. the result is unordered (V set), | 1725 int next_block = |
| 1724 // jump to false block label. | 1726 EvalComparison(instr->op(), left_val, right_val) ? true_block |
| 1725 __ b(vs, chunk_->GetAssemblyLabel(false_block)); | 1727 : false_block; |
| 1728 EmitGoto(next_block); |
| 1726 } else { | 1729 } else { |
| 1727 EmitCmpI(left, right); | 1730 if (instr->is_double()) { |
| 1731 // Compare left and right operands as doubles and load the |
| 1732 // resulting flags into the normal status register. |
| 1733 __ VFPCompareAndSetFlags(ToDoubleRegister(left), ToDoubleRegister(right)); |
| 1734 // If a NaN is involved, i.e. the result is unordered (V set), |
| 1735 // jump to false block label. |
| 1736 __ b(vs, chunk_->GetAssemblyLabel(false_block)); |
| 1737 } else { |
| 1738 if (right->IsConstantOperand()) { |
| 1739 __ cmp(ToRegister(left), |
| 1740 Operand(ToInteger32(LConstantOperand::cast(right)))); |
| 1741 } else if (left->IsConstantOperand()) { |
| 1742 __ cmp(ToRegister(right), |
| 1743 Operand(ToInteger32(LConstantOperand::cast(left)))); |
| 1744 // We transposed the operands. Reverse the condition. |
| 1745 cond = ReverseCondition(cond); |
| 1746 } else { |
| 1747 __ cmp(ToRegister(left), ToRegister(right)); |
| 1748 } |
| 1749 } |
| 1750 EmitBranch(true_block, false_block, cond); |
| 1728 } | 1751 } |
| 1729 | |
| 1730 Condition cc = TokenToCondition(instr->op(), instr->is_double()); | |
| 1731 EmitBranch(true_block, false_block, cc); | |
| 1732 } | 1752 } |
| 1733 | 1753 |
| 1734 | 1754 |
| 1735 void LCodeGen::DoCmpObjectEqAndBranch(LCmpObjectEqAndBranch* instr) { | 1755 void LCodeGen::DoCmpObjectEqAndBranch(LCmpObjectEqAndBranch* instr) { |
| 1736 Register left = ToRegister(instr->InputAt(0)); | 1756 Register left = ToRegister(instr->InputAt(0)); |
| 1737 Register right = ToRegister(instr->InputAt(1)); | 1757 Register right = ToRegister(instr->InputAt(1)); |
| 1738 int false_block = chunk_->LookupDestination(instr->false_block_id()); | 1758 int false_block = chunk_->LookupDestination(instr->false_block_id()); |
| 1739 int true_block = chunk_->LookupDestination(instr->true_block_id()); | 1759 int true_block = chunk_->LookupDestination(instr->true_block_id()); |
| 1740 | 1760 |
| 1741 __ cmp(left, Operand(right)); | 1761 __ cmp(left, Operand(right)); |
| (...skipping 427 matching lines...) |
| 2169 | 2189 |
| 2170 | 2190 |
| 2171 void LCodeGen::DoCmpT(LCmpT* instr) { | 2191 void LCodeGen::DoCmpT(LCmpT* instr) { |
| 2172 Token::Value op = instr->op(); | 2192 Token::Value op = instr->op(); |
| 2173 | 2193 |
| 2174 Handle<Code> ic = CompareIC::GetUninitialized(op); | 2194 Handle<Code> ic = CompareIC::GetUninitialized(op); |
| 2175 CallCode(ic, RelocInfo::CODE_TARGET, instr); | 2195 CallCode(ic, RelocInfo::CODE_TARGET, instr); |
| 2176 __ cmp(r0, Operand(0)); // This instruction also signals no smi code inlined. | 2196 __ cmp(r0, Operand(0)); // This instruction also signals no smi code inlined. |
| 2177 | 2197 |
| 2178 Condition condition = ComputeCompareCondition(op); | 2198 Condition condition = ComputeCompareCondition(op); |
| 2179 if (op == Token::GT || op == Token::LTE) { | |
| 2180 condition = ReverseCondition(condition); | |
| 2181 } | |
| 2182 __ LoadRoot(ToRegister(instr->result()), | 2199 __ LoadRoot(ToRegister(instr->result()), |
| 2183 Heap::kTrueValueRootIndex, | 2200 Heap::kTrueValueRootIndex, |
| 2184 condition); | 2201 condition); |
| 2185 __ LoadRoot(ToRegister(instr->result()), | 2202 __ LoadRoot(ToRegister(instr->result()), |
| 2186 Heap::kFalseValueRootIndex, | 2203 Heap::kFalseValueRootIndex, |
| 2187 NegateCondition(condition)); | 2204 NegateCondition(condition)); |
| 2188 } | 2205 } |
| 2189 | 2206 |
| 2190 | 2207 |
| 2191 void LCodeGen::DoReturn(LReturn* instr) { | 2208 void LCodeGen::DoReturn(LReturn* instr) { |
| (...skipping 52 matching lines...) |
| 2244 FieldMemOperand(scratch, JSGlobalPropertyCell::kValueOffset)); | 2261 FieldMemOperand(scratch, JSGlobalPropertyCell::kValueOffset)); |
| 2245 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); | 2262 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); |
| 2246 __ cmp(scratch2, ip); | 2263 __ cmp(scratch2, ip); |
| 2247 DeoptimizeIf(eq, instr->environment()); | 2264 DeoptimizeIf(eq, instr->environment()); |
| 2248 } | 2265 } |
| 2249 | 2266 |
| 2250 // Store the value. | 2267 // Store the value. |
| 2251 __ str(value, FieldMemOperand(scratch, JSGlobalPropertyCell::kValueOffset)); | 2268 __ str(value, FieldMemOperand(scratch, JSGlobalPropertyCell::kValueOffset)); |
| 2252 | 2269 |
| 2253 // Cells are always in the remembered set. | 2270 // Cells are always in the remembered set. |
| 2254 __ RecordWriteField(scratch, | 2271 if (instr->hydrogen()->NeedsWriteBarrier()) { |
| 2255 JSGlobalPropertyCell::kValueOffset, | 2272 HType type = instr->hydrogen()->value()->type(); |
| 2256 value, | 2273 SmiCheck check_needed = |
| 2257 scratch2, | 2274 type.IsHeapObject() ? OMIT_SMI_CHECK : INLINE_SMI_CHECK; |
| 2258 kLRHasBeenSaved, | 2275 __ RecordWriteField(scratch, |
| 2259 kSaveFPRegs, | 2276 JSGlobalPropertyCell::kValueOffset, |
| 2260 OMIT_REMEMBERED_SET); | 2277 value, |
| 2278 scratch2, |
| 2279 kLRHasBeenSaved, |
| 2280 kSaveFPRegs, |
| 2281 OMIT_REMEMBERED_SET, |
| 2282 check_needed); |
| 2283 } |
| 2261 } | 2284 } |
| 2262 | 2285 |
| 2263 | 2286 |
| 2264 void LCodeGen::DoStoreGlobalGeneric(LStoreGlobalGeneric* instr) { | 2287 void LCodeGen::DoStoreGlobalGeneric(LStoreGlobalGeneric* instr) { |
| 2265 ASSERT(ToRegister(instr->global_object()).is(r1)); | 2288 ASSERT(ToRegister(instr->global_object()).is(r1)); |
| 2266 ASSERT(ToRegister(instr->value()).is(r0)); | 2289 ASSERT(ToRegister(instr->value()).is(r0)); |
| 2267 | 2290 |
| 2268 __ mov(r2, Operand(instr->name())); | 2291 __ mov(r2, Operand(instr->name())); |
| 2269 Handle<Code> ic = instr->strict_mode() | 2292 Handle<Code> ic = instr->strict_mode() |
| 2270 ? isolate()->builtins()->StoreIC_Initialize_Strict() | 2293 ? isolate()->builtins()->StoreIC_Initialize_Strict() |
| 2271 : isolate()->builtins()->StoreIC_Initialize(); | 2294 : isolate()->builtins()->StoreIC_Initialize(); |
| 2272 CallCode(ic, RelocInfo::CODE_TARGET_CONTEXT, instr); | 2295 CallCode(ic, RelocInfo::CODE_TARGET_CONTEXT, instr); |
| 2273 } | 2296 } |
| 2274 | 2297 |
| 2275 | 2298 |
| 2276 void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) { | 2299 void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) { |
| 2277 Register context = ToRegister(instr->context()); | 2300 Register context = ToRegister(instr->context()); |
| 2278 Register result = ToRegister(instr->result()); | 2301 Register result = ToRegister(instr->result()); |
| 2279 __ ldr(result, ContextOperand(context, instr->slot_index())); | 2302 __ ldr(result, ContextOperand(context, instr->slot_index())); |
| 2280 } | 2303 } |
| 2281 | 2304 |
| 2282 | 2305 |
| 2283 void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) { | 2306 void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) { |
| 2284 Register context = ToRegister(instr->context()); | 2307 Register context = ToRegister(instr->context()); |
| 2285 Register value = ToRegister(instr->value()); | 2308 Register value = ToRegister(instr->value()); |
| 2286 MemOperand target = ContextOperand(context, instr->slot_index()); | 2309 MemOperand target = ContextOperand(context, instr->slot_index()); |
| 2287 __ str(value, target); | 2310 __ str(value, target); |
| 2288 if (instr->needs_write_barrier()) { | 2311 if (instr->hydrogen()->NeedsWriteBarrier()) { |
| 2312 HType type = instr->hydrogen()->value()->type(); |
| 2313 SmiCheck check_needed = |
| 2314 type.IsHeapObject() ? OMIT_SMI_CHECK : INLINE_SMI_CHECK; |
| 2289 __ RecordWriteContextSlot(context, | 2315 __ RecordWriteContextSlot(context, |
| 2290 target.offset(), | 2316 target.offset(), |
| 2291 value, | 2317 value, |
| 2292 scratch0(), | 2318 scratch0(), |
| 2293 kLRHasBeenSaved, | 2319 kLRHasBeenSaved, |
| 2294 kSaveFPRegs); | 2320 kSaveFPRegs, |
| 2321 EMIT_REMEMBERED_SET, |
| 2322 check_needed); |
| 2295 } | 2323 } |
| 2296 } | 2324 } |
| 2297 | 2325 |
| 2298 | 2326 |
| 2299 void LCodeGen::DoLoadNamedField(LLoadNamedField* instr) { | 2327 void LCodeGen::DoLoadNamedField(LLoadNamedField* instr) { |
| 2300 Register object = ToRegister(instr->InputAt(0)); | 2328 Register object = ToRegister(instr->InputAt(0)); |
| 2301 Register result = ToRegister(instr->result()); | 2329 Register result = ToRegister(instr->result()); |
| 2302 if (instr->hydrogen()->is_in_object()) { | 2330 if (instr->hydrogen()->is_in_object()) { |
| 2303 __ ldr(result, FieldMemOperand(object, instr->hydrogen()->offset())); | 2331 __ ldr(result, FieldMemOperand(object, instr->hydrogen()->offset())); |
| 2304 } else { | 2332 } else { |
| 2305 __ ldr(result, FieldMemOperand(object, JSObject::kPropertiesOffset)); | 2333 __ ldr(result, FieldMemOperand(object, JSObject::kPropertiesOffset)); |
| 2306 __ ldr(result, FieldMemOperand(result, instr->hydrogen()->offset())); | 2334 __ ldr(result, FieldMemOperand(result, instr->hydrogen()->offset())); |
| 2307 } | 2335 } |
| 2308 } | 2336 } |
| 2309 | 2337 |
| 2310 | 2338 |
| 2311 void LCodeGen::EmitLoadFieldOrConstantFunction(Register result, | 2339 void LCodeGen::EmitLoadFieldOrConstantFunction(Register result, |
| 2312 Register object, | 2340 Register object, |
| 2313 Handle<Map> type, | 2341 Handle<Map> type, |
| 2314 Handle<String> name) { | 2342 Handle<String> name) { |
| 2315 LookupResult lookup; | 2343 LookupResult lookup(isolate()); |
| 2316 type->LookupInDescriptors(NULL, *name, &lookup); | 2344 type->LookupInDescriptors(NULL, *name, &lookup); |
| 2317 ASSERT(lookup.IsProperty() && | 2345 ASSERT(lookup.IsProperty() && |
| 2318 (lookup.type() == FIELD || lookup.type() == CONSTANT_FUNCTION)); | 2346 (lookup.type() == FIELD || lookup.type() == CONSTANT_FUNCTION)); |
| 2319 if (lookup.type() == FIELD) { | 2347 if (lookup.type() == FIELD) { |
| 2320 int index = lookup.GetLocalFieldIndexFromMap(*type); | 2348 int index = lookup.GetLocalFieldIndexFromMap(*type); |
| 2321 int offset = index * kPointerSize; | 2349 int offset = index * kPointerSize; |
| 2322 if (index < 0) { | 2350 if (index < 0) { |
| 2323 // Negative property indices are in-object properties, indexed | 2351 // Negative property indices are in-object properties, indexed |
| 2324 // from the end of the fixed part of the object. | 2352 // from the end of the fixed part of the object. |
| 2325 __ ldr(result, FieldMemOperand(object, offset + type->instance_size())); | 2353 __ ldr(result, FieldMemOperand(object, offset + type->instance_size())); |
| (...skipping 445 matching lines...) |
| 2771 Abort("DoPushArgument not implemented for double type."); | 2799 Abort("DoPushArgument not implemented for double type."); |
| 2772 } else { | 2800 } else { |
| 2773 Register argument_reg = EmitLoadRegister(argument, ip); | 2801 Register argument_reg = EmitLoadRegister(argument, ip); |
| 2774 __ push(argument_reg); | 2802 __ push(argument_reg); |
| 2775 } | 2803 } |
| 2776 } | 2804 } |
| 2777 | 2805 |
| 2778 | 2806 |
| 2779 void LCodeGen::DoThisFunction(LThisFunction* instr) { | 2807 void LCodeGen::DoThisFunction(LThisFunction* instr) { |
| 2780 Register result = ToRegister(instr->result()); | 2808 Register result = ToRegister(instr->result()); |
| 2781 __ ldr(result, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); | 2809 LoadHeapObject(result, instr->hydrogen()->closure()); |
| 2782 } | 2810 } |
| 2783 | 2811 |
| 2784 | 2812 |
| 2785 void LCodeGen::DoContext(LContext* instr) { | 2813 void LCodeGen::DoContext(LContext* instr) { |
| 2786 Register result = ToRegister(instr->result()); | 2814 Register result = ToRegister(instr->result()); |
| 2787 __ mov(result, cp); | 2815 __ mov(result, cp); |
| 2788 } | 2816 } |
| 2789 | 2817 |
| 2790 | 2818 |
| 2791 void LCodeGen::DoOuterContext(LOuterContext* instr) { | 2819 void LCodeGen::DoOuterContext(LOuterContext* instr) { |
| (...skipping 498 matching lines...) |
| 3290 int offset = instr->offset(); | 3318 int offset = instr->offset(); |
| 3291 | 3319 |
| 3292 ASSERT(!object.is(value)); | 3320 ASSERT(!object.is(value)); |
| 3293 | 3321 |
| 3294 if (!instr->transition().is_null()) { | 3322 if (!instr->transition().is_null()) { |
| 3295 __ mov(scratch, Operand(instr->transition())); | 3323 __ mov(scratch, Operand(instr->transition())); |
| 3296 __ str(scratch, FieldMemOperand(object, HeapObject::kMapOffset)); | 3324 __ str(scratch, FieldMemOperand(object, HeapObject::kMapOffset)); |
| 3297 } | 3325 } |
| 3298 | 3326 |
| 3299 // Do the store. | 3327 // Do the store. |
| 3328 HType type = instr->hydrogen()->value()->type(); |
| 3329 SmiCheck check_needed = |
| 3330 type.IsHeapObject() ? OMIT_SMI_CHECK : INLINE_SMI_CHECK; |
| 3300 if (instr->is_in_object()) { | 3331 if (instr->is_in_object()) { |
| 3301 __ str(value, FieldMemOperand(object, offset)); | 3332 __ str(value, FieldMemOperand(object, offset)); |
| 3302 if (instr->needs_write_barrier()) { | 3333 if (instr->hydrogen()->NeedsWriteBarrier()) { |
| 3303 // Update the write barrier for the object for in-object properties. | 3334 // Update the write barrier for the object for in-object properties. |
| 3304 __ RecordWriteField( | 3335 __ RecordWriteField(object, |
| 3305 object, offset, value, scratch, kLRHasBeenSaved, kSaveFPRegs); | 3336 offset, |
| 3337 value, |
| 3338 scratch, |
| 3339 kLRHasBeenSaved, |
| 3340 kSaveFPRegs, |
| 3341 EMIT_REMEMBERED_SET, |
| 3342 check_needed); |
| 3306 } | 3343 } |
| 3307 } else { | 3344 } else { |
| 3308 __ ldr(scratch, FieldMemOperand(object, JSObject::kPropertiesOffset)); | 3345 __ ldr(scratch, FieldMemOperand(object, JSObject::kPropertiesOffset)); |
| 3309 __ str(value, FieldMemOperand(scratch, offset)); | 3346 __ str(value, FieldMemOperand(scratch, offset)); |
| 3310 if (instr->needs_write_barrier()) { | 3347 if (instr->hydrogen()->NeedsWriteBarrier()) { |
| 3311 // Update the write barrier for the properties array. | 3348 // Update the write barrier for the properties array. |
| 3312 // object is used as a scratch register. | 3349 // object is used as a scratch register. |
| 3313 __ RecordWriteField( | 3350 __ RecordWriteField(scratch, |
| 3314 scratch, offset, value, object, kLRHasBeenSaved, kSaveFPRegs); | 3351 offset, |
| 3352 value, |
| 3353 object, |
| 3354 kLRHasBeenSaved, |
| 3355 kSaveFPRegs, |
| 3356 EMIT_REMEMBERED_SET, |
| 3357 check_needed); |
| 3315 } | 3358 } |
| 3316 } | 3359 } |
| 3317 } | 3360 } |
| 3318 | 3361 |
| 3319 | 3362 |
| 3320 void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) { | 3363 void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) { |
| 3321 ASSERT(ToRegister(instr->object()).is(r1)); | 3364 ASSERT(ToRegister(instr->object()).is(r1)); |
| 3322 ASSERT(ToRegister(instr->value()).is(r0)); | 3365 ASSERT(ToRegister(instr->value()).is(r0)); |
| 3323 | 3366 |
| 3324 // Name is always in r2. | 3367 // Name is always in r2. |
| (...skipping 30 matching lines...) |
| 3355 LConstantOperand* const_operand = LConstantOperand::cast(instr->key()); | 3398 LConstantOperand* const_operand = LConstantOperand::cast(instr->key()); |
| 3356 int offset = | 3399 int offset = |
| 3357 ToInteger32(const_operand) * kPointerSize + FixedArray::kHeaderSize; | 3400 ToInteger32(const_operand) * kPointerSize + FixedArray::kHeaderSize; |
| 3358 __ str(value, FieldMemOperand(elements, offset)); | 3401 __ str(value, FieldMemOperand(elements, offset)); |
| 3359 } else { | 3402 } else { |
| 3360 __ add(scratch, elements, Operand(key, LSL, kPointerSizeLog2)); | 3403 __ add(scratch, elements, Operand(key, LSL, kPointerSizeLog2)); |
| 3361 __ str(value, FieldMemOperand(scratch, FixedArray::kHeaderSize)); | 3404 __ str(value, FieldMemOperand(scratch, FixedArray::kHeaderSize)); |
| 3362 } | 3405 } |
| 3363 | 3406 |
| 3364 if (instr->hydrogen()->NeedsWriteBarrier()) { | 3407 if (instr->hydrogen()->NeedsWriteBarrier()) { |
| 3408 HType type = instr->hydrogen()->value()->type(); |
| 3409 SmiCheck check_needed = |
| 3410 type.IsHeapObject() ? OMIT_SMI_CHECK : INLINE_SMI_CHECK; |
| 3365 // Compute address of modified element and store it into key register. | 3411 // Compute address of modified element and store it into key register. |
| 3366 __ add(key, scratch, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); | 3412 __ add(key, scratch, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); |
| 3367 __ RecordWrite(elements, key, value, kLRHasBeenSaved, kSaveFPRegs); | 3413 __ RecordWrite(elements, |
| 3414 key, |
| 3415 value, |
| 3416 kLRHasBeenSaved, |
| 3417 kSaveFPRegs, |
| 3418 EMIT_REMEMBERED_SET, |
| 3419 check_needed); |
| 3368 } | 3420 } |
| 3369 } | 3421 } |
| 3370 | 3422 |
| 3371 | 3423 |
| 3372 void LCodeGen::DoStoreKeyedFastDoubleElement( | 3424 void LCodeGen::DoStoreKeyedFastDoubleElement( |
| 3373 LStoreKeyedFastDoubleElement* instr) { | 3425 LStoreKeyedFastDoubleElement* instr) { |
| 3374 DwVfpRegister value = ToDoubleRegister(instr->value()); | 3426 DwVfpRegister value = ToDoubleRegister(instr->value()); |
| 3375 Register elements = ToRegister(instr->elements()); | 3427 Register elements = ToRegister(instr->elements()); |
| 3376 Register key = no_reg; | 3428 Register key = no_reg; |
| 3377 Register scratch = scratch0(); | 3429 Register scratch = scratch0(); |
| (...skipping 102 matching lines...) |
| 3480 ASSERT(ToRegister(instr->key()).is(r1)); | 3532 ASSERT(ToRegister(instr->key()).is(r1)); |
| 3481 ASSERT(ToRegister(instr->value()).is(r0)); | 3533 ASSERT(ToRegister(instr->value()).is(r0)); |
| 3482 | 3534 |
| 3483 Handle<Code> ic = instr->strict_mode() | 3535 Handle<Code> ic = instr->strict_mode() |
| 3484 ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict() | 3536 ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict() |
| 3485 : isolate()->builtins()->KeyedStoreIC_Initialize(); | 3537 : isolate()->builtins()->KeyedStoreIC_Initialize(); |
| 3486 CallCode(ic, RelocInfo::CODE_TARGET, instr); | 3538 CallCode(ic, RelocInfo::CODE_TARGET, instr); |
| 3487 } | 3539 } |
| 3488 | 3540 |
| 3489 | 3541 |
| 3542 void LCodeGen::DoTransitionElementsKind(LTransitionElementsKind* instr) { |
| 3543 Register object_reg = ToRegister(instr->object()); |
| 3544 Register new_map_reg = ToRegister(instr->new_map_reg()); |
| 3545 Register scratch = scratch0(); |
| 3546 |
| 3547 Handle<Map> from_map = instr->original_map(); |
| 3548 Handle<Map> to_map = instr->transitioned_map(); |
| 3549 ElementsKind from_kind = from_map->elements_kind(); |
| 3550 ElementsKind to_kind = to_map->elements_kind(); |
| 3551 |
| 3552 Label not_applicable; |
| 3553 __ ldr(scratch, FieldMemOperand(object_reg, HeapObject::kMapOffset)); |
| 3554 __ cmp(scratch, Operand(from_map)); |
| 3555 __ b(ne, &not_applicable); |
| 3556 __ mov(new_map_reg, Operand(to_map)); |
| 3557 if (from_kind == FAST_SMI_ONLY_ELEMENTS && to_kind == FAST_ELEMENTS) { |
| 3558 __ str(new_map_reg, FieldMemOperand(object_reg, HeapObject::kMapOffset)); |
| 3559 // Write barrier. |
| 3560 __ RecordWriteField(object_reg, HeapObject::kMapOffset, new_map_reg, |
| 3561 scratch, kLRHasBeenSaved, kDontSaveFPRegs); |
| 3562 } else if (from_kind == FAST_SMI_ONLY_ELEMENTS && |
| 3563 to_kind == FAST_DOUBLE_ELEMENTS) { |
| 3564 Register fixed_object_reg = ToRegister(instr->temp_reg()); |
| 3565 ASSERT(fixed_object_reg.is(r2)); |
| 3566 ASSERT(new_map_reg.is(r3)); |
| 3567 __ mov(fixed_object_reg, object_reg); |
| 3568 CallCode(isolate()->builtins()->TransitionElementsSmiToDouble(), |
| 3569 RelocInfo::CODE_TARGET, instr); |
| 3570 } else if (from_kind == FAST_DOUBLE_ELEMENTS && to_kind == FAST_ELEMENTS) { |
| 3571 Register fixed_object_reg = ToRegister(instr->temp_reg()); |
| 3572 ASSERT(fixed_object_reg.is(r2)); |
| 3573 ASSERT(new_map_reg.is(r3)); |
| 3574 __ mov(fixed_object_reg, object_reg); |
| 3575 CallCode(isolate()->builtins()->TransitionElementsDoubleToObject(), |
| 3576 RelocInfo::CODE_TARGET, instr); |
| 3577 } else { |
| 3578 UNREACHABLE(); |
| 3579 } |
| 3580 __ bind(&not_applicable); |
| 3581 } |
| 3582 |
| 3583 |
| 3490 void LCodeGen::DoStringAdd(LStringAdd* instr) { | 3584 void LCodeGen::DoStringAdd(LStringAdd* instr) { |
| 3491 __ push(ToRegister(instr->left())); | 3585 __ push(ToRegister(instr->left())); |
| 3492 __ push(ToRegister(instr->right())); | 3586 __ push(ToRegister(instr->right())); |
| 3493 StringAddStub stub(NO_STRING_CHECK_IN_STUB); | 3587 StringAddStub stub(NO_STRING_CHECK_IN_STUB); |
| 3494 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); | 3588 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); |
| 3495 } | 3589 } |
| 3496 | 3590 |
| 3497 | 3591 |
| 3498 void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) { | 3592 void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) { |
| 3499 class DeferredStringCharCodeAt: public LDeferredCode { | 3593 class DeferredStringCharCodeAt: public LDeferredCode { |
| (...skipping 696 matching lines...) |
| 4196 } | 4290 } |
| 4197 | 4291 |
| 4198 // Check the holder map. | 4292 // Check the holder map. |
| 4199 __ ldr(temp2, FieldMemOperand(temp1, HeapObject::kMapOffset)); | 4293 __ ldr(temp2, FieldMemOperand(temp1, HeapObject::kMapOffset)); |
| 4200 __ cmp(temp2, Operand(Handle<Map>(current_prototype->map()))); | 4294 __ cmp(temp2, Operand(Handle<Map>(current_prototype->map()))); |
| 4201 DeoptimizeIf(ne, instr->environment()); | 4295 DeoptimizeIf(ne, instr->environment()); |
| 4202 } | 4296 } |
| 4203 | 4297 |
| 4204 | 4298 |
| 4205 void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) { | 4299 void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) { |
| 4300 Handle<FixedArray> constant_elements = instr->hydrogen()->constant_elements(); |
| 4301 ASSERT_EQ(2, constant_elements->length()); |
| 4302 ElementsKind constant_elements_kind = |
| 4303 static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value()); |
| 4304 |
| 4206 __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); | 4305 __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); |
| 4207 __ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset)); | 4306 __ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset)); |
| 4208 __ mov(r2, Operand(Smi::FromInt(instr->hydrogen()->literal_index()))); | 4307 __ mov(r2, Operand(Smi::FromInt(instr->hydrogen()->literal_index()))); |
| 4209 __ mov(r1, Operand(instr->hydrogen()->constant_elements())); | 4308 __ mov(r1, Operand(constant_elements)); |
| 4210 __ Push(r3, r2, r1); | 4309 __ Push(r3, r2, r1); |
| 4211 | 4310 |
| 4212 // Pick the right runtime function or stub to call. | 4311 // Pick the right runtime function or stub to call. |
| 4213 int length = instr->hydrogen()->length(); | 4312 int length = instr->hydrogen()->length(); |
| 4214 if (instr->hydrogen()->IsCopyOnWrite()) { | 4313 if (instr->hydrogen()->IsCopyOnWrite()) { |
| 4215 ASSERT(instr->hydrogen()->depth() == 1); | 4314 ASSERT(instr->hydrogen()->depth() == 1); |
| 4216 FastCloneShallowArrayStub::Mode mode = | 4315 FastCloneShallowArrayStub::Mode mode = |
| 4217 FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS; | 4316 FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS; |
| 4218 FastCloneShallowArrayStub stub(mode, length); | 4317 FastCloneShallowArrayStub stub(mode, length); |
| 4219 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); | 4318 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); |
| 4220 } else if (instr->hydrogen()->depth() > 1) { | 4319 } else if (instr->hydrogen()->depth() > 1) { |
| 4221 CallRuntime(Runtime::kCreateArrayLiteral, 3, instr); | 4320 CallRuntime(Runtime::kCreateArrayLiteral, 3, instr); |
| 4222 } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) { | 4321 } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) { |
| 4223 CallRuntime(Runtime::kCreateArrayLiteralShallow, 3, instr); | 4322 CallRuntime(Runtime::kCreateArrayLiteralShallow, 3, instr); |
| 4224 } else { | 4323 } else { |
| 4225 FastCloneShallowArrayStub::Mode mode = | 4324 FastCloneShallowArrayStub::Mode mode = |
| 4226 FastCloneShallowArrayStub::CLONE_ELEMENTS; | 4325 constant_elements_kind == FAST_DOUBLE_ELEMENTS |
| 4326 ? FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS |
| 4327 : FastCloneShallowArrayStub::CLONE_ELEMENTS; |
| 4227 FastCloneShallowArrayStub stub(mode, length); | 4328 FastCloneShallowArrayStub stub(mode, length); |
| 4228 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); | 4329 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); |
| 4229 } | 4330 } |
| 4230 } | 4331 } |
| 4231 | 4332 |
| 4232 | 4333 |
| 4233 void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) { | 4334 void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) { |
| 4234 __ ldr(r4, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); | 4335 __ ldr(r4, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); |
| 4235 __ ldr(r4, FieldMemOperand(r4, JSFunction::kLiteralsOffset)); | 4336 __ ldr(r4, FieldMemOperand(r4, JSFunction::kLiteralsOffset)); |
| 4236 __ mov(r3, Operand(Smi::FromInt(instr->hydrogen()->literal_index()))); | 4337 __ mov(r3, Operand(Smi::FromInt(instr->hydrogen()->literal_index()))); |
| (...skipping 71 matching lines...) |
| 4308 } | 4409 } |
| 4309 } | 4410 } |
| 4310 | 4411 |
| 4311 | 4412 |
| 4312 void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) { | 4413 void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) { |
| 4313 // Use the fast case closure allocation code that allocates in new | 4414 // Use the fast case closure allocation code that allocates in new |
| 4314 // space for nested functions that don't need literals cloning. | 4415 // space for nested functions that don't need literals cloning. |
| 4315 Handle<SharedFunctionInfo> shared_info = instr->shared_info(); | 4416 Handle<SharedFunctionInfo> shared_info = instr->shared_info(); |
| 4316 bool pretenure = instr->hydrogen()->pretenure(); | 4417 bool pretenure = instr->hydrogen()->pretenure(); |
| 4317 if (!pretenure && shared_info->num_literals() == 0) { | 4418 if (!pretenure && shared_info->num_literals() == 0) { |
| 4318 FastNewClosureStub stub( | 4419 FastNewClosureStub stub(shared_info->strict_mode_flag()); |
| 4319 shared_info->strict_mode() ? kStrictMode : kNonStrictMode); | |
| 4320 __ mov(r1, Operand(shared_info)); | 4420 __ mov(r1, Operand(shared_info)); |
| 4321 __ push(r1); | 4421 __ push(r1); |
| 4322 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); | 4422 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); |
| 4323 } else { | 4423 } else { |
| 4324 __ mov(r2, Operand(shared_info)); | 4424 __ mov(r2, Operand(shared_info)); |
| 4325 __ mov(r1, Operand(pretenure | 4425 __ mov(r1, Operand(pretenure |
| 4326 ? factory()->true_value() | 4426 ? factory()->true_value() |
| 4327 : factory()->false_value())); | 4427 : factory()->false_value())); |
| 4328 __ Push(cp, r2, r1); | 4428 __ Push(cp, r2, r1); |
| 4329 CallRuntime(Runtime::kNewClosure, 3, instr); | 4429 CallRuntime(Runtime::kNewClosure, 3, instr); |
| (...skipping 12 matching lines...) |
| 4342 Register input = ToRegister(instr->InputAt(0)); | 4442 Register input = ToRegister(instr->InputAt(0)); |
| 4343 int true_block = chunk_->LookupDestination(instr->true_block_id()); | 4443 int true_block = chunk_->LookupDestination(instr->true_block_id()); |
| 4344 int false_block = chunk_->LookupDestination(instr->false_block_id()); | 4444 int false_block = chunk_->LookupDestination(instr->false_block_id()); |
| 4345 Label* true_label = chunk_->GetAssemblyLabel(true_block); | 4445 Label* true_label = chunk_->GetAssemblyLabel(true_block); |
| 4346 Label* false_label = chunk_->GetAssemblyLabel(false_block); | 4446 Label* false_label = chunk_->GetAssemblyLabel(false_block); |
| 4347 | 4447 |
| 4348 Condition final_branch_condition = EmitTypeofIs(true_label, | 4448 Condition final_branch_condition = EmitTypeofIs(true_label, |
| 4349 false_label, | 4449 false_label, |
| 4350 input, | 4450 input, |
| 4351 instr->type_literal()); | 4451 instr->type_literal()); |
| 4352 | 4452 if (final_branch_condition != kNoCondition) { |
| 4353 EmitBranch(true_block, false_block, final_branch_condition); | 4453 EmitBranch(true_block, false_block, final_branch_condition); |
| 4454 } |
| 4354 } | 4455 } |
| 4355 | 4456 |
| 4356 | 4457 |
| 4357 Condition LCodeGen::EmitTypeofIs(Label* true_label, | 4458 Condition LCodeGen::EmitTypeofIs(Label* true_label, |
| 4358 Label* false_label, | 4459 Label* false_label, |
| 4359 Register input, | 4460 Register input, |
| 4360 Handle<String> type_name) { | 4461 Handle<String> type_name) { |
| 4361 Condition final_branch_condition = kNoCondition; | 4462 Condition final_branch_condition = kNoCondition; |
| 4362 Register scratch = scratch0(); | 4463 Register scratch = scratch0(); |
| 4363 if (type_name->Equals(heap()->number_symbol())) { | 4464 if (type_name->Equals(heap()->number_symbol())) { |
| (...skipping 49 matching lines...) |
| 4413 FIRST_NONCALLABLE_SPEC_OBJECT_TYPE); | 4514 FIRST_NONCALLABLE_SPEC_OBJECT_TYPE); |
| 4414 __ b(lt, false_label); | 4515 __ b(lt, false_label); |
| 4415 __ CompareInstanceType(input, scratch, LAST_NONCALLABLE_SPEC_OBJECT_TYPE); | 4516 __ CompareInstanceType(input, scratch, LAST_NONCALLABLE_SPEC_OBJECT_TYPE); |
| 4416 __ b(gt, false_label); | 4517 __ b(gt, false_label); |
| 4417 // Check for undetectable objects => false. | 4518 // Check for undetectable objects => false. |
| 4418 __ ldrb(ip, FieldMemOperand(input, Map::kBitFieldOffset)); | 4519 __ ldrb(ip, FieldMemOperand(input, Map::kBitFieldOffset)); |
| 4419 __ tst(ip, Operand(1 << Map::kIsUndetectable)); | 4520 __ tst(ip, Operand(1 << Map::kIsUndetectable)); |
| 4420 final_branch_condition = eq; | 4521 final_branch_condition = eq; |
| 4421 | 4522 |
| 4422 } else { | 4523 } else { |
| 4423 final_branch_condition = ne; | |
| 4424 __ b(false_label); | 4524 __ b(false_label); |
| 4425 // A dead branch instruction will be generated after this point. | |
| 4426 } | 4525 } |
| 4427 | 4526 |
| 4428 return final_branch_condition; | 4527 return final_branch_condition; |
| 4429 } | 4528 } |
| 4430 | 4529 |
| 4431 | 4530 |
| 4432 void LCodeGen::DoIsConstructCallAndBranch(LIsConstructCallAndBranch* instr) { | 4531 void LCodeGen::DoIsConstructCallAndBranch(LIsConstructCallAndBranch* instr) { |
| 4433 Register temp1 = ToRegister(instr->TempAt(0)); | 4532 Register temp1 = ToRegister(instr->TempAt(0)); |
| 4434 int true_block = chunk_->LookupDestination(instr->true_block_id()); | 4533 int true_block = chunk_->LookupDestination(instr->true_block_id()); |
| 4435 int false_block = chunk_->LookupDestination(instr->false_block_id()); | 4534 int false_block = chunk_->LookupDestination(instr->false_block_id()); |
| (...skipping 130 matching lines...) |
| 4566 ASSERT(osr_pc_offset_ == -1); | 4665 ASSERT(osr_pc_offset_ == -1); |
| 4567 osr_pc_offset_ = masm()->pc_offset(); | 4666 osr_pc_offset_ = masm()->pc_offset(); |
| 4568 } | 4667 } |
| 4569 | 4668 |
| 4570 | 4669 |
| 4571 | 4670 |
| 4572 | 4671 |
| 4573 #undef __ | 4672 #undef __ |
| 4574 | 4673 |
| 4575 } } // namespace v8::internal | 4674 } } // namespace v8::internal |
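Note on the constant-folding path added to DoCmpIDAndBranch: the new branch calls EvalComparison(), whose definition lies outside this diff. Below is a minimal sketch of the expected behaviour, assuming the helper simply applies the comparison token to the two double constants; the sketch::Token enum, the namespace, and the test values are illustrative only, not V8's actual declarations.

// Sketch (not part of the patch): plausible shape of the EvalComparison()
// helper used by the constant-operand branch of DoCmpIDAndBranch.
#include <cassert>
#include <limits>

namespace sketch {

enum class Token { EQ, LT, GT, LTE, GTE };

// Evaluate "left op right" on doubles. A NaN operand makes each of these
// comparisons false, mirroring the unordered (V set) -> false-block jump in
// the non-constant VFP path of the patch.
inline bool EvalComparison(Token op, double left, double right) {
  switch (op) {
    case Token::EQ:  return left == right;
    case Token::LT:  return left <  right;
    case Token::GT:  return left >  right;
    case Token::LTE: return left <= right;
    case Token::GTE: return left >= right;
  }
  return false;  // unreachable for the tokens above
}

}  // namespace sketch

int main() {
  using sketch::EvalComparison;
  using sketch::Token;
  assert(EvalComparison(Token::LT, 1.0, 2.0));    // would fold to the true block
  assert(!EvalComparison(Token::GTE, 1.0, 2.0));  // would fold to the false block
  // NaN is unordered: both a comparison and its reverse evaluate to false.
  const double nan = std::numeric_limits<double>::quiet_NaN();
  assert(!EvalComparison(Token::LT, nan, 2.0));
  assert(!EvalComparison(Token::GTE, nan, 2.0));
  return 0;
}

With both operands LConstantOperands, the generated code therefore reduces to a single EmitGoto to whichever block the folded result selects, and no cmp/branch pair is emitted at all.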