| OLD | NEW |
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 273 matching lines...) |
| 284 Register LCodeGen::ToRegister(LOperand* op) const { | 284 Register LCodeGen::ToRegister(LOperand* op) const { |
| 285 ASSERT(op->IsRegister()); | 285 ASSERT(op->IsRegister()); |
| 286 return ToRegister(op->index()); | 286 return ToRegister(op->index()); |
| 287 } | 287 } |
| 288 | 288 |
| 289 | 289 |
| 290 Register LCodeGen::EmitLoadRegister(LOperand* op, Register scratch) { | 290 Register LCodeGen::EmitLoadRegister(LOperand* op, Register scratch) { |
| 291 if (op->IsRegister()) { | 291 if (op->IsRegister()) { |
| 292 return ToRegister(op->index()); | 292 return ToRegister(op->index()); |
| 293 } else if (op->IsConstantOperand()) { | 293 } else if (op->IsConstantOperand()) { |
| 294 __ li(scratch, ToOperand(op)); | 294 LConstantOperand* const_op = LConstantOperand::cast(op); |
| 295 Handle<Object> literal = chunk_->LookupLiteral(const_op); |
| 296 Representation r = chunk_->LookupLiteralRepresentation(const_op); |
| 297 if (r.IsInteger32()) { |
| 298 ASSERT(literal->IsNumber()); |
| 299 __ li(scratch, Operand(static_cast<int32_t>(literal->Number()))); |
| 300 } else if (r.IsDouble()) { |
| 301 Abort("EmitLoadRegister: Unsupported double immediate."); |
| 302 } else { |
| 303 ASSERT(r.IsTagged()); |
| 304 if (literal->IsSmi()) { |
| 305 __ li(scratch, Operand(literal)); |
| 306 } else { |
| 307 __ LoadHeapObject(scratch, Handle<HeapObject>::cast(literal)); |
| 308 } |
| 309 } |
| 295 return scratch; | 310 return scratch; |
| 296 } else if (op->IsStackSlot() || op->IsArgument()) { | 311 } else if (op->IsStackSlot() || op->IsArgument()) { |
| 297 __ lw(scratch, ToMemOperand(op)); | 312 __ lw(scratch, ToMemOperand(op)); |
| 298 return scratch; | 313 return scratch; |
| 299 } | 314 } |
| 300 UNREACHABLE(); | 315 UNREACHABLE(); |
| 301 return scratch; | 316 return scratch; |
| 302 } | 317 } |
| 303 | 318 |
| 304 | 319 |
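EmitLoadRegister now materializes constant operands according to their recorded representation instead of routing them through ToOperand, so tagged heap-object constants are loaded with LoadHeapObject rather than embedded as raw immediates. A hypothetical caller sketch (ExampleUse is illustrative and not part of this patch) of the scratch-register contract: the returned register is either the operand's own register or the scratch register that was passed in, so callers should use the return value and not assume which register backs it.

    // Hypothetical illustration of EmitLoadRegister's contract; not part of
    // this patch. The scratch register may or may not end up holding the value.
    void LCodeGen::ExampleUse(LOperand* op) {
      Register value = EmitLoadRegister(op, scratch0());  // op's register or scratch0()
      __ push(value);
    }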
| (...skipping 850 matching lines...) |
| 1155 | 1170 |
| 1156 void LCodeGen::DoConstantD(LConstantD* instr) { | 1171 void LCodeGen::DoConstantD(LConstantD* instr) { |
| 1157 ASSERT(instr->result()->IsDoubleRegister()); | 1172 ASSERT(instr->result()->IsDoubleRegister()); |
| 1158 DoubleRegister result = ToDoubleRegister(instr->result()); | 1173 DoubleRegister result = ToDoubleRegister(instr->result()); |
| 1159 double v = instr->value(); | 1174 double v = instr->value(); |
| 1160 __ Move(result, v); | 1175 __ Move(result, v); |
| 1161 } | 1176 } |
| 1162 | 1177 |
| 1163 | 1178 |
| 1164 void LCodeGen::DoConstantT(LConstantT* instr) { | 1179 void LCodeGen::DoConstantT(LConstantT* instr) { |
| 1165 ASSERT(instr->result()->IsRegister()); | 1180 Handle<Object> value = instr->value(); |
| 1166 __ li(ToRegister(instr->result()), Operand(instr->value())); | 1181 if (value->IsSmi()) { |
| 1182 __ li(ToRegister(instr->result()), Operand(value)); |
| 1183 } else { |
| 1184 __ LoadHeapObject(ToRegister(instr->result()), |
| 1185 Handle<HeapObject>::cast(value)); |
| 1186 } |
| 1167 } | 1187 } |
| 1168 | 1188 |
| 1169 | 1189 |
| 1170 void LCodeGen::DoJSArrayLength(LJSArrayLength* instr) { | 1190 void LCodeGen::DoJSArrayLength(LJSArrayLength* instr) { |
| 1171 Register result = ToRegister(instr->result()); | 1191 Register result = ToRegister(instr->result()); |
| 1172 Register array = ToRegister(instr->InputAt(0)); | 1192 Register array = ToRegister(instr->InputAt(0)); |
| 1173 __ lw(result, FieldMemOperand(array, JSArray::kLengthOffset)); | 1193 __ lw(result, FieldMemOperand(array, JSArray::kLengthOffset)); |
| 1174 } | 1194 } |
| 1175 | 1195 |
| 1176 | 1196 |
| (...skipping 855 matching lines...) |
| 2032 flags | InstanceofStub::kReturnTrueFalseObject); | 2052 flags | InstanceofStub::kReturnTrueFalseObject); |
| 2033 InstanceofStub stub(flags); | 2053 InstanceofStub stub(flags); |
| 2034 | 2054 |
| 2035 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters); | 2055 PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters); |
| 2036 | 2056 |
| 2037 // Get the temp register reserved by the instruction. This needs to be t0 | 2057 // Get the temp register reserved by the instruction. This needs to be t0 |
| 2038 // because its slot in the pushed safepoint register area is used to | 2058 // because its slot in the pushed safepoint register area is used to |
| 2039 // communicate the offset to the location of the map check. | 2059 // communicate the offset to the location of the map check. |
| 2040 Register temp = ToRegister(instr->TempAt(0)); | 2060 Register temp = ToRegister(instr->TempAt(0)); |
| 2041 ASSERT(temp.is(t0)); | 2061 ASSERT(temp.is(t0)); |
| 2042 __ li(InstanceofStub::right(), Operand(instr->function())); | 2062 __ LoadHeapObject(InstanceofStub::right(), instr->function()); |
| 2043 static const int kAdditionalDelta = 7; | 2063 static const int kAdditionalDelta = 7; |
| 2044 int delta = masm_->InstructionsGeneratedSince(map_check) + kAdditionalDelta; | 2064 int delta = masm_->InstructionsGeneratedSince(map_check) + kAdditionalDelta; |
| 2045 Label before_push_delta; | 2065 Label before_push_delta; |
| 2046 __ bind(&before_push_delta); | 2066 __ bind(&before_push_delta); |
| 2047 { | 2067 { |
| 2048 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_); | 2068 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_); |
| 2049 __ li(temp, Operand(delta * kPointerSize), true); | 2069 __ li(temp, Operand(delta * kPointerSize), true); |
| 2050 __ StoreToSafepointRegisterSlot(temp, temp); | 2070 __ StoreToSafepointRegisterSlot(temp, temp); |
| 2051 } | 2071 } |
| 2052 CallCodeGeneric(stub.GetCode(), | 2072 CallCodeGeneric(stub.GetCode(), |
| (...skipping 173 matching lines...) |
| 2226 // Negative property indices are in-object properties, indexed | 2246 // Negative property indices are in-object properties, indexed |
| 2227 // from the end of the fixed part of the object. | 2247 // from the end of the fixed part of the object. |
| 2228 __ lw(result, FieldMemOperand(object, offset + type->instance_size())); | 2248 __ lw(result, FieldMemOperand(object, offset + type->instance_size())); |
| 2229 } else { | 2249 } else { |
| 2230 // Non-negative property indices are in the properties array. | 2250 // Non-negative property indices are in the properties array. |
| 2231 __ lw(result, FieldMemOperand(object, JSObject::kPropertiesOffset)); | 2251 __ lw(result, FieldMemOperand(object, JSObject::kPropertiesOffset)); |
| 2232 __ lw(result, FieldMemOperand(result, offset + FixedArray::kHeaderSize)); | 2252 __ lw(result, FieldMemOperand(result, offset + FixedArray::kHeaderSize)); |
| 2233 } | 2253 } |
| 2234 } else { | 2254 } else { |
| 2235 Handle<JSFunction> function(lookup.GetConstantFunctionFromMap(*type)); | 2255 Handle<JSFunction> function(lookup.GetConstantFunctionFromMap(*type)); |
| 2236 LoadHeapObject(result, Handle<HeapObject>::cast(function)); | 2256 __ LoadHeapObject(result, function); |
| 2237 } | 2257 } |
| 2238 } | 2258 } |
| 2239 | 2259 |
| 2240 | 2260 |
| 2241 void LCodeGen::DoLoadNamedFieldPolymorphic(LLoadNamedFieldPolymorphic* instr) { | 2261 void LCodeGen::DoLoadNamedFieldPolymorphic(LLoadNamedFieldPolymorphic* instr) { |
| 2242 Register object = ToRegister(instr->object()); | 2262 Register object = ToRegister(instr->object()); |
| 2243 Register result = ToRegister(instr->result()); | 2263 Register result = ToRegister(instr->result()); |
| 2244 Register scratch = scratch0(); | 2264 Register scratch = scratch0(); |
| 2245 int map_count = instr->hydrogen()->types()->length(); | 2265 int map_count = instr->hydrogen()->types()->length(); |
| 2246 Handle<String> name = instr->hydrogen()->name(); | 2266 Handle<String> name = instr->hydrogen()->name(); |
| (...skipping 433 matching lines...) |
| 2680 Abort("DoPushArgument not implemented for double type."); | 2700 Abort("DoPushArgument not implemented for double type."); |
| 2681 } else { | 2701 } else { |
| 2682 Register argument_reg = EmitLoadRegister(argument, at); | 2702 Register argument_reg = EmitLoadRegister(argument, at); |
| 2683 __ push(argument_reg); | 2703 __ push(argument_reg); |
| 2684 } | 2704 } |
| 2685 } | 2705 } |
| 2686 | 2706 |
| 2687 | 2707 |
| 2688 void LCodeGen::DoThisFunction(LThisFunction* instr) { | 2708 void LCodeGen::DoThisFunction(LThisFunction* instr) { |
| 2689 Register result = ToRegister(instr->result()); | 2709 Register result = ToRegister(instr->result()); |
| 2690 LoadHeapObject(result, instr->hydrogen()->closure()); | 2710 __ LoadHeapObject(result, instr->hydrogen()->closure()); |
| 2691 } | 2711 } |
| 2692 | 2712 |
| 2693 | 2713 |
| 2694 void LCodeGen::DoContext(LContext* instr) { | 2714 void LCodeGen::DoContext(LContext* instr) { |
| 2695 Register result = ToRegister(instr->result()); | 2715 Register result = ToRegister(instr->result()); |
| 2696 __ mov(result, cp); | 2716 __ mov(result, cp); |
| 2697 } | 2717 } |
| 2698 | 2718 |
| 2699 | 2719 |
| 2700 void LCodeGen::DoOuterContext(LOuterContext* instr) { | 2720 void LCodeGen::DoOuterContext(LOuterContext* instr) { |
| (...skipping 49 matching lines...) |
| 2750 RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT); | 2770 RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT); |
| 2751 | 2771 |
| 2752 // Restore context. | 2772 // Restore context. |
| 2753 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); | 2773 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
| 2754 } | 2774 } |
| 2755 | 2775 |
| 2756 | 2776 |
| 2757 void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) { | 2777 void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) { |
| 2758 ASSERT(ToRegister(instr->result()).is(v0)); | 2778 ASSERT(ToRegister(instr->result()).is(v0)); |
| 2759 __ mov(a0, v0); | 2779 __ mov(a0, v0); |
| 2760 __ li(a1, Operand(instr->function())); | 2780 __ LoadHeapObject(a1, instr->function()); |
| 2761 CallKnownFunction(instr->function(), instr->arity(), instr, CALL_AS_METHOD); | 2781 CallKnownFunction(instr->function(), instr->arity(), instr, CALL_AS_METHOD); |
| 2762 } | 2782 } |
| 2763 | 2783 |
| 2764 | 2784 |
| 2765 void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LUnaryMathOperation* instr) { | 2785 void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LUnaryMathOperation* instr) { |
| 2766 Register input = ToRegister(instr->InputAt(0)); | 2786 Register input = ToRegister(instr->InputAt(0)); |
| 2767 Register result = ToRegister(instr->result()); | 2787 Register result = ToRegister(instr->result()); |
| 2768 Register scratch = scratch0(); | 2788 Register scratch = scratch0(); |
| 2769 | 2789 |
| 2770 // Deoptimize if not a heap number. | 2790 // Deoptimize if not a heap number. |
| (...skipping 416 matching lines...) |
| 3187 Handle<Code> ic = | 3207 Handle<Code> ic = |
| 3188 isolate()->stub_cache()->ComputeCallInitialize(arity, mode); | 3208 isolate()->stub_cache()->ComputeCallInitialize(arity, mode); |
| 3189 __ li(a2, Operand(instr->name())); | 3209 __ li(a2, Operand(instr->name())); |
| 3190 CallCode(ic, mode, instr); | 3210 CallCode(ic, mode, instr); |
| 3191 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); | 3211 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
| 3192 } | 3212 } |
| 3193 | 3213 |
| 3194 | 3214 |
| 3195 void LCodeGen::DoCallKnownGlobal(LCallKnownGlobal* instr) { | 3215 void LCodeGen::DoCallKnownGlobal(LCallKnownGlobal* instr) { |
| 3196 ASSERT(ToRegister(instr->result()).is(v0)); | 3216 ASSERT(ToRegister(instr->result()).is(v0)); |
| 3197 __ li(a1, Operand(instr->target())); | 3217 __ LoadHeapObject(a1, instr->target()); |
| 3198 CallKnownFunction(instr->target(), instr->arity(), instr, CALL_AS_FUNCTION); | 3218 CallKnownFunction(instr->target(), instr->arity(), instr, CALL_AS_FUNCTION); |
| 3199 } | 3219 } |
| 3200 | 3220 |
| 3201 | 3221 |
| 3202 void LCodeGen::DoCallNew(LCallNew* instr) { | 3222 void LCodeGen::DoCallNew(LCallNew* instr) { |
| 3203 ASSERT(ToRegister(instr->InputAt(0)).is(a1)); | 3223 ASSERT(ToRegister(instr->InputAt(0)).is(a1)); |
| 3204 ASSERT(ToRegister(instr->result()).is(v0)); | 3224 ASSERT(ToRegister(instr->result()).is(v0)); |
| 3205 | 3225 |
| 3206 Handle<Code> builtin = isolate()->builtins()->JSConstructCall(); | 3226 Handle<Code> builtin = isolate()->builtins()->JSConstructCall(); |
| 3207 __ li(a0, Operand(instr->arity())); | 3227 __ li(a0, Operand(instr->arity())); |
| (...skipping 807 matching lines...) |
| 4015 at, Operand(zero_reg)); | 4035 at, Operand(zero_reg)); |
| 4016 } else { | 4036 } else { |
| 4017 __ And(scratch, scratch, Operand(mask)); | 4037 __ And(scratch, scratch, Operand(mask)); |
| 4018 DeoptimizeIf(ne, instr->environment(), scratch, Operand(tag)); | 4038 DeoptimizeIf(ne, instr->environment(), scratch, Operand(tag)); |
| 4019 } | 4039 } |
| 4020 } | 4040 } |
| 4021 } | 4041 } |
| 4022 | 4042 |
| 4023 | 4043 |
| 4024 void LCodeGen::DoCheckFunction(LCheckFunction* instr) { | 4044 void LCodeGen::DoCheckFunction(LCheckFunction* instr) { |
| 4025 ASSERT(instr->InputAt(0)->IsRegister()); | 4045 Register reg = ToRegister(instr->value()); |
| 4026 Register reg = ToRegister(instr->InputAt(0)); | 4046 Handle<JSFunction> target = instr->hydrogen()->target(); |
| 4027 DeoptimizeIf(ne, instr->environment(), reg, | 4047 if (isolate()->heap()->InNewSpace(*target)) { |
| 4028 Operand(instr->hydrogen()->target())); | 4048 Register reg = ToRegister(instr->value()); |
| 4049 Handle<JSGlobalPropertyCell> cell = |
| 4050 isolate()->factory()->NewJSGlobalPropertyCell(target); |
| 4051 __ li(at, Operand(Handle<Object>(cell))); |
| 4052 __ lw(at, FieldMemOperand(at, JSGlobalPropertyCell::kValueOffset)); |
| 4053 DeoptimizeIf(ne, instr->environment(), reg, |
| 4054 Operand(at)); |
| 4055 } else { |
| 4056 DeoptimizeIf(ne, instr->environment(), reg, |
| 4057 Operand(target)); |
| 4058 } |
| 4029 } | 4059 } |
| 4030 | 4060 |
| 4031 | 4061 |
| 4032 void LCodeGen::DoCheckMap(LCheckMap* instr) { | 4062 void LCodeGen::DoCheckMap(LCheckMap* instr) { |
| 4033 Register scratch = scratch0(); | 4063 Register scratch = scratch0(); |
| 4034 LOperand* input = instr->InputAt(0); | 4064 LOperand* input = instr->InputAt(0); |
| 4035 ASSERT(input->IsRegister()); | 4065 ASSERT(input->IsRegister()); |
| 4036 Register reg = ToRegister(input); | 4066 Register reg = ToRegister(input); |
| 4037 __ lw(scratch, FieldMemOperand(reg, HeapObject::kMapOffset)); | 4067 __ lw(scratch, FieldMemOperand(reg, HeapObject::kMapOffset)); |
| 4038 DeoptimizeIf(ne, | 4068 DeoptimizeIf(ne, |
| (...skipping 48 matching lines...) |
| 4087 | 4117 |
| 4088 // smi | 4118 // smi |
| 4089 __ bind(&is_smi); | 4119 __ bind(&is_smi); |
| 4090 __ SmiUntag(scratch, input_reg); | 4120 __ SmiUntag(scratch, input_reg); |
| 4091 __ ClampUint8(result_reg, scratch); | 4121 __ ClampUint8(result_reg, scratch); |
| 4092 | 4122 |
| 4093 __ bind(&done); | 4123 __ bind(&done); |
| 4094 } | 4124 } |
| 4095 | 4125 |
| 4096 | 4126 |
| 4097 void LCodeGen::LoadHeapObject(Register result, | |
| 4098 Handle<HeapObject> object) { | |
| 4099 if (heap()->InNewSpace(*object)) { | |
| 4100 Handle<JSGlobalPropertyCell> cell = | |
| 4101 factory()->NewJSGlobalPropertyCell(object); | |
| 4102 __ li(result, Operand(cell)); | |
| 4103 __ lw(result, FieldMemOperand(result, JSGlobalPropertyCell::kValueOffset)); | |
| 4104 } else { | |
| 4105 __ li(result, Operand(object)); | |
| 4106 } | |
| 4107 } | |
| 4108 | |
| 4109 | |
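The LCodeGen::LoadHeapObject helper removed above is superseded by a MacroAssembler::LoadHeapObject method that this file now calls through the __ macro; that method lives outside this diff. A minimal sketch of what it is assumed to do on MIPS, mirroring the removed code: objects in new space may be moved by the garbage collector, so they are loaded indirectly through a JSGlobalPropertyCell, while other objects are embedded directly.

    // Assumed sketch of MacroAssembler::LoadHeapObject (defined outside this
    // file); it mirrors the LCodeGen::LoadHeapObject removed above.
    void MacroAssembler::LoadHeapObject(Register result,
                                        Handle<HeapObject> object) {
      if (isolate()->heap()->InNewSpace(*object)) {
        // New-space objects can move; reference them through a cell whose
        // value the garbage collector keeps up to date.
        Handle<JSGlobalPropertyCell> cell =
            isolate()->factory()->NewJSGlobalPropertyCell(object);
        li(result, Operand(cell));
        lw(result, FieldMemOperand(result, JSGlobalPropertyCell::kValueOffset));
      } else {
        // Objects outside new space can be embedded in the code directly.
        li(result, Operand(object));
      }
    }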
| 4110 void LCodeGen::DoCheckPrototypeMaps(LCheckPrototypeMaps* instr) { | 4127 void LCodeGen::DoCheckPrototypeMaps(LCheckPrototypeMaps* instr) { |
| 4111 Register temp1 = ToRegister(instr->TempAt(0)); | 4128 Register temp1 = ToRegister(instr->TempAt(0)); |
| 4112 Register temp2 = ToRegister(instr->TempAt(1)); | 4129 Register temp2 = ToRegister(instr->TempAt(1)); |
| 4113 | 4130 |
| 4114 Handle<JSObject> holder = instr->holder(); | 4131 Handle<JSObject> holder = instr->holder(); |
| 4115 Handle<JSObject> current_prototype = instr->prototype(); | 4132 Handle<JSObject> current_prototype = instr->prototype(); |
| 4116 | 4133 |
| 4117 // Load prototype object. | 4134 // Load prototype object. |
| 4118 LoadHeapObject(temp1, current_prototype); | 4135 __ LoadHeapObject(temp1, current_prototype); |
| 4119 | 4136 |
| 4120 // Check prototype maps up to the holder. | 4137 // Check prototype maps up to the holder. |
| 4121 while (!current_prototype.is_identical_to(holder)) { | 4138 while (!current_prototype.is_identical_to(holder)) { |
| 4122 __ lw(temp2, FieldMemOperand(temp1, HeapObject::kMapOffset)); | 4139 __ lw(temp2, FieldMemOperand(temp1, HeapObject::kMapOffset)); |
| 4123 DeoptimizeIf(ne, | 4140 DeoptimizeIf(ne, |
| 4124 instr->environment(), | 4141 instr->environment(), |
| 4125 temp2, | 4142 temp2, |
| 4126 Operand(Handle<Map>(current_prototype->map()))); | 4143 Operand(Handle<Map>(current_prototype->map()))); |
| 4127 current_prototype = | 4144 current_prototype = |
| 4128 Handle<JSObject>(JSObject::cast(current_prototype->GetPrototype())); | 4145 Handle<JSObject>(JSObject::cast(current_prototype->GetPrototype())); |
| 4129 // Load next prototype object. | 4146 // Load next prototype object. |
| 4130 LoadHeapObject(temp1, current_prototype); | 4147 __ LoadHeapObject(temp1, current_prototype); |
| 4131 } | 4148 } |
| 4132 | 4149 |
| 4133 // Check the holder map. | 4150 // Check the holder map. |
| 4134 __ lw(temp2, FieldMemOperand(temp1, HeapObject::kMapOffset)); | 4151 __ lw(temp2, FieldMemOperand(temp1, HeapObject::kMapOffset)); |
| 4135 DeoptimizeIf(ne, | 4152 DeoptimizeIf(ne, |
| 4136 instr->environment(), | 4153 instr->environment(), |
| 4137 temp2, | 4154 temp2, |
| 4138 Operand(Handle<Map>(current_prototype->map()))); | 4155 Operand(Handle<Map>(current_prototype->map()))); |
| 4139 } | 4156 } |
| 4140 | 4157 |
| 4141 | 4158 |
| 4142 void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) { | 4159 void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) { |
| 4143 Heap* heap = isolate()->heap(); | 4160 Heap* heap = isolate()->heap(); |
| 4144 ElementsKind boilerplate_elements_kind = | 4161 ElementsKind boilerplate_elements_kind = |
| 4145 instr->hydrogen()->boilerplate_elements_kind(); | 4162 instr->hydrogen()->boilerplate_elements_kind(); |
| 4146 | 4163 |
| 4147 // Deopt if the array literal boilerplate ElementsKind is of a type different | 4164 // Deopt if the array literal boilerplate ElementsKind is of a type different |
| 4148 // than the expected one. The check isn't necessary if the boilerplate has | 4165 // than the expected one. The check isn't necessary if the boilerplate has |
| 4149 // already been converted to FAST_ELEMENTS. | 4166 // already been converted to FAST_ELEMENTS. |
| 4150 if (boilerplate_elements_kind != FAST_ELEMENTS) { | 4167 if (boilerplate_elements_kind != FAST_ELEMENTS) { |
| 4151 LoadHeapObject(a1, instr->hydrogen()->boilerplate_object()); | 4168 __ LoadHeapObject(a1, instr->hydrogen()->boilerplate_object()); |
| 4152 // Load map into a2. | 4169 // Load map into a2. |
| 4153 __ lw(a2, FieldMemOperand(a1, HeapObject::kMapOffset)); | 4170 __ lw(a2, FieldMemOperand(a1, HeapObject::kMapOffset)); |
| 4154 // Load the map's "bit field 2". | 4171 // Load the map's "bit field 2". |
| 4155 __ lbu(a2, FieldMemOperand(a2, Map::kBitField2Offset)); | 4172 __ lbu(a2, FieldMemOperand(a2, Map::kBitField2Offset)); |
| 4156 // Retrieve elements_kind from bit field 2. | 4173 // Retrieve elements_kind from bit field 2. |
| 4157 __ Ext(a2, a2, Map::kElementsKindShift, Map::kElementsKindBitCount); | 4174 __ Ext(a2, a2, Map::kElementsKindShift, Map::kElementsKindBitCount); |
| 4158 DeoptimizeIf(ne, | 4175 DeoptimizeIf(ne, |
| 4159 instr->environment(), | 4176 instr->environment(), |
| 4160 a2, | 4177 a2, |
| 4161 Operand(boilerplate_elements_kind)); | 4178 Operand(boilerplate_elements_kind)); |
| (...skipping 54 matching lines...) |
| 4216 } | 4233 } |
| 4217 | 4234 |
| 4218 // Copy in-object properties. | 4235 // Copy in-object properties. |
| 4219 for (int i = 0; i < inobject_properties; i++) { | 4236 for (int i = 0; i < inobject_properties; i++) { |
| 4220 int total_offset = current_offset + object->GetInObjectPropertyOffset(i); | 4237 int total_offset = current_offset + object->GetInObjectPropertyOffset(i); |
| 4221 Handle<Object> value = Handle<Object>(object->InObjectPropertyAt(i)); | 4238 Handle<Object> value = Handle<Object>(object->InObjectPropertyAt(i)); |
| 4222 if (value->IsJSObject()) { | 4239 if (value->IsJSObject()) { |
| 4223 Handle<JSObject> value_object = Handle<JSObject>::cast(value); | 4240 Handle<JSObject> value_object = Handle<JSObject>::cast(value); |
| 4224 __ Addu(a2, result, Operand(*offset)); | 4241 __ Addu(a2, result, Operand(*offset)); |
| 4225 __ sw(a2, FieldMemOperand(result, total_offset)); | 4242 __ sw(a2, FieldMemOperand(result, total_offset)); |
| 4226 LoadHeapObject(source, value_object); | 4243 __ LoadHeapObject(source, value_object); |
| 4227 EmitDeepCopy(value_object, result, source, offset); | 4244 EmitDeepCopy(value_object, result, source, offset); |
| 4228 } else if (value->IsHeapObject()) { | 4245 } else if (value->IsHeapObject()) { |
| 4229 LoadHeapObject(a2, Handle<HeapObject>::cast(value)); | 4246 __ LoadHeapObject(a2, Handle<HeapObject>::cast(value)); |
| 4230 __ sw(a2, FieldMemOperand(result, total_offset)); | 4247 __ sw(a2, FieldMemOperand(result, total_offset)); |
| 4231 } else { | 4248 } else { |
| 4232 __ li(a2, Operand(value)); | 4249 __ li(a2, Operand(value)); |
| 4233 __ sw(a2, FieldMemOperand(result, total_offset)); | 4250 __ sw(a2, FieldMemOperand(result, total_offset)); |
| 4234 } | 4251 } |
| 4235 } | 4252 } |
| 4236 } | 4253 } |
| 4237 | 4254 |
| 4238 | 4255 |
| 4239 void LCodeGen::DoObjectLiteralFast(LObjectLiteralFast* instr) { | 4256 void LCodeGen::DoObjectLiteralFast(LObjectLiteralFast* instr) { |
| 4240 int size = instr->hydrogen()->total_size(); | 4257 int size = instr->hydrogen()->total_size(); |
| 4241 | 4258 |
| 4242 // Allocate all objects that are part of the literal in one big | 4259 // Allocate all objects that are part of the literal in one big |
| 4243 // allocation. This avoids multiple limit checks. | 4260 // allocation. This avoids multiple limit checks. |
| 4244 Label allocated, runtime_allocate; | 4261 Label allocated, runtime_allocate; |
| 4245 __ AllocateInNewSpace(size, v0, a2, a3, &runtime_allocate, TAG_OBJECT); | 4262 __ AllocateInNewSpace(size, v0, a2, a3, &runtime_allocate, TAG_OBJECT); |
| 4246 __ jmp(&allocated); | 4263 __ jmp(&allocated); |
| 4247 | 4264 |
| 4248 __ bind(&runtime_allocate); | 4265 __ bind(&runtime_allocate); |
| 4249 __ li(a0, Operand(Smi::FromInt(size))); | 4266 __ li(a0, Operand(Smi::FromInt(size))); |
| 4250 __ push(a0); | 4267 __ push(a0); |
| 4251 CallRuntime(Runtime::kAllocateInNewSpace, 1, instr); | 4268 CallRuntime(Runtime::kAllocateInNewSpace, 1, instr); |
| 4252 | 4269 |
| 4253 __ bind(&allocated); | 4270 __ bind(&allocated); |
| 4254 int offset = 0; | 4271 int offset = 0; |
| 4255 LoadHeapObject(a1, instr->hydrogen()->boilerplate()); | 4272 __ LoadHeapObject(a1, instr->hydrogen()->boilerplate()); |
| 4256 EmitDeepCopy(instr->hydrogen()->boilerplate(), v0, a1, &offset); | 4273 EmitDeepCopy(instr->hydrogen()->boilerplate(), v0, a1, &offset); |
| 4257 ASSERT_EQ(size, offset); | 4274 ASSERT_EQ(size, offset); |
| 4258 } | 4275 } |
| 4259 | 4276 |
| 4260 | 4277 |
| 4261 void LCodeGen::DoObjectLiteralGeneric(LObjectLiteralGeneric* instr) { | 4278 void LCodeGen::DoObjectLiteralGeneric(LObjectLiteralGeneric* instr) { |
| 4262 ASSERT(ToRegister(instr->result()).is(v0)); | 4279 ASSERT(ToRegister(instr->result()).is(v0)); |
| 4263 | 4280 |
| 4264 Handle<FixedArray> constant_properties = | 4281 Handle<FixedArray> constant_properties = |
| 4265 instr->hydrogen()->constant_properties(); | 4282 instr->hydrogen()->constant_properties(); |
| (...skipping 396 matching lines...) |
| 4662 ASSERT(!environment->HasBeenRegistered()); | 4679 ASSERT(!environment->HasBeenRegistered()); |
| 4663 RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt); | 4680 RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt); |
| 4664 ASSERT(osr_pc_offset_ == -1); | 4681 ASSERT(osr_pc_offset_ == -1); |
| 4665 osr_pc_offset_ = masm()->pc_offset(); | 4682 osr_pc_offset_ = masm()->pc_offset(); |
| 4666 } | 4683 } |
| 4667 | 4684 |
| 4668 | 4685 |
| 4669 #undef __ | 4686 #undef __ |
| 4670 | 4687 |
| 4671 } } // namespace v8::internal | 4688 } } // namespace v8::internal |