| OLD | NEW |
| 1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| | 4 // |
| | 5 // Copyright IBM Corp. 2012, 2013. All rights reserved. |
| 4 | 6 |
| 5 #include "src/v8.h" | 7 #include "src/v8.h" |
| 6 | 8 |
| 7 #if V8_TARGET_ARCH_ARM | 9 #if V8_TARGET_ARCH_PPC |
| 8 | 10 |
| 9 #include "src/ic/call-optimization.h" | 11 #include "src/ic/call-optimization.h" |
| 10 #include "src/ic/handler-compiler.h" | 12 #include "src/ic/handler-compiler.h" |
| | 13 #include "src/ic/ic.h" |
| 11 | 14 |
| 12 namespace v8 { | 15 namespace v8 { |
| 13 namespace internal { | 16 namespace internal { |
| 14 | 17 |
| 15 #define __ ACCESS_MASM(masm) | 18 #define __ ACCESS_MASM(masm) |
| 16 | 19 |
| 17 | 20 |
| 18 void NamedLoadHandlerCompiler::GenerateLoadViaGetter( | 21 void NamedLoadHandlerCompiler::GenerateLoadViaGetter( |
| 19 MacroAssembler* masm, Handle<HeapType> type, Register receiver, | 22 MacroAssembler* masm, Handle<HeapType> type, Register receiver, |
| 20 Handle<JSFunction> getter) { | 23 Handle<JSFunction> getter) { |
| 21 // ----------- S t a t e ------------- | 24 // ----------- S t a t e ------------- |
| 22 // -- r0 : receiver | 25 // -- r3 : receiver |
| 23 // -- r2 : name | 26 // -- r5 : name |
| 24 // -- lr : return address | 27 // -- lr : return address |
| 25 // ----------------------------------- | 28 // ----------------------------------- |
| 26 { | 29 { |
| 27 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); | 30 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); |
| 28 | 31 |
| 29 if (!getter.is_null()) { | 32 if (!getter.is_null()) { |
| 30 // Call the JavaScript getter with the receiver on the stack. | 33 // Call the JavaScript getter with the receiver on the stack. |
| 31 if (IC::TypeToMap(*type, masm->isolate())->IsJSGlobalObjectMap()) { | 34 if (IC::TypeToMap(*type, masm->isolate())->IsJSGlobalObjectMap()) { |
| 32 // Swap in the global receiver. | 35 // Swap in the global receiver. |
| 33 __ ldr(receiver, | 36 __ LoadP(receiver, |
| 34 FieldMemOperand(receiver, JSGlobalObject::kGlobalProxyOffset)); | 37 FieldMemOperand(receiver, JSGlobalObject::kGlobalProxyOffset)); |
| 35 } | 38 } |
| 36 __ push(receiver); | 39 __ push(receiver); |
| 37 ParameterCount actual(0); | 40 ParameterCount actual(0); |
| 38 ParameterCount expected(getter); | 41 ParameterCount expected(getter); |
| 39 __ InvokeFunction(getter, expected, actual, CALL_FUNCTION, | 42 __ InvokeFunction(getter, expected, actual, CALL_FUNCTION, |
| 40 NullCallWrapper()); | 43 NullCallWrapper()); |
| 41 } else { | 44 } else { |
| 42 // If we generate a global code snippet for deoptimization only, remember | 45 // If we generate a global code snippet for deoptimization only, remember |
| 43 // the place to continue after deoptimization. | 46 // the place to continue after deoptimization. |
| 44 masm->isolate()->heap()->SetGetterStubDeoptPCOffset(masm->pc_offset()); | 47 masm->isolate()->heap()->SetGetterStubDeoptPCOffset(masm->pc_offset()); |
| 45 } | 48 } |
| 46 | 49 |
| 47 // Restore context register. | 50 // Restore context register. |
| 48 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); | 51 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
| 49 } | 52 } |
| 50 __ Ret(); | 53 __ Ret(); |
| 51 } | 54 } |
| 52 | 55 |
| 53 | 56 |
| 54 void NamedStoreHandlerCompiler::GenerateStoreViaSetter( | 57 void NamedStoreHandlerCompiler::GenerateStoreViaSetter( |
| 55 MacroAssembler* masm, Handle<HeapType> type, Register receiver, | 58 MacroAssembler* masm, Handle<HeapType> type, Register receiver, |
| 56 Handle<JSFunction> setter) { | 59 Handle<JSFunction> setter) { |
| 57 // ----------- S t a t e ------------- | 60 // ----------- S t a t e ------------- |
| 58 // -- lr : return address | 61 // -- lr : return address |
| 59 // ----------------------------------- | 62 // ----------------------------------- |
| 60 { | 63 { |
| 61 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); | 64 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); |
| 62 | 65 |
| 63 // Save value register, so we can restore it later. | 66 // Save value register, so we can restore it later. |
| 64 __ push(value()); | 67 __ push(value()); |
| 65 | 68 |
| 66 if (!setter.is_null()) { | 69 if (!setter.is_null()) { |
| 67 // Call the JavaScript setter with receiver and value on the stack. | 70 // Call the JavaScript setter with receiver and value on the stack. |
| 68 if (IC::TypeToMap(*type, masm->isolate())->IsJSGlobalObjectMap()) { | 71 if (IC::TypeToMap(*type, masm->isolate())->IsJSGlobalObjectMap()) { |
| 69 // Swap in the global receiver. | 72 // Swap in the global receiver. |
| 70 __ ldr(receiver, | 73 __ LoadP(receiver, |
| 71 FieldMemOperand(receiver, JSGlobalObject::kGlobalProxyOffset)); | 74 FieldMemOperand(receiver, JSGlobalObject::kGlobalProxyOffset)); |
| 72 } | 75 } |
| 73 __ Push(receiver, value()); | 76 __ Push(receiver, value()); |
| 74 ParameterCount actual(1); | 77 ParameterCount actual(1); |
| 75 ParameterCount expected(setter); | 78 ParameterCount expected(setter); |
| 76 __ InvokeFunction(setter, expected, actual, CALL_FUNCTION, | 79 __ InvokeFunction(setter, expected, actual, CALL_FUNCTION, |
| 77 NullCallWrapper()); | 80 NullCallWrapper()); |
| 78 } else { | 81 } else { |
| 79 // If we generate a global code snippet for deoptimization only, remember | 82 // If we generate a global code snippet for deoptimization only, remember |
| 80 // the place to continue after deoptimization. | 83 // the place to continue after deoptimization. |
| 81 masm->isolate()->heap()->SetSetterStubDeoptPCOffset(masm->pc_offset()); | 84 masm->isolate()->heap()->SetSetterStubDeoptPCOffset(masm->pc_offset()); |
| 82 } | 85 } |
| 83 | 86 |
| 84 // We have to return the passed value, not the return value of the setter. | 87 // We have to return the passed value, not the return value of the setter. |
| 85 __ pop(r0); | 88 __ pop(r3); |
| 86 | 89 |
| 87 // Restore context register. | 90 // Restore context register. |
| 88 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); | 91 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); |
| 89 } | 92 } |
| 90 __ Ret(); | 93 __ Ret(); |
| 91 } | 94 } |
| 92 | 95 |
| 93 | 96 |
| 94 void PropertyHandlerCompiler::GenerateDictionaryNegativeLookup( | 97 void PropertyHandlerCompiler::GenerateDictionaryNegativeLookup( |
| 95 MacroAssembler* masm, Label* miss_label, Register receiver, | 98 MacroAssembler* masm, Label* miss_label, Register receiver, |
| 96 Handle<Name> name, Register scratch0, Register scratch1) { | 99 Handle<Name> name, Register scratch0, Register scratch1) { |
| 97 DCHECK(name->IsUniqueName()); | 100 DCHECK(name->IsUniqueName()); |
| 98 DCHECK(!receiver.is(scratch0)); | 101 DCHECK(!receiver.is(scratch0)); |
| 99 Counters* counters = masm->isolate()->counters(); | 102 Counters* counters = masm->isolate()->counters(); |
| 100 __ IncrementCounter(counters->negative_lookups(), 1, scratch0, scratch1); | 103 __ IncrementCounter(counters->negative_lookups(), 1, scratch0, scratch1); |
| 101 __ IncrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1); | 104 __ IncrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1); |
| 102 | 105 |
| 103 Label done; | 106 Label done; |
| 104 | 107 |
| 105 const int kInterceptorOrAccessCheckNeededMask = | 108 const int kInterceptorOrAccessCheckNeededMask = |
| 106 (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded); | 109 (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded); |
| 107 | 110 |
| 108 // Bail out if the receiver has a named interceptor or requires access checks. | 111 // Bail out if the receiver has a named interceptor or requires access checks. |
| 109 Register map = scratch1; | 112 Register map = scratch1; |
| 110 __ ldr(map, FieldMemOperand(receiver, HeapObject::kMapOffset)); | 113 __ LoadP(map, FieldMemOperand(receiver, HeapObject::kMapOffset)); |
| 111 __ ldrb(scratch0, FieldMemOperand(map, Map::kBitFieldOffset)); | 114 __ lbz(scratch0, FieldMemOperand(map, Map::kBitFieldOffset)); |
| 112 __ tst(scratch0, Operand(kInterceptorOrAccessCheckNeededMask)); | 115 __ andi(r0, scratch0, Operand(kInterceptorOrAccessCheckNeededMask)); |
| 113 __ b(ne, miss_label); | 116 __ bne(miss_label, cr0); |
| 114 | 117 |
| 115 // Check that receiver is a JSObject. | 118 // Check that receiver is a JSObject. |
| 116 __ ldrb(scratch0, FieldMemOperand(map, Map::kInstanceTypeOffset)); | 119 __ lbz(scratch0, FieldMemOperand(map, Map::kInstanceTypeOffset)); |
| 117 __ cmp(scratch0, Operand(FIRST_SPEC_OBJECT_TYPE)); | 120 __ cmpi(scratch0, Operand(FIRST_SPEC_OBJECT_TYPE)); |
| 118 __ b(lt, miss_label); | 121 __ blt(miss_label); |
| 119 | 122 |
| 120 // Load properties array. | 123 // Load properties array. |
| 121 Register properties = scratch0; | 124 Register properties = scratch0; |
| 122 __ ldr(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset)); | 125 __ LoadP(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset)); |
| 123 // Check that the properties array is a dictionary. | 126 // Check that the properties array is a dictionary. |
| 124 __ ldr(map, FieldMemOperand(properties, HeapObject::kMapOffset)); | 127 __ LoadP(map, FieldMemOperand(properties, HeapObject::kMapOffset)); |
| 125 Register tmp = properties; | 128 Register tmp = properties; |
| 126 __ LoadRoot(tmp, Heap::kHashTableMapRootIndex); | 129 __ LoadRoot(tmp, Heap::kHashTableMapRootIndex); |
| 127 __ cmp(map, tmp); | 130 __ cmp(map, tmp); |
| 128 __ b(ne, miss_label); | 131 __ bne(miss_label); |
| 129 | 132 |
| 130 // Restore the temporarily used register. | 133 // Restore the temporarily used register. |
| 131 __ ldr(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset)); | 134 __ LoadP(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset)); |
| 132 | 135 |
| 133 | 136 |
| 134 NameDictionaryLookupStub::GenerateNegativeLookup( | 137 NameDictionaryLookupStub::GenerateNegativeLookup( |
| 135 masm, miss_label, &done, receiver, properties, name, scratch1); | 138 masm, miss_label, &done, receiver, properties, name, scratch1); |
| 136 __ bind(&done); | 139 __ bind(&done); |
| 137 __ DecrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1); | 140 __ DecrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1); |
| 138 } | 141 } |
| 139 | 142 |
| 140 | 143 |
| 141 void NamedLoadHandlerCompiler::GenerateDirectLoadGlobalFunctionPrototype( | 144 void NamedLoadHandlerCompiler::GenerateDirectLoadGlobalFunctionPrototype( |
| 142 MacroAssembler* masm, int index, Register prototype, Label* miss) { | 145 MacroAssembler* masm, int index, Register prototype, Label* miss) { |
| 143 Isolate* isolate = masm->isolate(); | 146 Isolate* isolate = masm->isolate(); |
| 144 // Get the global function with the given index. | 147 // Get the global function with the given index. |
| 145 Handle<JSFunction> function( | 148 Handle<JSFunction> function( |
| 146 JSFunction::cast(isolate->native_context()->get(index))); | 149 JSFunction::cast(isolate->native_context()->get(index))); |
| 147 | 150 |
| 148 // Check we're still in the same context. | 151 // Check we're still in the same context. |
| 149 Register scratch = prototype; | 152 Register scratch = prototype; |
| 150 const int offset = Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX); | 153 const int offset = Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX); |
| 151 __ ldr(scratch, MemOperand(cp, offset)); | 154 __ LoadP(scratch, MemOperand(cp, offset)); |
| 152 __ ldr(scratch, FieldMemOperand(scratch, GlobalObject::kNativeContextOffset)); | 155 __ LoadP(scratch, |
| 153 __ ldr(scratch, MemOperand(scratch, Context::SlotOffset(index))); | 156 FieldMemOperand(scratch, GlobalObject::kNativeContextOffset)); |
| | 157 __ LoadP(scratch, MemOperand(scratch, Context::SlotOffset(index))); |
| 154 __ Move(ip, function); | 158 __ Move(ip, function); |
| 155 __ cmp(ip, scratch); | 159 __ cmp(ip, scratch); |
| 156 __ b(ne, miss); | 160 __ bne(miss); |
| 157 | 161 |
| 158 // Load its initial map. The global functions all have initial maps. | 162 // Load its initial map. The global functions all have initial maps. |
| 159 __ Move(prototype, Handle<Map>(function->initial_map())); | 163 __ Move(prototype, Handle<Map>(function->initial_map())); |
| 160 // Load the prototype from the initial map. | 164 // Load the prototype from the initial map. |
| 161 __ ldr(prototype, FieldMemOperand(prototype, Map::kPrototypeOffset)); | 165 __ LoadP(prototype, FieldMemOperand(prototype, Map::kPrototypeOffset)); |
| 162 } | 166 } |
| 163 | 167 |
| 164 | 168 |
| 165 void NamedLoadHandlerCompiler::GenerateLoadFunctionPrototype( | 169 void NamedLoadHandlerCompiler::GenerateLoadFunctionPrototype( |
| 166 MacroAssembler* masm, Register receiver, Register scratch1, | 170 MacroAssembler* masm, Register receiver, Register scratch1, |
| 167 Register scratch2, Label* miss_label) { | 171 Register scratch2, Label* miss_label) { |
| 168 __ TryGetFunctionPrototype(receiver, scratch1, scratch2, miss_label); | 172 __ TryGetFunctionPrototype(receiver, scratch1, scratch2, miss_label); |
| 169 __ mov(r0, scratch1); | 173 __ mr(r3, scratch1); |
| 170 __ Ret(); | 174 __ Ret(); |
| 171 } | 175 } |
| 172 | 176 |
| 173 | 177 |
| 174 // Generate code to check that a global property cell is empty. Create | 178 // Generate code to check that a global property cell is empty. Create |
| 175 // the property cell at compilation time if no cell exists for the | 179 // the property cell at compilation time if no cell exists for the |
| 176 // property. | 180 // property. |
| 177 void PropertyHandlerCompiler::GenerateCheckPropertyCell( | 181 void PropertyHandlerCompiler::GenerateCheckPropertyCell( |
| 178 MacroAssembler* masm, Handle<JSGlobalObject> global, Handle<Name> name, | 182 MacroAssembler* masm, Handle<JSGlobalObject> global, Handle<Name> name, |
| 179 Register scratch, Label* miss) { | 183 Register scratch, Label* miss) { |
| 180 Handle<Cell> cell = JSGlobalObject::EnsurePropertyCell(global, name); | 184 Handle<Cell> cell = JSGlobalObject::EnsurePropertyCell(global, name); |
| 181 DCHECK(cell->value()->IsTheHole()); | 185 DCHECK(cell->value()->IsTheHole()); |
| 182 __ mov(scratch, Operand(cell)); | 186 __ mov(scratch, Operand(cell)); |
| 183 __ ldr(scratch, FieldMemOperand(scratch, Cell::kValueOffset)); | 187 __ LoadP(scratch, FieldMemOperand(scratch, Cell::kValueOffset)); |
| 184 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); | 188 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); |
| 185 __ cmp(scratch, ip); | 189 __ cmp(scratch, ip); |
| 186 __ b(ne, miss); | 190 __ bne(miss); |
| 187 } | 191 } |
| 188 | 192 |
| 189 | 193 |
| 190 static void PushInterceptorArguments(MacroAssembler* masm, Register receiver, | 194 static void PushInterceptorArguments(MacroAssembler* masm, Register receiver, |
| 191 Register holder, Register name, | 195 Register holder, Register name, |
| 192 Handle<JSObject> holder_obj) { | 196 Handle<JSObject> holder_obj) { |
| 193 STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsNameIndex == 0); | 197 STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsNameIndex == 0); |
| 194 STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsInfoIndex == 1); | 198 STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsInfoIndex == 1); |
| 195 STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsThisIndex == 2); | 199 STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsThisIndex == 2); |
| 196 STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsHolderIndex == 3); | 200 STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsHolderIndex == 3); |
| (...skipping 28 matching lines...) |
| 225 // Write the arguments to stack frame. | 229 // Write the arguments to stack frame. |
| 226 for (int i = 0; i < argc; i++) { | 230 for (int i = 0; i < argc; i++) { |
| 227 Register arg = values[argc - 1 - i]; | 231 Register arg = values[argc - 1 - i]; |
| 228 DCHECK(!receiver.is(arg)); | 232 DCHECK(!receiver.is(arg)); |
| 229 DCHECK(!scratch_in.is(arg)); | 233 DCHECK(!scratch_in.is(arg)); |
| 230 __ push(arg); | 234 __ push(arg); |
| 231 } | 235 } |
| 232 DCHECK(optimization.is_simple_api_call()); | 236 DCHECK(optimization.is_simple_api_call()); |
| 233 | 237 |
| 234 // Abi for CallApiFunctionStub. | 238 // Abi for CallApiFunctionStub. |
| 235 Register callee = r0; | 239 Register callee = r3; |
| 236 Register call_data = r4; | 240 Register call_data = r7; |
| 237 Register holder = r2; | 241 Register holder = r5; |
| 238 Register api_function_address = r1; | 242 Register api_function_address = r4; |
| 239 | 243 |
| 240 // Put holder in place. | 244 // Put holder in place. |
| 241 CallOptimization::HolderLookup holder_lookup; | 245 CallOptimization::HolderLookup holder_lookup; |
| 242 Handle<JSObject> api_holder = | 246 Handle<JSObject> api_holder = |
| 243 optimization.LookupHolderOfExpectedType(receiver_map, &holder_lookup); | 247 optimization.LookupHolderOfExpectedType(receiver_map, &holder_lookup); |
| 244 switch (holder_lookup) { | 248 switch (holder_lookup) { |
| 245 case CallOptimization::kHolderIsReceiver: | 249 case CallOptimization::kHolderIsReceiver: |
| 246 __ Move(holder, receiver); | 250 __ Move(holder, receiver); |
| 247 break; | 251 break; |
| 248 case CallOptimization::kHolderFound: | 252 case CallOptimization::kHolderFound: |
| 249 __ Move(holder, api_holder); | 253 __ Move(holder, api_holder); |
| 250 break; | 254 break; |
| 251 case CallOptimization::kHolderNotFound: | 255 case CallOptimization::kHolderNotFound: |
| 252 UNREACHABLE(); | 256 UNREACHABLE(); |
| 253 break; | 257 break; |
| 254 } | 258 } |
| 255 | 259 |
| 256 Isolate* isolate = masm->isolate(); | 260 Isolate* isolate = masm->isolate(); |
| 257 Handle<JSFunction> function = optimization.constant_function(); | 261 Handle<JSFunction> function = optimization.constant_function(); |
| 258 Handle<CallHandlerInfo> api_call_info = optimization.api_call_info(); | 262 Handle<CallHandlerInfo> api_call_info = optimization.api_call_info(); |
| 259 Handle<Object> call_data_obj(api_call_info->data(), isolate); | 263 Handle<Object> call_data_obj(api_call_info->data(), isolate); |
| 260 | 264 |
| 261 // Put callee in place. | 265 // Put callee in place. |
| 262 __ Move(callee, function); | 266 __ Move(callee, function); |
| 263 | 267 |
| 264 bool call_data_undefined = false; | 268 bool call_data_undefined = false; |
| 265 // Put call_data in place. | 269 // Put call_data in place. |
| 266 if (isolate->heap()->InNewSpace(*call_data_obj)) { | 270 if (isolate->heap()->InNewSpace(*call_data_obj)) { |
| 267 __ Move(call_data, api_call_info); | 271 __ Move(call_data, api_call_info); |
| 268 __ ldr(call_data, FieldMemOperand(call_data, CallHandlerInfo::kDataOffset)); | 272 __ LoadP(call_data, |
| | 273 FieldMemOperand(call_data, CallHandlerInfo::kDataOffset)); |
| 269 } else if (call_data_obj->IsUndefined()) { | 274 } else if (call_data_obj->IsUndefined()) { |
| 270 call_data_undefined = true; | 275 call_data_undefined = true; |
| 271 __ LoadRoot(call_data, Heap::kUndefinedValueRootIndex); | 276 __ LoadRoot(call_data, Heap::kUndefinedValueRootIndex); |
| 272 } else { | 277 } else { |
| 273 __ Move(call_data, call_data_obj); | 278 __ Move(call_data, call_data_obj); |
| 274 } | 279 } |
| 275 | 280 |
| 276 // Put api_function_address in place. | 281 // Put api_function_address in place. |
| 277 Address function_address = v8::ToCData<Address>(api_call_info->callback()); | 282 Address function_address = v8::ToCData<Address>(api_call_info->callback()); |
| 278 ApiFunction fun(function_address); | 283 ApiFunction fun(function_address); |
| (...skipping 39 matching lines...) |
| 318 | 323 |
| 319 void NamedStoreHandlerCompiler::GenerateRestoreName(Label* label, | 324 void NamedStoreHandlerCompiler::GenerateRestoreName(Label* label, |
| 320 Handle<Name> name) { | 325 Handle<Name> name) { |
| 321 if (!label->is_unused()) { | 326 if (!label->is_unused()) { |
| 322 __ bind(label); | 327 __ bind(label); |
| 323 __ mov(this->name(), Operand(name)); | 328 __ mov(this->name(), Operand(name)); |
| 324 } | 329 } |
| 325 } | 330 } |
| 326 | 331 |
| 327 | 332 |
| 328 // Generate StoreTransition code, value is passed in r0 register. | 333 // Generate StoreTransition code, value is passed in r3 register. |
| 329 // When leaving generated code after success, the receiver_reg and name_reg | 334 // When leaving generated code after success, the receiver_reg and name_reg |
| 330 // may be clobbered. Upon branch to miss_label, the receiver and name | 335 // may be clobbered. Upon branch to miss_label, the receiver and name |
| 331 // registers have their original values. | 336 // registers have their original values. |
| 332 void NamedStoreHandlerCompiler::GenerateStoreTransition( | 337 void NamedStoreHandlerCompiler::GenerateStoreTransition( |
| 333 Handle<Map> transition, Handle<Name> name, Register receiver_reg, | 338 Handle<Map> transition, Handle<Name> name, Register receiver_reg, |
| 334 Register storage_reg, Register value_reg, Register scratch1, | 339 Register storage_reg, Register value_reg, Register scratch1, |
| 335 Register scratch2, Register scratch3, Label* miss_label, Label* slow) { | 340 Register scratch2, Register scratch3, Label* miss_label, Label* slow) { |
| 336 // r0 : value | 341 // r3 : value |
| 337 Label exit; | 342 Label exit; |
| 338 | 343 |
| 339 int descriptor = transition->LastAdded(); | 344 int descriptor = transition->LastAdded(); |
| 340 DescriptorArray* descriptors = transition->instance_descriptors(); | 345 DescriptorArray* descriptors = transition->instance_descriptors(); |
| 341 PropertyDetails details = descriptors->GetDetails(descriptor); | 346 PropertyDetails details = descriptors->GetDetails(descriptor); |
| 342 Representation representation = details.representation(); | 347 Representation representation = details.representation(); |
| 343 DCHECK(!representation.IsNone()); | 348 DCHECK(!representation.IsNone()); |
| 344 | 349 |
| 345 if (details.type() == CONSTANT) { | 350 if (details.type() == CONSTANT) { |
| 346 Handle<Object> constant(descriptors->GetValue(descriptor), isolate()); | 351 Handle<Object> constant(descriptors->GetValue(descriptor), isolate()); |
| 347 __ Move(scratch1, constant); | 352 __ Move(scratch1, constant); |
| 348 __ cmp(value_reg, scratch1); | 353 __ cmp(value_reg, scratch1); |
| 349 __ b(ne, miss_label); | 354 __ bne(miss_label); |
| 350 } else if (representation.IsSmi()) { | 355 } else if (representation.IsSmi()) { |
| 351 __ JumpIfNotSmi(value_reg, miss_label); | 356 __ JumpIfNotSmi(value_reg, miss_label); |
| 352 } else if (representation.IsHeapObject()) { | 357 } else if (representation.IsHeapObject()) { |
| 353 __ JumpIfSmi(value_reg, miss_label); | 358 __ JumpIfSmi(value_reg, miss_label); |
| 354 HeapType* field_type = descriptors->GetFieldType(descriptor); | 359 HeapType* field_type = descriptors->GetFieldType(descriptor); |
| 355 HeapType::Iterator<Map> it = field_type->Classes(); | 360 HeapType::Iterator<Map> it = field_type->Classes(); |
| 356 if (!it.Done()) { | 361 if (!it.Done()) { |
| 357 __ ldr(scratch1, FieldMemOperand(value_reg, HeapObject::kMapOffset)); | 362 __ LoadP(scratch1, FieldMemOperand(value_reg, HeapObject::kMapOffset)); |
| 358 Label do_store; | 363 Label do_store; |
| 359 while (true) { | 364 while (true) { |
| 360 __ CompareMap(scratch1, it.Current(), &do_store); | 365 __ CompareMap(scratch1, it.Current(), &do_store); |
| 361 it.Advance(); | 366 it.Advance(); |
| 362 if (it.Done()) { | 367 if (it.Done()) { |
| 363 __ b(ne, miss_label); | 368 __ bne(miss_label); |
| 364 break; | 369 break; |
| 365 } | 370 } |
| 366 __ b(eq, &do_store); | 371 __ beq(&do_store); |
| 367 } | 372 } |
| 368 __ bind(&do_store); | 373 __ bind(&do_store); |
| 369 } | 374 } |
| 370 } else if (representation.IsDouble()) { | 375 } else if (representation.IsDouble()) { |
| 371 Label do_store, heap_number; | 376 Label do_store, heap_number; |
| 372 __ LoadRoot(scratch3, Heap::kMutableHeapNumberMapRootIndex); | 377 __ LoadRoot(scratch3, Heap::kMutableHeapNumberMapRootIndex); |
| 373 __ AllocateHeapNumber(storage_reg, scratch1, scratch2, scratch3, slow, | 378 __ AllocateHeapNumber(storage_reg, scratch1, scratch2, scratch3, slow, |
| 374 TAG_RESULT, MUTABLE); | 379 TAG_RESULT, MUTABLE); |
| 375 | 380 |
| 376 __ JumpIfNotSmi(value_reg, &heap_number); | 381 __ JumpIfNotSmi(value_reg, &heap_number); |
| 377 __ SmiUntag(scratch1, value_reg); | 382 __ SmiUntag(scratch1, value_reg); |
| 378 __ vmov(s0, scratch1); | 383 __ ConvertIntToDouble(scratch1, d0); |
| 379 __ vcvt_f64_s32(d0, s0); | |
| 380 __ jmp(&do_store); | 384 __ jmp(&do_store); |
| 381 | 385 |
| 382 __ bind(&heap_number); | 386 __ bind(&heap_number); |
| 383 __ CheckMap(value_reg, scratch1, Heap::kHeapNumberMapRootIndex, miss_label, | 387 __ CheckMap(value_reg, scratch1, Heap::kHeapNumberMapRootIndex, miss_label, |
| 384 DONT_DO_SMI_CHECK); | 388 DONT_DO_SMI_CHECK); |
| 385 __ vldr(d0, FieldMemOperand(value_reg, HeapNumber::kValueOffset)); | 389 __ lfd(d0, FieldMemOperand(value_reg, HeapNumber::kValueOffset)); |
| 386 | 390 |
| 387 __ bind(&do_store); | 391 __ bind(&do_store); |
| 388 __ vstr(d0, FieldMemOperand(storage_reg, HeapNumber::kValueOffset)); | 392 __ stfd(d0, FieldMemOperand(storage_reg, HeapNumber::kValueOffset)); |
| 389 } | 393 } |
| 390 | 394 |
| 391 // Stub never generated for objects that require access checks. | 395 // Stub never generated for objects that require access checks. |
| 392 DCHECK(!transition->is_access_check_needed()); | 396 DCHECK(!transition->is_access_check_needed()); |
| 393 | 397 |
| 394 // Perform map transition for the receiver if necessary. | 398 // Perform map transition for the receiver if necessary. |
| 395 if (details.type() == FIELD && | 399 if (details.type() == FIELD && |
| 396 Map::cast(transition->GetBackPointer())->unused_property_fields() == 0) { | 400 Map::cast(transition->GetBackPointer())->unused_property_fields() == 0) { |
| 397 // The properties must be extended before we can store the value. | 401 // The properties must be extended before we can store the value. |
| 398 // We jump to a runtime call that extends the properties array. | 402 // We jump to a runtime call that extends the properties array. |
| 399 __ push(receiver_reg); | 403 __ push(receiver_reg); |
| 400 __ mov(r2, Operand(transition)); | 404 __ mov(r5, Operand(transition)); |
| 401 __ Push(r2, r0); | 405 __ Push(r5, r3); |
| 402 __ TailCallExternalReference( | 406 __ TailCallExternalReference( |
| 403 ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage), | 407 ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage), |
| 404 isolate()), | 408 isolate()), |
| 405 3, 1); | 409 3, 1); |
| 406 return; | 410 return; |
| 407 } | 411 } |
| 408 | 412 |
| 409 // Update the map of the object. | 413 // Update the map of the object. |
| 410 __ mov(scratch1, Operand(transition)); | 414 __ mov(scratch1, Operand(transition)); |
| 411 __ str(scratch1, FieldMemOperand(receiver_reg, HeapObject::kMapOffset)); | 415 __ StoreP(scratch1, FieldMemOperand(receiver_reg, HeapObject::kMapOffset), |
| | 416 r0); |
| 412 | 417 |
| 413 // Update the write barrier for the map field. | 418 // Update the write barrier for the map field. |
| 414 __ RecordWriteField(receiver_reg, HeapObject::kMapOffset, scratch1, scratch2, | 419 __ RecordWriteField(receiver_reg, HeapObject::kMapOffset, scratch1, scratch2, |
| 415 kLRHasNotBeenSaved, kDontSaveFPRegs, OMIT_REMEMBERED_SET, | 420 kLRHasNotBeenSaved, kDontSaveFPRegs, OMIT_REMEMBERED_SET, |
| 416 OMIT_SMI_CHECK); | 421 OMIT_SMI_CHECK); |
| 417 | 422 |
| 418 if (details.type() == CONSTANT) { | 423 if (details.type() == CONSTANT) { |
| 419 DCHECK(value_reg.is(r0)); | 424 DCHECK(value_reg.is(r3)); |
| 420 __ Ret(); | 425 __ Ret(); |
| 421 return; | 426 return; |
| 422 } | 427 } |
| 423 | 428 |
| 424 int index = transition->instance_descriptors()->GetFieldIndex( | 429 int index = transition->instance_descriptors()->GetFieldIndex( |
| 425 transition->LastAdded()); | 430 transition->LastAdded()); |
| 426 | 431 |
| 427 // Adjust for the number of properties stored in the object. Even in the | 432 // Adjust for the number of properties stored in the object. Even in the |
| 428 // face of a transition we can use the old map here because the size of the | 433 // face of a transition we can use the old map here because the size of the |
| 429 // object and the number of in-object properties is not going to change. | 434 // object and the number of in-object properties is not going to change. |
| 430 index -= transition->inobject_properties(); | 435 index -= transition->inobject_properties(); |
| 431 | 436 |
| 432 // TODO(verwaest): Share this code as a code stub. | 437 // TODO(verwaest): Share this code as a code stub. |
| 433 SmiCheck smi_check = | 438 SmiCheck smi_check = |
| 434 representation.IsTagged() ? INLINE_SMI_CHECK : OMIT_SMI_CHECK; | 439 representation.IsTagged() ? INLINE_SMI_CHECK : OMIT_SMI_CHECK; |
| 435 if (index < 0) { | 440 if (index < 0) { |
| 436 // Set the property straight into the object. | 441 // Set the property straight into the object. |
| 437 int offset = transition->instance_size() + (index * kPointerSize); | 442 int offset = transition->instance_size() + (index * kPointerSize); |
| 438 if (representation.IsDouble()) { | 443 if (representation.IsDouble()) { |
| 439 __ str(storage_reg, FieldMemOperand(receiver_reg, offset)); | 444 __ StoreP(storage_reg, FieldMemOperand(receiver_reg, offset), r0); |
| 440 } else { | 445 } else { |
| 441 __ str(value_reg, FieldMemOperand(receiver_reg, offset)); | 446 __ StoreP(value_reg, FieldMemOperand(receiver_reg, offset), r0); |
| 442 } | 447 } |
| 443 | 448 |
| 444 if (!representation.IsSmi()) { | 449 if (!representation.IsSmi()) { |
| 445 // Update the write barrier for the array address. | 450 // Update the write barrier for the array address. |
| 446 if (!representation.IsDouble()) { | 451 if (!representation.IsDouble()) { |
| 447 __ mov(storage_reg, value_reg); | 452 __ mr(storage_reg, value_reg); |
| 448 } | 453 } |
| 449 __ RecordWriteField(receiver_reg, offset, storage_reg, scratch1, | 454 __ RecordWriteField(receiver_reg, offset, storage_reg, scratch1, |
| 450 kLRHasNotBeenSaved, kDontSaveFPRegs, | 455 kLRHasNotBeenSaved, kDontSaveFPRegs, |
| 451 EMIT_REMEMBERED_SET, smi_check); | 456 EMIT_REMEMBERED_SET, smi_check); |
| 452 } | 457 } |
| 453 } else { | 458 } else { |
| 454 // Write to the properties array. | 459 // Write to the properties array. |
| 455 int offset = index * kPointerSize + FixedArray::kHeaderSize; | 460 int offset = index * kPointerSize + FixedArray::kHeaderSize; |
| 456 // Get the properties array | 461 // Get the properties array |
| 457 __ ldr(scratch1, | 462 __ LoadP(scratch1, |
| 458 FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset)); | 463 FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset)); |
| 459 if (representation.IsDouble()) { | 464 if (representation.IsDouble()) { |
| 460 __ str(storage_reg, FieldMemOperand(scratch1, offset)); | 465 __ StoreP(storage_reg, FieldMemOperand(scratch1, offset), r0); |
| 461 } else { | 466 } else { |
| 462 __ str(value_reg, FieldMemOperand(scratch1, offset)); | 467 __ StoreP(value_reg, FieldMemOperand(scratch1, offset), r0); |
| 463 } | 468 } |
| 464 | 469 |
| 465 if (!representation.IsSmi()) { | 470 if (!representation.IsSmi()) { |
| 466 // Update the write barrier for the array address. | 471 // Update the write barrier for the array address. |
| 467 if (!representation.IsDouble()) { | 472 if (!representation.IsDouble()) { |
| 468 __ mov(storage_reg, value_reg); | 473 __ mr(storage_reg, value_reg); |
| 469 } | 474 } |
| 470 __ RecordWriteField(scratch1, offset, storage_reg, receiver_reg, | 475 __ RecordWriteField(scratch1, offset, storage_reg, receiver_reg, |
| 471 kLRHasNotBeenSaved, kDontSaveFPRegs, | 476 kLRHasNotBeenSaved, kDontSaveFPRegs, |
| 472 EMIT_REMEMBERED_SET, smi_check); | 477 EMIT_REMEMBERED_SET, smi_check); |
| 473 } | 478 } |
| 474 } | 479 } |
| 475 | 480 |
| 476 // Return the value (register r0). | 481 // Return the value (register r3). |
| 477 DCHECK(value_reg.is(r0)); | 482 DCHECK(value_reg.is(r3)); |
| 478 __ bind(&exit); | 483 __ bind(&exit); |
| 479 __ Ret(); | 484 __ Ret(); |
| 480 } | 485 } |
| 481 | 486 |
| 482 | 487 |
| 483 void NamedStoreHandlerCompiler::GenerateStoreField(LookupIterator* lookup, | 488 void NamedStoreHandlerCompiler::GenerateStoreField(LookupIterator* lookup, |
| 484 Register value_reg, | 489 Register value_reg, |
| 485 Label* miss_label) { | 490 Label* miss_label) { |
| 486 DCHECK(lookup->representation().IsHeapObject()); | 491 DCHECK(lookup->representation().IsHeapObject()); |
| 487 __ JumpIfSmi(value_reg, miss_label); | 492 __ JumpIfSmi(value_reg, miss_label); |
| 488 HeapType::Iterator<Map> it = lookup->GetFieldType()->Classes(); | 493 HeapType::Iterator<Map> it = lookup->GetFieldType()->Classes(); |
| 489 __ ldr(scratch1(), FieldMemOperand(value_reg, HeapObject::kMapOffset)); | 494 __ LoadP(scratch1(), FieldMemOperand(value_reg, HeapObject::kMapOffset)); |
| 490 Label do_store; | 495 Label do_store; |
| 491 while (true) { | 496 while (true) { |
| 492 __ CompareMap(scratch1(), it.Current(), &do_store); | 497 __ CompareMap(scratch1(), it.Current(), &do_store); |
| 493 it.Advance(); | 498 it.Advance(); |
| 494 if (it.Done()) { | 499 if (it.Done()) { |
| 495 __ b(ne, miss_label); | 500 __ bne(miss_label); |
| 496 break; | 501 break; |
| 497 } | 502 } |
| 498 __ b(eq, &do_store); | 503 __ beq(&do_store); |
| 499 } | 504 } |
| 500 __ bind(&do_store); | 505 __ bind(&do_store); |
| 501 | 506 |
| 502 StoreFieldStub stub(isolate(), lookup->GetFieldIndex(), | 507 StoreFieldStub stub(isolate(), lookup->GetFieldIndex(), |
| 503 lookup->representation()); | 508 lookup->representation()); |
| 504 GenerateTailCall(masm(), stub.GetCode()); | 509 GenerateTailCall(masm(), stub.GetCode()); |
| 505 } | 510 } |
| 506 | 511 |
| 507 | 512 |
| 508 Register PropertyHandlerCompiler::CheckPrototypes( | 513 Register PropertyHandlerCompiler::CheckPrototypes( |
| (...skipping 36 matching lines...) |
| 545 DCHECK(name->IsString()); | 550 DCHECK(name->IsString()); |
| 546 name = factory()->InternalizeString(Handle<String>::cast(name)); | 551 name = factory()->InternalizeString(Handle<String>::cast(name)); |
| 547 } | 552 } |
| 548 DCHECK(current.is_null() || | 553 DCHECK(current.is_null() || |
| 549 current->property_dictionary()->FindEntry(name) == | 554 current->property_dictionary()->FindEntry(name) == |
| 550 NameDictionary::kNotFound); | 555 NameDictionary::kNotFound); |
| 551 | 556 |
| 552 GenerateDictionaryNegativeLookup(masm(), miss, reg, name, scratch1, | 557 GenerateDictionaryNegativeLookup(masm(), miss, reg, name, scratch1, |
| 553 scratch2); | 558 scratch2); |
| 554 | 559 |
| 555 __ ldr(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset)); | 560 __ LoadP(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset)); |
| 556 reg = holder_reg; // From now on the object will be in holder_reg. | 561 reg = holder_reg; // From now on the object will be in holder_reg. |
| 557 __ ldr(reg, FieldMemOperand(scratch1, Map::kPrototypeOffset)); | 562 __ LoadP(reg, FieldMemOperand(scratch1, Map::kPrototypeOffset)); |
| 558 } else { | 563 } else { |
| 559 Register map_reg = scratch1; | 564 Register map_reg = scratch1; |
| 560 if (depth != 1 || check == CHECK_ALL_MAPS) { | 565 if (depth != 1 || check == CHECK_ALL_MAPS) { |
| 561 // CheckMap implicitly loads the map of |reg| into |map_reg|. | 566 // CheckMap implicitly loads the map of |reg| into |map_reg|. |
| 562 __ CheckMap(reg, map_reg, current_map, miss, DONT_DO_SMI_CHECK); | 567 __ CheckMap(reg, map_reg, current_map, miss, DONT_DO_SMI_CHECK); |
| 563 } else { | 568 } else { |
| 564 __ ldr(map_reg, FieldMemOperand(reg, HeapObject::kMapOffset)); | 569 __ LoadP(map_reg, FieldMemOperand(reg, HeapObject::kMapOffset)); |
| 565 } | 570 } |
| 566 | 571 |
| 567 // Check access rights to the global object. This has to happen after | 572 // Check access rights to the global object. This has to happen after |
| 568 // the map check so that we know that the object is actually a global | 573 // the map check so that we know that the object is actually a global |
| 569 // object. | 574 // object. |
| 570 // This allows us to install generated handlers for accesses to the | 575 // This allows us to install generated handlers for accesses to the |
| 571 // global proxy (as opposed to using slow ICs). See corresponding code | 576 // global proxy (as opposed to using slow ICs). See corresponding code |
| 572 // in LookupForRead(). | 577 // in LookupForRead(). |
| 573 if (current_map->IsJSGlobalProxyMap()) { | 578 if (current_map->IsJSGlobalProxyMap()) { |
| 574 __ CheckAccessGlobalProxy(reg, scratch2, miss); | 579 __ CheckAccessGlobalProxy(reg, scratch2, miss); |
| 575 } else if (current_map->IsJSGlobalObjectMap()) { | 580 } else if (current_map->IsJSGlobalObjectMap()) { |
| 576 GenerateCheckPropertyCell(masm(), Handle<JSGlobalObject>::cast(current), | 581 GenerateCheckPropertyCell(masm(), Handle<JSGlobalObject>::cast(current), |
| 577 name, scratch2, miss); | 582 name, scratch2, miss); |
| 578 } | 583 } |
| 579 | 584 |
| 580 reg = holder_reg; // From now on the object will be in holder_reg. | 585 reg = holder_reg; // From now on the object will be in holder_reg. |
| 581 | 586 |
| 582 // Two possible reasons for loading the prototype from the map: | 587 // Two possible reasons for loading the prototype from the map: |
| 583 // (1) Can't store references to new space in code. | 588 // (1) Can't store references to new space in code. |
| 584 // (2) Handler is shared for all receivers with the same prototype | 589 // (2) Handler is shared for all receivers with the same prototype |
| 585 // map (but not necessarily the same prototype instance). | 590 // map (but not necessarily the same prototype instance). |
| 586 bool load_prototype_from_map = | 591 bool load_prototype_from_map = |
| 587 heap()->InNewSpace(*prototype) || depth == 1; | 592 heap()->InNewSpace(*prototype) || depth == 1; |
| 588 if (load_prototype_from_map) { | 593 if (load_prototype_from_map) { |
| 589 __ ldr(reg, FieldMemOperand(map_reg, Map::kPrototypeOffset)); | 594 __ LoadP(reg, FieldMemOperand(map_reg, Map::kPrototypeOffset)); |
| 590 } else { | 595 } else { |
| 591 __ mov(reg, Operand(prototype)); | 596 __ mov(reg, Operand(prototype)); |
| 592 } | 597 } |
| 593 } | 598 } |
| 594 | 599 |
| 595 // Go to the next object in the prototype chain. | 600 // Go to the next object in the prototype chain. |
| 596 current = prototype; | 601 current = prototype; |
| 597 current_map = handle(current->map()); | 602 current_map = handle(current->map()); |
| 598 } | 603 } |
| 599 | 604 |
| (...skipping 34 matching lines...) |
| 634 __ b(&success); | 639 __ b(&success); |
| 635 GenerateRestoreName(miss, name); | 640 GenerateRestoreName(miss, name); |
| 636 TailCallBuiltin(masm(), MissBuiltin(kind())); | 641 TailCallBuiltin(masm(), MissBuiltin(kind())); |
| 637 __ bind(&success); | 642 __ bind(&success); |
| 638 } | 643 } |
| 639 } | 644 } |
| 640 | 645 |
| 641 | 646 |
| 642 void NamedLoadHandlerCompiler::GenerateLoadConstant(Handle<Object> value) { | 647 void NamedLoadHandlerCompiler::GenerateLoadConstant(Handle<Object> value) { |
| 643 // Return the constant value. | 648 // Return the constant value. |
| 644 __ Move(r0, value); | 649 __ Move(r3, value); |
| 645 __ Ret(); | 650 __ Ret(); |
| 646 } | 651 } |
| 647 | 652 |
| 648 | 653 |
| 649 void NamedLoadHandlerCompiler::GenerateLoadCallback( | 654 void NamedLoadHandlerCompiler::GenerateLoadCallback( |
| 650 Register reg, Handle<ExecutableAccessorInfo> callback) { | 655 Register reg, Handle<ExecutableAccessorInfo> callback) { |
| 651 // Build AccessorInfo::args_ list on the stack and push property name below | 656 // Build AccessorInfo::args_ list on the stack and push property name below |
| 652 // the exit frame to make GC aware of them and store pointers to them. | 657 // the exit frame to make GC aware of them and store pointers to them. |
| 653 STATIC_ASSERT(PropertyCallbackArguments::kHolderIndex == 0); | 658 STATIC_ASSERT(PropertyCallbackArguments::kHolderIndex == 0); |
| 654 STATIC_ASSERT(PropertyCallbackArguments::kIsolateIndex == 1); | 659 STATIC_ASSERT(PropertyCallbackArguments::kIsolateIndex == 1); |
| 655 STATIC_ASSERT(PropertyCallbackArguments::kReturnValueDefaultValueIndex == 2); | 660 STATIC_ASSERT(PropertyCallbackArguments::kReturnValueDefaultValueIndex == 2); |
| 656 STATIC_ASSERT(PropertyCallbackArguments::kReturnValueOffset == 3); | 661 STATIC_ASSERT(PropertyCallbackArguments::kReturnValueOffset == 3); |
| 657 STATIC_ASSERT(PropertyCallbackArguments::kDataIndex == 4); | 662 STATIC_ASSERT(PropertyCallbackArguments::kDataIndex == 4); |
| 658 STATIC_ASSERT(PropertyCallbackArguments::kThisIndex == 5); | 663 STATIC_ASSERT(PropertyCallbackArguments::kThisIndex == 5); |
| 659 STATIC_ASSERT(PropertyCallbackArguments::kArgsLength == 6); | 664 STATIC_ASSERT(PropertyCallbackArguments::kArgsLength == 6); |
| 660 DCHECK(!scratch2().is(reg)); | 665 DCHECK(!scratch2().is(reg)); |
| 661 DCHECK(!scratch3().is(reg)); | 666 DCHECK(!scratch3().is(reg)); |
| 662 DCHECK(!scratch4().is(reg)); | 667 DCHECK(!scratch4().is(reg)); |
| 663 __ push(receiver()); | 668 __ push(receiver()); |
| 664 if (heap()->InNewSpace(callback->data())) { | 669 if (heap()->InNewSpace(callback->data())) { |
| 665 __ Move(scratch3(), callback); | 670 __ Move(scratch3(), callback); |
| 666 __ ldr(scratch3(), | 671 __ LoadP(scratch3(), |
| 667 FieldMemOperand(scratch3(), ExecutableAccessorInfo::kDataOffset)); | 672 FieldMemOperand(scratch3(), ExecutableAccessorInfo::kDataOffset)); |
| 668 } else { | 673 } else { |
| 669 __ Move(scratch3(), Handle<Object>(callback->data(), isolate())); | 674 __ Move(scratch3(), Handle<Object>(callback->data(), isolate())); |
| 670 } | 675 } |
| 671 __ push(scratch3()); | 676 __ push(scratch3()); |
| 672 __ LoadRoot(scratch3(), Heap::kUndefinedValueRootIndex); | 677 __ LoadRoot(scratch3(), Heap::kUndefinedValueRootIndex); |
| 673 __ mov(scratch4(), scratch3()); | 678 __ mr(scratch4(), scratch3()); |
| 674 __ Push(scratch3(), scratch4()); | 679 __ Push(scratch3(), scratch4()); |
| 675 __ mov(scratch4(), Operand(ExternalReference::isolate_address(isolate()))); | 680 __ mov(scratch4(), Operand(ExternalReference::isolate_address(isolate()))); |
| 676 __ Push(scratch4(), reg); | 681 __ Push(scratch4(), reg); |
| 677 __ mov(scratch2(), sp); // scratch2 = PropertyAccessorInfo::args_ | |
| 678 __ push(name()); | 682 __ push(name()); |
| 679 | 683 |
| 680 // Abi for CallApiGetter | 684 // Abi for CallApiGetter |
| 681 Register getter_address_reg = ApiGetterDescriptor::function_address(); | 685 Register getter_address_reg = ApiGetterDescriptor::function_address(); |
| 682 | 686 |
| 683 Address getter_address = v8::ToCData<Address>(callback->getter()); | 687 Address getter_address = v8::ToCData<Address>(callback->getter()); |
| 684 ApiFunction fun(getter_address); | 688 ApiFunction fun(getter_address); |
| 685 ExternalReference::Type type = ExternalReference::DIRECT_GETTER_CALL; | 689 ExternalReference::Type type = ExternalReference::DIRECT_GETTER_CALL; |
| 686 ExternalReference ref = ExternalReference(&fun, type, isolate()); | 690 ExternalReference ref = ExternalReference(&fun, type, isolate()); |
| 687 __ mov(getter_address_reg, Operand(ref)); | 691 __ mov(getter_address_reg, Operand(ref)); |
| (...skipping 36 matching lines...) |
| 724 // interceptor's holder has been compiled before (see a caller | 728 // interceptor's holder has been compiled before (see a caller |
| 725 // of this method.) | 729 // of this method.) |
| 726 CompileCallLoadPropertyWithInterceptor( | 730 CompileCallLoadPropertyWithInterceptor( |
| 727 masm(), receiver(), holder_reg, this->name(), holder(), | 731 masm(), receiver(), holder_reg, this->name(), holder(), |
| 728 IC::kLoadPropertyWithInterceptorOnly); | 732 IC::kLoadPropertyWithInterceptorOnly); |
| 729 | 733 |
| 730 // Check if interceptor provided a value for property. If it's | 734 // Check if interceptor provided a value for property. If it's |
| 731 // the case, return immediately. | 735 // the case, return immediately. |
| 732 Label interceptor_failed; | 736 Label interceptor_failed; |
| 733 __ LoadRoot(scratch1(), Heap::kNoInterceptorResultSentinelRootIndex); | 737 __ LoadRoot(scratch1(), Heap::kNoInterceptorResultSentinelRootIndex); |
| 734 __ cmp(r0, scratch1()); | 738 __ cmp(r3, scratch1()); |
| 735 __ b(eq, &interceptor_failed); | 739 __ beq(&interceptor_failed); |
| 736 frame_scope.GenerateLeaveFrame(); | 740 frame_scope.GenerateLeaveFrame(); |
| 737 __ Ret(); | 741 __ Ret(); |
| 738 | 742 |
| 739 __ bind(&interceptor_failed); | 743 __ bind(&interceptor_failed); |
| 740 __ pop(this->name()); | 744 __ pop(this->name()); |
| 741 __ pop(holder_reg); | 745 __ pop(holder_reg); |
| 742 if (must_preserve_receiver_reg) { | 746 if (must_preserve_receiver_reg) { |
| 743 __ pop(receiver()); | 747 __ pop(receiver()); |
| 744 } | 748 } |
| 745 // Leave the internal frame. | 749 // Leave the internal frame. |
| (...skipping 15 matching lines...) |
| 761 __ TailCallExternalReference( | 765 __ TailCallExternalReference( |
| 762 ref, NamedLoadHandlerCompiler::kInterceptorArgsLength, 1); | 766 ref, NamedLoadHandlerCompiler::kInterceptorArgsLength, 1); |
| 763 } | 767 } |
| 764 | 768 |
| 765 | 769 |
| 766 Handle<Code> NamedStoreHandlerCompiler::CompileStoreCallback( | 770 Handle<Code> NamedStoreHandlerCompiler::CompileStoreCallback( |
| 767 Handle<JSObject> object, Handle<Name> name, | 771 Handle<JSObject> object, Handle<Name> name, |
| 768 Handle<ExecutableAccessorInfo> callback) { | 772 Handle<ExecutableAccessorInfo> callback) { |
| 769 Register holder_reg = Frontend(receiver(), name); | 773 Register holder_reg = Frontend(receiver(), name); |
| 770 | 774 |
| 771 __ push(receiver()); // receiver | 775 __ Push(receiver(), holder_reg); // receiver |
| 772 __ push(holder_reg); | 776 __ mov(ip, Operand(callback)); // callback info |
| 773 __ mov(ip, Operand(callback)); // callback info | |
| 774 __ push(ip); | 777 __ push(ip); |
| 775 __ mov(ip, Operand(name)); | 778 __ mov(ip, Operand(name)); |
| 776 __ Push(ip, value()); | 779 __ Push(ip, value()); |
| 777 | 780 |
| 778 // Do tail-call to the runtime system. | 781 // Do tail-call to the runtime system. |
| 779 ExternalReference store_callback_property = | 782 ExternalReference store_callback_property = |
| 780 ExternalReference(IC_Utility(IC::kStoreCallbackProperty), isolate()); | 783 ExternalReference(IC_Utility(IC::kStoreCallbackProperty), isolate()); |
| 781 __ TailCallExternalReference(store_callback_property, 5, 1); | 784 __ TailCallExternalReference(store_callback_property, 5, 1); |
| 782 | 785 |
| 783 // Return the generated code. | 786 // Return the generated code. |
| (...skipping 21 matching lines...) |
| 805 | 808 |
| 806 | 809 |
| 807 Handle<Code> NamedLoadHandlerCompiler::CompileLoadGlobal( | 810 Handle<Code> NamedLoadHandlerCompiler::CompileLoadGlobal( |
| 808 Handle<PropertyCell> cell, Handle<Name> name, bool is_configurable) { | 811 Handle<PropertyCell> cell, Handle<Name> name, bool is_configurable) { |
| 809 Label miss; | 812 Label miss; |
| 810 FrontendHeader(receiver(), name, &miss); | 813 FrontendHeader(receiver(), name, &miss); |
| 811 | 814 |
| 812 // Get the value from the cell. | 815 // Get the value from the cell. |
| 813 Register result = StoreDescriptor::ValueRegister(); | 816 Register result = StoreDescriptor::ValueRegister(); |
| 814 __ mov(result, Operand(cell)); | 817 __ mov(result, Operand(cell)); |
| 815 __ ldr(result, FieldMemOperand(result, Cell::kValueOffset)); | 818 __ LoadP(result, FieldMemOperand(result, Cell::kValueOffset)); |
| 816 | 819 |
| 817 // Check for deleted property if property can actually be deleted. | 820 // Check for deleted property if property can actually be deleted. |
| 818 if (is_configurable) { | 821 if (is_configurable) { |
| 819 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); | 822 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); |
| 820 __ cmp(result, ip); | 823 __ cmp(result, ip); |
| 821 __ b(eq, &miss); | 824 __ beq(&miss); |
| 822 } | 825 } |
| 823 | 826 |
| 824 Counters* counters = isolate()->counters(); | 827 Counters* counters = isolate()->counters(); |
| 825 __ IncrementCounter(counters->named_load_global_stub(), 1, r1, r3); | 828 __ IncrementCounter(counters->named_load_global_stub(), 1, r4, r6); |
| 826 __ Ret(); | 829 __ Ret(); |
| 827 | 830 |
| 828 FrontendFooter(name, &miss); | 831 FrontendFooter(name, &miss); |
| 829 | 832 |
| 830 // Return the generated code. | 833 // Return the generated code. |
| 831 return GetCode(kind(), Code::NORMAL, name); | 834 return GetCode(kind(), Code::NORMAL, name); |
| 832 } | 835 } |
| 833 | 836 |
| 834 | 837 |
| 835 #undef __ | 838 #undef __ |
| 836 } | 839 } |
| 837 } // namespace v8::internal | 840 } // namespace v8::internal |
| 838 | 841 |
| 839 #endif // V8_TARGET_ARCH_ARM | 842 #endif // V8_TARGET_ARCH_PPC |