Chromium Code Reviews

Side by Side Diff: src/ic/ppc/handler-compiler-ppc.cc

Issue 571173003: PowerPC specific sub-directories (Closed) Base URL: http://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Remove IBM copyright, update code to later level Created 6 years, 2 months ago
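The port applies a uniform ARM-to-PPC translation across this file: pointer-sized loads (ldr) become LoadP, byte loads (ldrb) become lbz, register-to-register moves (mov rd, rs) become mr, conditional branches such as b(ne, label) become bne(label), and the ARM argument registers r0/r1/r2 map to the PPC ABI registers r3/r4/r5. The sketch below only summarizes that pattern for readers skimming the diff; it is not part of the patch, and it assumes a PPC MacroAssembler* masm with the file's ACCESS_MASM macro in scope.

#define __ ACCESS_MASM(masm)
// ARM (old):                                     PPC (new, as in this patch):
//   __ ldr(reg, FieldMemOperand(obj, offset));     __ LoadP(reg, FieldMemOperand(obj, offset));
//   __ ldrb(reg, FieldMemOperand(map, offset));    __ lbz(reg, FieldMemOperand(map, offset));
//   __ mov(r0, scratch);                           __ mr(r3, scratch);
//   __ b(ne, miss);                                __ bne(miss);
#undef __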
1 // Copyright 2014 the V8 project authors. All rights reserved. 1 // Copyright 2014 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "src/v8.h" 5 #include "src/v8.h"
6 6
7 #if V8_TARGET_ARCH_ARM 7 #if V8_TARGET_ARCH_PPC
8 8
9 #include "src/ic/call-optimization.h" 9 #include "src/ic/call-optimization.h"
10 #include "src/ic/handler-compiler.h" 10 #include "src/ic/handler-compiler.h"
11 #include "src/ic/ic.h" 11 #include "src/ic/ic.h"
12 12
13 namespace v8 { 13 namespace v8 {
14 namespace internal { 14 namespace internal {
15 15
16 #define __ ACCESS_MASM(masm) 16 #define __ ACCESS_MASM(masm)
17 17
18 18
19 void NamedLoadHandlerCompiler::GenerateLoadViaGetter( 19 void NamedLoadHandlerCompiler::GenerateLoadViaGetter(
20 MacroAssembler* masm, Handle<HeapType> type, Register receiver, 20 MacroAssembler* masm, Handle<HeapType> type, Register receiver,
21 Handle<JSFunction> getter) { 21 Handle<JSFunction> getter) {
22 // ----------- S t a t e ------------- 22 // ----------- S t a t e -------------
23 // -- r0 : receiver 23 // -- r3 : receiver
24 // -- r2 : name 24 // -- r5 : name
25 // -- lr : return address 25 // -- lr : return address
26 // ----------------------------------- 26 // -----------------------------------
27 { 27 {
28 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); 28 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
29 29
30 if (!getter.is_null()) { 30 if (!getter.is_null()) {
31 // Call the JavaScript getter with the receiver on the stack. 31 // Call the JavaScript getter with the receiver on the stack.
32 if (IC::TypeToMap(*type, masm->isolate())->IsJSGlobalObjectMap()) { 32 if (IC::TypeToMap(*type, masm->isolate())->IsJSGlobalObjectMap()) {
33 // Swap in the global receiver. 33 // Swap in the global receiver.
34 __ ldr(receiver, 34 __ LoadP(receiver,
35 FieldMemOperand(receiver, JSGlobalObject::kGlobalProxyOffset)); 35 FieldMemOperand(receiver, JSGlobalObject::kGlobalProxyOffset));
36 } 36 }
37 __ push(receiver); 37 __ push(receiver);
38 ParameterCount actual(0); 38 ParameterCount actual(0);
39 ParameterCount expected(getter); 39 ParameterCount expected(getter);
40 __ InvokeFunction(getter, expected, actual, CALL_FUNCTION, 40 __ InvokeFunction(getter, expected, actual, CALL_FUNCTION,
41 NullCallWrapper()); 41 NullCallWrapper());
42 } else { 42 } else {
43 // If we generate a global code snippet for deoptimization only, remember 43 // If we generate a global code snippet for deoptimization only, remember
44 // the place to continue after deoptimization. 44 // the place to continue after deoptimization.
45 masm->isolate()->heap()->SetGetterStubDeoptPCOffset(masm->pc_offset()); 45 masm->isolate()->heap()->SetGetterStubDeoptPCOffset(masm->pc_offset());
46 } 46 }
47 47
48 // Restore context register. 48 // Restore context register.
49 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); 49 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
50 } 50 }
51 __ Ret(); 51 __ Ret();
52 } 52 }
53 53
54 54
55 void NamedStoreHandlerCompiler::GenerateStoreViaSetter( 55 void NamedStoreHandlerCompiler::GenerateStoreViaSetter(
56 MacroAssembler* masm, Handle<HeapType> type, Register receiver, 56 MacroAssembler* masm, Handle<HeapType> type, Register receiver,
57 Handle<JSFunction> setter) { 57 Handle<JSFunction> setter) {
58 // ----------- S t a t e ------------- 58 // ----------- S t a t e -------------
59 // -- lr : return address 59 // -- lr : return address
60 // ----------------------------------- 60 // -----------------------------------
61 { 61 {
62 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); 62 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
63 63
64 // Save value register, so we can restore it later. 64 // Save value register, so we can restore it later.
65 __ push(value()); 65 __ push(value());
66 66
67 if (!setter.is_null()) { 67 if (!setter.is_null()) {
68 // Call the JavaScript setter with receiver and value on the stack. 68 // Call the JavaScript setter with receiver and value on the stack.
69 if (IC::TypeToMap(*type, masm->isolate())->IsJSGlobalObjectMap()) { 69 if (IC::TypeToMap(*type, masm->isolate())->IsJSGlobalObjectMap()) {
70 // Swap in the global receiver. 70 // Swap in the global receiver.
71 __ ldr(receiver, 71 __ LoadP(receiver,
72 FieldMemOperand(receiver, JSGlobalObject::kGlobalProxyOffset)); 72 FieldMemOperand(receiver, JSGlobalObject::kGlobalProxyOffset));
73 } 73 }
74 __ Push(receiver, value()); 74 __ Push(receiver, value());
75 ParameterCount actual(1); 75 ParameterCount actual(1);
76 ParameterCount expected(setter); 76 ParameterCount expected(setter);
77 __ InvokeFunction(setter, expected, actual, CALL_FUNCTION, 77 __ InvokeFunction(setter, expected, actual, CALL_FUNCTION,
78 NullCallWrapper()); 78 NullCallWrapper());
79 } else { 79 } else {
80 // If we generate a global code snippet for deoptimization only, remember 80 // If we generate a global code snippet for deoptimization only, remember
81 // the place to continue after deoptimization. 81 // the place to continue after deoptimization.
82 masm->isolate()->heap()->SetSetterStubDeoptPCOffset(masm->pc_offset()); 82 masm->isolate()->heap()->SetSetterStubDeoptPCOffset(masm->pc_offset());
83 } 83 }
84 84
85 // We have to return the passed value, not the return value of the setter. 85 // We have to return the passed value, not the return value of the setter.
86 __ pop(r0); 86 __ pop(r3);
87 87
88 // Restore context register. 88 // Restore context register.
89 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); 89 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
90 } 90 }
91 __ Ret(); 91 __ Ret();
92 } 92 }
93 93
94 94
95 void PropertyHandlerCompiler::GenerateDictionaryNegativeLookup( 95 void PropertyHandlerCompiler::GenerateDictionaryNegativeLookup(
96 MacroAssembler* masm, Label* miss_label, Register receiver, 96 MacroAssembler* masm, Label* miss_label, Register receiver,
97 Handle<Name> name, Register scratch0, Register scratch1) { 97 Handle<Name> name, Register scratch0, Register scratch1) {
98 DCHECK(name->IsUniqueName()); 98 DCHECK(name->IsUniqueName());
99 DCHECK(!receiver.is(scratch0)); 99 DCHECK(!receiver.is(scratch0));
100 Counters* counters = masm->isolate()->counters(); 100 Counters* counters = masm->isolate()->counters();
101 __ IncrementCounter(counters->negative_lookups(), 1, scratch0, scratch1); 101 __ IncrementCounter(counters->negative_lookups(), 1, scratch0, scratch1);
102 __ IncrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1); 102 __ IncrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);
103 103
104 Label done; 104 Label done;
105 105
106 const int kInterceptorOrAccessCheckNeededMask = 106 const int kInterceptorOrAccessCheckNeededMask =
107 (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded); 107 (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded);
108 108
109 // Bail out if the receiver has a named interceptor or requires access checks. 109 // Bail out if the receiver has a named interceptor or requires access checks.
110 Register map = scratch1; 110 Register map = scratch1;
111 __ ldr(map, FieldMemOperand(receiver, HeapObject::kMapOffset)); 111 __ LoadP(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
112 __ ldrb(scratch0, FieldMemOperand(map, Map::kBitFieldOffset)); 112 __ lbz(scratch0, FieldMemOperand(map, Map::kBitFieldOffset));
113 __ tst(scratch0, Operand(kInterceptorOrAccessCheckNeededMask)); 113 __ andi(r0, scratch0, Operand(kInterceptorOrAccessCheckNeededMask));
114 __ b(ne, miss_label); 114 __ bne(miss_label, cr0);
115 115
116 // Check that receiver is a JSObject. 116 // Check that receiver is a JSObject.
117 __ ldrb(scratch0, FieldMemOperand(map, Map::kInstanceTypeOffset)); 117 __ lbz(scratch0, FieldMemOperand(map, Map::kInstanceTypeOffset));
118 __ cmp(scratch0, Operand(FIRST_SPEC_OBJECT_TYPE)); 118 __ cmpi(scratch0, Operand(FIRST_SPEC_OBJECT_TYPE));
119 __ b(lt, miss_label); 119 __ blt(miss_label);
120 120
121 // Load properties array. 121 // Load properties array.
122 Register properties = scratch0; 122 Register properties = scratch0;
123 __ ldr(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset)); 123 __ LoadP(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
124 // Check that the properties array is a dictionary. 124 // Check that the properties array is a dictionary.
125 __ ldr(map, FieldMemOperand(properties, HeapObject::kMapOffset)); 125 __ LoadP(map, FieldMemOperand(properties, HeapObject::kMapOffset));
126 Register tmp = properties; 126 Register tmp = properties;
127 __ LoadRoot(tmp, Heap::kHashTableMapRootIndex); 127 __ LoadRoot(tmp, Heap::kHashTableMapRootIndex);
128 __ cmp(map, tmp); 128 __ cmp(map, tmp);
129 __ b(ne, miss_label); 129 __ bne(miss_label);
130 130
131 // Restore the temporarily used register. 131 // Restore the temporarily used register.
132 __ ldr(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset)); 132 __ LoadP(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
133 133
134 134
135 NameDictionaryLookupStub::GenerateNegativeLookup( 135 NameDictionaryLookupStub::GenerateNegativeLookup(
136 masm, miss_label, &done, receiver, properties, name, scratch1); 136 masm, miss_label, &done, receiver, properties, name, scratch1);
137 __ bind(&done); 137 __ bind(&done);
138 __ DecrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1); 138 __ DecrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);
139 } 139 }
140 140
141 141
142 void NamedLoadHandlerCompiler::GenerateDirectLoadGlobalFunctionPrototype( 142 void NamedLoadHandlerCompiler::GenerateDirectLoadGlobalFunctionPrototype(
143 MacroAssembler* masm, int index, Register prototype, Label* miss) { 143 MacroAssembler* masm, int index, Register prototype, Label* miss) {
144 Isolate* isolate = masm->isolate(); 144 Isolate* isolate = masm->isolate();
145 // Get the global function with the given index. 145 // Get the global function with the given index.
146 Handle<JSFunction> function( 146 Handle<JSFunction> function(
147 JSFunction::cast(isolate->native_context()->get(index))); 147 JSFunction::cast(isolate->native_context()->get(index)));
148 148
149 // Check we're still in the same context. 149 // Check we're still in the same context.
150 Register scratch = prototype; 150 Register scratch = prototype;
151 const int offset = Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX); 151 const int offset = Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX);
152 __ ldr(scratch, MemOperand(cp, offset)); 152 __ LoadP(scratch, MemOperand(cp, offset));
153 __ ldr(scratch, FieldMemOperand(scratch, GlobalObject::kNativeContextOffset)); 153 __ LoadP(scratch,
154 __ ldr(scratch, MemOperand(scratch, Context::SlotOffset(index))); 154 FieldMemOperand(scratch, GlobalObject::kNativeContextOffset));
155 __ LoadP(scratch, MemOperand(scratch, Context::SlotOffset(index)));
155 __ Move(ip, function); 156 __ Move(ip, function);
156 __ cmp(ip, scratch); 157 __ cmp(ip, scratch);
157 __ b(ne, miss); 158 __ bne(miss);
158 159
159 // Load its initial map. The global functions all have initial maps. 160 // Load its initial map. The global functions all have initial maps.
160 __ Move(prototype, Handle<Map>(function->initial_map())); 161 __ Move(prototype, Handle<Map>(function->initial_map()));
161 // Load the prototype from the initial map. 162 // Load the prototype from the initial map.
162 __ ldr(prototype, FieldMemOperand(prototype, Map::kPrototypeOffset)); 163 __ LoadP(prototype, FieldMemOperand(prototype, Map::kPrototypeOffset));
163 } 164 }
164 165
165 166
166 void NamedLoadHandlerCompiler::GenerateLoadFunctionPrototype( 167 void NamedLoadHandlerCompiler::GenerateLoadFunctionPrototype(
167 MacroAssembler* masm, Register receiver, Register scratch1, 168 MacroAssembler* masm, Register receiver, Register scratch1,
168 Register scratch2, Label* miss_label) { 169 Register scratch2, Label* miss_label) {
169 __ TryGetFunctionPrototype(receiver, scratch1, scratch2, miss_label); 170 __ TryGetFunctionPrototype(receiver, scratch1, scratch2, miss_label);
170 __ mov(r0, scratch1); 171 __ mr(r3, scratch1);
171 __ Ret(); 172 __ Ret();
172 } 173 }
173 174
174 175
175 // Generate code to check that a global property cell is empty. Create 176 // Generate code to check that a global property cell is empty. Create
176 // the property cell at compilation time if no cell exists for the 177 // the property cell at compilation time if no cell exists for the
177 // property. 178 // property.
178 void PropertyHandlerCompiler::GenerateCheckPropertyCell( 179 void PropertyHandlerCompiler::GenerateCheckPropertyCell(
179 MacroAssembler* masm, Handle<JSGlobalObject> global, Handle<Name> name, 180 MacroAssembler* masm, Handle<JSGlobalObject> global, Handle<Name> name,
180 Register scratch, Label* miss) { 181 Register scratch, Label* miss) {
181 Handle<Cell> cell = JSGlobalObject::EnsurePropertyCell(global, name); 182 Handle<Cell> cell = JSGlobalObject::EnsurePropertyCell(global, name);
182 DCHECK(cell->value()->IsTheHole()); 183 DCHECK(cell->value()->IsTheHole());
183 __ mov(scratch, Operand(cell)); 184 __ mov(scratch, Operand(cell));
184 __ ldr(scratch, FieldMemOperand(scratch, Cell::kValueOffset)); 185 __ LoadP(scratch, FieldMemOperand(scratch, Cell::kValueOffset));
185 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); 186 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
186 __ cmp(scratch, ip); 187 __ cmp(scratch, ip);
187 __ b(ne, miss); 188 __ bne(miss);
188 } 189 }
189 190
190 191
191 static void PushInterceptorArguments(MacroAssembler* masm, Register receiver, 192 static void PushInterceptorArguments(MacroAssembler* masm, Register receiver,
192 Register holder, Register name, 193 Register holder, Register name,
193 Handle<JSObject> holder_obj) { 194 Handle<JSObject> holder_obj) {
194 STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsNameIndex == 0); 195 STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsNameIndex == 0);
195 STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsInfoIndex == 1); 196 STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsInfoIndex == 1);
196 STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsThisIndex == 2); 197 STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsThisIndex == 2);
197 STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsHolderIndex == 3); 198 STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsHolderIndex == 3);
(...skipping 28 matching lines...)
226 // Write the arguments to stack frame. 227 // Write the arguments to stack frame.
227 for (int i = 0; i < argc; i++) { 228 for (int i = 0; i < argc; i++) {
228 Register arg = values[argc - 1 - i]; 229 Register arg = values[argc - 1 - i];
229 DCHECK(!receiver.is(arg)); 230 DCHECK(!receiver.is(arg));
230 DCHECK(!scratch_in.is(arg)); 231 DCHECK(!scratch_in.is(arg));
231 __ push(arg); 232 __ push(arg);
232 } 233 }
233 DCHECK(optimization.is_simple_api_call()); 234 DCHECK(optimization.is_simple_api_call());
234 235
235 // Abi for CallApiFunctionStub. 236 // Abi for CallApiFunctionStub.
236 Register callee = r0; 237 Register callee = r3;
237 Register call_data = r4; 238 Register call_data = r7;
238 Register holder = r2; 239 Register holder = r5;
239 Register api_function_address = r1; 240 Register api_function_address = r4;
240 241
241 // Put holder in place. 242 // Put holder in place.
242 CallOptimization::HolderLookup holder_lookup; 243 CallOptimization::HolderLookup holder_lookup;
243 Handle<JSObject> api_holder = 244 Handle<JSObject> api_holder =
244 optimization.LookupHolderOfExpectedType(receiver_map, &holder_lookup); 245 optimization.LookupHolderOfExpectedType(receiver_map, &holder_lookup);
245 switch (holder_lookup) { 246 switch (holder_lookup) {
246 case CallOptimization::kHolderIsReceiver: 247 case CallOptimization::kHolderIsReceiver:
247 __ Move(holder, receiver); 248 __ Move(holder, receiver);
248 break; 249 break;
249 case CallOptimization::kHolderFound: 250 case CallOptimization::kHolderFound:
250 __ Move(holder, api_holder); 251 __ Move(holder, api_holder);
251 break; 252 break;
252 case CallOptimization::kHolderNotFound: 253 case CallOptimization::kHolderNotFound:
253 UNREACHABLE(); 254 UNREACHABLE();
254 break; 255 break;
255 } 256 }
256 257
257 Isolate* isolate = masm->isolate(); 258 Isolate* isolate = masm->isolate();
258 Handle<JSFunction> function = optimization.constant_function(); 259 Handle<JSFunction> function = optimization.constant_function();
259 Handle<CallHandlerInfo> api_call_info = optimization.api_call_info(); 260 Handle<CallHandlerInfo> api_call_info = optimization.api_call_info();
260 Handle<Object> call_data_obj(api_call_info->data(), isolate); 261 Handle<Object> call_data_obj(api_call_info->data(), isolate);
261 262
262 // Put callee in place. 263 // Put callee in place.
263 __ Move(callee, function); 264 __ Move(callee, function);
264 265
265 bool call_data_undefined = false; 266 bool call_data_undefined = false;
266 // Put call_data in place. 267 // Put call_data in place.
267 if (isolate->heap()->InNewSpace(*call_data_obj)) { 268 if (isolate->heap()->InNewSpace(*call_data_obj)) {
268 __ Move(call_data, api_call_info); 269 __ Move(call_data, api_call_info);
269 __ ldr(call_data, FieldMemOperand(call_data, CallHandlerInfo::kDataOffset)); 270 __ LoadP(call_data,
271 FieldMemOperand(call_data, CallHandlerInfo::kDataOffset));
270 } else if (call_data_obj->IsUndefined()) { 272 } else if (call_data_obj->IsUndefined()) {
271 call_data_undefined = true; 273 call_data_undefined = true;
272 __ LoadRoot(call_data, Heap::kUndefinedValueRootIndex); 274 __ LoadRoot(call_data, Heap::kUndefinedValueRootIndex);
273 } else { 275 } else {
274 __ Move(call_data, call_data_obj); 276 __ Move(call_data, call_data_obj);
275 } 277 }
276 278
277 // Put api_function_address in place. 279 // Put api_function_address in place.
278 Address function_address = v8::ToCData<Address>(api_call_info->callback()); 280 Address function_address = v8::ToCData<Address>(api_call_info->callback());
279 ApiFunction fun(function_address); 281 ApiFunction fun(function_address);
(...skipping 39 matching lines...)
319 321
320 void NamedStoreHandlerCompiler::GenerateRestoreName(Label* label, 322 void NamedStoreHandlerCompiler::GenerateRestoreName(Label* label,
321 Handle<Name> name) { 323 Handle<Name> name) {
322 if (!label->is_unused()) { 324 if (!label->is_unused()) {
323 __ bind(label); 325 __ bind(label);
324 __ mov(this->name(), Operand(name)); 326 __ mov(this->name(), Operand(name));
325 } 327 }
326 } 328 }
327 329
328 330
329 // Generate StoreTransition code, value is passed in r0 register. 331 void NamedStoreHandlerCompiler::GenerateRestoreNameAndMap(
330 // When leaving generated code after success, the receiver_reg and name_reg 332 Handle<Name> name, Handle<Map> transition) {
331 // may be clobbered. Upon branch to miss_label, the receiver and name 333 __ mov(this->name(), Operand(name));
332 // registers have their original values. 334 __ mov(StoreTransitionDescriptor::MapRegister(), Operand(transition));
333 void NamedStoreHandlerCompiler::GenerateStoreTransition(
334 Handle<Map> transition, Handle<Name> name, Register receiver_reg,
335 Register storage_reg, Register value_reg, Register scratch1,
336 Register scratch2, Register scratch3, Label* miss_label, Label* slow) {
337 // r0 : value
338 Label exit;
339
340 int descriptor = transition->LastAdded();
341 DescriptorArray* descriptors = transition->instance_descriptors();
342 PropertyDetails details = descriptors->GetDetails(descriptor);
343 Representation representation = details.representation();
344 DCHECK(!representation.IsNone());
345
346 if (details.type() == CONSTANT) {
347 Handle<Object> constant(descriptors->GetValue(descriptor), isolate());
348 __ Move(scratch1, constant);
349 __ cmp(value_reg, scratch1);
350 __ b(ne, miss_label);
351 } else if (representation.IsSmi()) {
352 __ JumpIfNotSmi(value_reg, miss_label);
353 } else if (representation.IsHeapObject()) {
354 __ JumpIfSmi(value_reg, miss_label);
355 HeapType* field_type = descriptors->GetFieldType(descriptor);
356 HeapType::Iterator<Map> it = field_type->Classes();
357 if (!it.Done()) {
358 __ ldr(scratch1, FieldMemOperand(value_reg, HeapObject::kMapOffset));
359 Label do_store;
360 while (true) {
361 __ CompareMap(scratch1, it.Current(), &do_store);
362 it.Advance();
363 if (it.Done()) {
364 __ b(ne, miss_label);
365 break;
366 }
367 __ b(eq, &do_store);
368 }
369 __ bind(&do_store);
370 }
371 } else if (representation.IsDouble()) {
372 Label do_store, heap_number;
373 __ LoadRoot(scratch3, Heap::kMutableHeapNumberMapRootIndex);
374 __ AllocateHeapNumber(storage_reg, scratch1, scratch2, scratch3, slow,
375 TAG_RESULT, MUTABLE);
376
377 __ JumpIfNotSmi(value_reg, &heap_number);
378 __ SmiUntag(scratch1, value_reg);
379 __ vmov(s0, scratch1);
380 __ vcvt_f64_s32(d0, s0);
381 __ jmp(&do_store);
382
383 __ bind(&heap_number);
384 __ CheckMap(value_reg, scratch1, Heap::kHeapNumberMapRootIndex, miss_label,
385 DONT_DO_SMI_CHECK);
386 __ vldr(d0, FieldMemOperand(value_reg, HeapNumber::kValueOffset));
387
388 __ bind(&do_store);
389 __ vstr(d0, FieldMemOperand(storage_reg, HeapNumber::kValueOffset));
390 }
391
392 // Stub never generated for objects that require access checks.
393 DCHECK(!transition->is_access_check_needed());
394
395 // Perform map transition for the receiver if necessary.
396 if (details.type() == FIELD &&
397 Map::cast(transition->GetBackPointer())->unused_property_fields() == 0) {
398 // The properties must be extended before we can store the value.
399 // We jump to a runtime call that extends the properties array.
400 __ push(receiver_reg);
401 __ mov(r2, Operand(transition));
402 __ Push(r2, r0);
403 __ TailCallExternalReference(
404 ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage),
405 isolate()),
406 3, 1);
407 return;
408 }
409
410 // Update the map of the object.
411 __ mov(scratch1, Operand(transition));
412 __ str(scratch1, FieldMemOperand(receiver_reg, HeapObject::kMapOffset));
413
414 // Update the write barrier for the map field.
415 __ RecordWriteField(receiver_reg, HeapObject::kMapOffset, scratch1, scratch2,
416 kLRHasNotBeenSaved, kDontSaveFPRegs, OMIT_REMEMBERED_SET,
417 OMIT_SMI_CHECK);
418
419 if (details.type() == CONSTANT) {
420 DCHECK(value_reg.is(r0));
421 __ Ret();
422 return;
423 }
424
425 int index = transition->instance_descriptors()->GetFieldIndex(
426 transition->LastAdded());
427
428 // Adjust for the number of properties stored in the object. Even in the
429 // face of a transition we can use the old map here because the size of the
430 // object and the number of in-object properties is not going to change.
431 index -= transition->inobject_properties();
432
433 // TODO(verwaest): Share this code as a code stub.
434 SmiCheck smi_check =
435 representation.IsTagged() ? INLINE_SMI_CHECK : OMIT_SMI_CHECK;
436 if (index < 0) {
437 // Set the property straight into the object.
438 int offset = transition->instance_size() + (index * kPointerSize);
439 if (representation.IsDouble()) {
440 __ str(storage_reg, FieldMemOperand(receiver_reg, offset));
441 } else {
442 __ str(value_reg, FieldMemOperand(receiver_reg, offset));
443 }
444
445 if (!representation.IsSmi()) {
446 // Update the write barrier for the array address.
447 if (!representation.IsDouble()) {
448 __ mov(storage_reg, value_reg);
449 }
450 __ RecordWriteField(receiver_reg, offset, storage_reg, scratch1,
451 kLRHasNotBeenSaved, kDontSaveFPRegs,
452 EMIT_REMEMBERED_SET, smi_check);
453 }
454 } else {
455 // Write to the properties array.
456 int offset = index * kPointerSize + FixedArray::kHeaderSize;
457 // Get the properties array
458 __ ldr(scratch1,
459 FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset));
460 if (representation.IsDouble()) {
461 __ str(storage_reg, FieldMemOperand(scratch1, offset));
462 } else {
463 __ str(value_reg, FieldMemOperand(scratch1, offset));
464 }
465
466 if (!representation.IsSmi()) {
467 // Update the write barrier for the array address.
468 if (!representation.IsDouble()) {
469 __ mov(storage_reg, value_reg);
470 }
471 __ RecordWriteField(scratch1, offset, storage_reg, receiver_reg,
472 kLRHasNotBeenSaved, kDontSaveFPRegs,
473 EMIT_REMEMBERED_SET, smi_check);
474 }
475 }
476
477 // Return the value (register r0).
478 DCHECK(value_reg.is(r0));
479 __ bind(&exit);
480 __ Ret();
481 } 335 }
482 336
483 337
484 void NamedStoreHandlerCompiler::GenerateStoreField(LookupIterator* lookup, 338 void NamedStoreHandlerCompiler::GenerateConstantCheck(Object* constant,
485 Register value_reg, 339 Register value_reg,
486 Label* miss_label) { 340 Label* miss_label) {
487 DCHECK(lookup->representation().IsHeapObject()); 341 __ Move(scratch1(), handle(constant, isolate()));
488 __ JumpIfSmi(value_reg, miss_label); 342 __ cmp(value_reg, scratch1());
489 HeapType::Iterator<Map> it = lookup->GetFieldType()->Classes(); 343 __ bne(miss_label);
490 __ ldr(scratch1(), FieldMemOperand(value_reg, HeapObject::kMapOffset));
491 Label do_store;
492 while (true) {
493 __ CompareMap(scratch1(), it.Current(), &do_store);
494 it.Advance();
495 if (it.Done()) {
496 __ b(ne, miss_label);
497 break;
498 }
499 __ b(eq, &do_store);
500 }
501 __ bind(&do_store);
502
503 StoreFieldStub stub(isolate(), lookup->GetFieldIndex(),
504 lookup->representation());
505 GenerateTailCall(masm(), stub.GetCode());
506 } 344 }
507 345
508 346
347 void NamedStoreHandlerCompiler::GenerateFieldTypeChecks(HeapType* field_type,
348 Register value_reg,
349 Label* miss_label) {
350 __ JumpIfSmi(value_reg, miss_label);
351 HeapType::Iterator<Map> it = field_type->Classes();
352 if (!it.Done()) {
353 __ LoadP(scratch1(), FieldMemOperand(value_reg, HeapObject::kMapOffset));
354 Label do_store;
355 while (true) {
356 __ CompareMap(scratch1(), it.Current(), &do_store);
357 it.Advance();
358 if (it.Done()) {
359 __ bne(miss_label);
360 break;
361 }
362 __ beq(&do_store);
363 }
364 __ bind(&do_store);
365 }
366 }
367
368
509 Register PropertyHandlerCompiler::CheckPrototypes( 369 Register PropertyHandlerCompiler::CheckPrototypes(
510 Register object_reg, Register holder_reg, Register scratch1, 370 Register object_reg, Register holder_reg, Register scratch1,
511 Register scratch2, Handle<Name> name, Label* miss, 371 Register scratch2, Handle<Name> name, Label* miss,
512 PrototypeCheckType check) { 372 PrototypeCheckType check) {
513 Handle<Map> receiver_map(IC::TypeToMap(*type(), isolate())); 373 Handle<Map> receiver_map(IC::TypeToMap(*type(), isolate()));
514 374
515 // Make sure there's no overlap between holder and object registers. 375 // Make sure there's no overlap between holder and object registers.
516 DCHECK(!scratch1.is(object_reg) && !scratch1.is(holder_reg)); 376 DCHECK(!scratch1.is(object_reg) && !scratch1.is(holder_reg));
517 DCHECK(!scratch2.is(object_reg) && !scratch2.is(holder_reg) && 377 DCHECK(!scratch2.is(object_reg) && !scratch2.is(holder_reg) &&
518 !scratch2.is(scratch1)); 378 !scratch2.is(scratch1));
(...skipping 27 matching lines...)
546 DCHECK(name->IsString()); 406 DCHECK(name->IsString());
547 name = factory()->InternalizeString(Handle<String>::cast(name)); 407 name = factory()->InternalizeString(Handle<String>::cast(name));
548 } 408 }
549 DCHECK(current.is_null() || 409 DCHECK(current.is_null() ||
550 current->property_dictionary()->FindEntry(name) == 410 current->property_dictionary()->FindEntry(name) ==
551 NameDictionary::kNotFound); 411 NameDictionary::kNotFound);
552 412
553 GenerateDictionaryNegativeLookup(masm(), miss, reg, name, scratch1, 413 GenerateDictionaryNegativeLookup(masm(), miss, reg, name, scratch1,
554 scratch2); 414 scratch2);
555 415
556 __ ldr(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset)); 416 __ LoadP(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
557 reg = holder_reg; // From now on the object will be in holder_reg. 417 reg = holder_reg; // From now on the object will be in holder_reg.
558 __ ldr(reg, FieldMemOperand(scratch1, Map::kPrototypeOffset)); 418 __ LoadP(reg, FieldMemOperand(scratch1, Map::kPrototypeOffset));
559 } else { 419 } else {
560 Register map_reg = scratch1; 420 Register map_reg = scratch1;
561 if (depth != 1 || check == CHECK_ALL_MAPS) { 421 if (depth != 1 || check == CHECK_ALL_MAPS) {
562 // CheckMap implicitly loads the map of |reg| into |map_reg|. 422 // CheckMap implicitly loads the map of |reg| into |map_reg|.
563 __ CheckMap(reg, map_reg, current_map, miss, DONT_DO_SMI_CHECK); 423 __ CheckMap(reg, map_reg, current_map, miss, DONT_DO_SMI_CHECK);
564 } else { 424 } else {
565 __ ldr(map_reg, FieldMemOperand(reg, HeapObject::kMapOffset)); 425 __ LoadP(map_reg, FieldMemOperand(reg, HeapObject::kMapOffset));
566 } 426 }
567 427
568 // Check access rights to the global object. This has to happen after 428 // Check access rights to the global object. This has to happen after
569 // the map check so that we know that the object is actually a global 429 // the map check so that we know that the object is actually a global
570 // object. 430 // object.
571 // This allows us to install generated handlers for accesses to the 431 // This allows us to install generated handlers for accesses to the
572 // global proxy (as opposed to using slow ICs). See corresponding code 432 // global proxy (as opposed to using slow ICs). See corresponding code
573 // in LookupForRead(). 433 // in LookupForRead().
574 if (current_map->IsJSGlobalProxyMap()) { 434 if (current_map->IsJSGlobalProxyMap()) {
575 __ CheckAccessGlobalProxy(reg, scratch2, miss); 435 __ CheckAccessGlobalProxy(reg, scratch2, miss);
576 } else if (current_map->IsJSGlobalObjectMap()) { 436 } else if (current_map->IsJSGlobalObjectMap()) {
577 GenerateCheckPropertyCell(masm(), Handle<JSGlobalObject>::cast(current), 437 GenerateCheckPropertyCell(masm(), Handle<JSGlobalObject>::cast(current),
578 name, scratch2, miss); 438 name, scratch2, miss);
579 } 439 }
580 440
581 reg = holder_reg; // From now on the object will be in holder_reg. 441 reg = holder_reg; // From now on the object will be in holder_reg.
582 442
583 // Two possible reasons for loading the prototype from the map: 443 // Two possible reasons for loading the prototype from the map:
584 // (1) Can't store references to new space in code. 444 // (1) Can't store references to new space in code.
585 // (2) Handler is shared for all receivers with the same prototype 445 // (2) Handler is shared for all receivers with the same prototype
586 // map (but not necessarily the same prototype instance). 446 // map (but not necessarily the same prototype instance).
587 bool load_prototype_from_map = 447 bool load_prototype_from_map =
588 heap()->InNewSpace(*prototype) || depth == 1; 448 heap()->InNewSpace(*prototype) || depth == 1;
589 if (load_prototype_from_map) { 449 if (load_prototype_from_map) {
590 __ ldr(reg, FieldMemOperand(map_reg, Map::kPrototypeOffset)); 450 __ LoadP(reg, FieldMemOperand(map_reg, Map::kPrototypeOffset));
591 } else { 451 } else {
592 __ mov(reg, Operand(prototype)); 452 __ mov(reg, Operand(prototype));
593 } 453 }
594 } 454 }
595 455
596 // Go to the next object in the prototype chain. 456 // Go to the next object in the prototype chain.
597 current = prototype; 457 current = prototype;
598 current_map = handle(current->map()); 458 current_map = handle(current->map());
599 } 459 }
600 460
(...skipping 34 matching lines...)
635 __ b(&success); 495 __ b(&success);
636 GenerateRestoreName(miss, name); 496 GenerateRestoreName(miss, name);
637 TailCallBuiltin(masm(), MissBuiltin(kind())); 497 TailCallBuiltin(masm(), MissBuiltin(kind()));
638 __ bind(&success); 498 __ bind(&success);
639 } 499 }
640 } 500 }
641 501
642 502
643 void NamedLoadHandlerCompiler::GenerateLoadConstant(Handle<Object> value) { 503 void NamedLoadHandlerCompiler::GenerateLoadConstant(Handle<Object> value) {
644 // Return the constant value. 504 // Return the constant value.
645 __ Move(r0, value); 505 __ Move(r3, value);
646 __ Ret(); 506 __ Ret();
647 } 507 }
648 508
649 509
650 void NamedLoadHandlerCompiler::GenerateLoadCallback( 510 void NamedLoadHandlerCompiler::GenerateLoadCallback(
651 Register reg, Handle<ExecutableAccessorInfo> callback) { 511 Register reg, Handle<ExecutableAccessorInfo> callback) {
652 // Build AccessorInfo::args_ list on the stack and push property name below 512 // Build AccessorInfo::args_ list on the stack and push property name below
653 // the exit frame to make GC aware of them and store pointers to them. 513 // the exit frame to make GC aware of them and store pointers to them.
654 STATIC_ASSERT(PropertyCallbackArguments::kHolderIndex == 0); 514 STATIC_ASSERT(PropertyCallbackArguments::kHolderIndex == 0);
655 STATIC_ASSERT(PropertyCallbackArguments::kIsolateIndex == 1); 515 STATIC_ASSERT(PropertyCallbackArguments::kIsolateIndex == 1);
656 STATIC_ASSERT(PropertyCallbackArguments::kReturnValueDefaultValueIndex == 2); 516 STATIC_ASSERT(PropertyCallbackArguments::kReturnValueDefaultValueIndex == 2);
657 STATIC_ASSERT(PropertyCallbackArguments::kReturnValueOffset == 3); 517 STATIC_ASSERT(PropertyCallbackArguments::kReturnValueOffset == 3);
658 STATIC_ASSERT(PropertyCallbackArguments::kDataIndex == 4); 518 STATIC_ASSERT(PropertyCallbackArguments::kDataIndex == 4);
659 STATIC_ASSERT(PropertyCallbackArguments::kThisIndex == 5); 519 STATIC_ASSERT(PropertyCallbackArguments::kThisIndex == 5);
660 STATIC_ASSERT(PropertyCallbackArguments::kArgsLength == 6); 520 STATIC_ASSERT(PropertyCallbackArguments::kArgsLength == 6);
661 DCHECK(!scratch2().is(reg)); 521 DCHECK(!scratch2().is(reg));
662 DCHECK(!scratch3().is(reg)); 522 DCHECK(!scratch3().is(reg));
663 DCHECK(!scratch4().is(reg)); 523 DCHECK(!scratch4().is(reg));
664 __ push(receiver()); 524 __ push(receiver());
665 if (heap()->InNewSpace(callback->data())) { 525 if (heap()->InNewSpace(callback->data())) {
666 __ Move(scratch3(), callback); 526 __ Move(scratch3(), callback);
667 __ ldr(scratch3(), 527 __ LoadP(scratch3(),
668 FieldMemOperand(scratch3(), ExecutableAccessorInfo::kDataOffset)); 528 FieldMemOperand(scratch3(), ExecutableAccessorInfo::kDataOffset));
669 } else { 529 } else {
670 __ Move(scratch3(), Handle<Object>(callback->data(), isolate())); 530 __ Move(scratch3(), Handle<Object>(callback->data(), isolate()));
671 } 531 }
672 __ push(scratch3()); 532 __ push(scratch3());
673 __ LoadRoot(scratch3(), Heap::kUndefinedValueRootIndex); 533 __ LoadRoot(scratch3(), Heap::kUndefinedValueRootIndex);
674 __ mov(scratch4(), scratch3()); 534 __ mr(scratch4(), scratch3());
675 __ Push(scratch3(), scratch4()); 535 __ Push(scratch3(), scratch4());
676 __ mov(scratch4(), Operand(ExternalReference::isolate_address(isolate()))); 536 __ mov(scratch4(), Operand(ExternalReference::isolate_address(isolate())));
677 __ Push(scratch4(), reg); 537 __ Push(scratch4(), reg);
678 __ mov(scratch2(), sp); // scratch2 = PropertyAccessorInfo::args_
679 __ push(name()); 538 __ push(name());
680 539
681 // Abi for CallApiGetter 540 // Abi for CallApiGetter
682 Register getter_address_reg = ApiGetterDescriptor::function_address(); 541 Register getter_address_reg = ApiGetterDescriptor::function_address();
683 542
684 Address getter_address = v8::ToCData<Address>(callback->getter()); 543 Address getter_address = v8::ToCData<Address>(callback->getter());
685 ApiFunction fun(getter_address); 544 ApiFunction fun(getter_address);
686 ExternalReference::Type type = ExternalReference::DIRECT_GETTER_CALL; 545 ExternalReference::Type type = ExternalReference::DIRECT_GETTER_CALL;
687 ExternalReference ref = ExternalReference(&fun, type, isolate()); 546 ExternalReference ref = ExternalReference(&fun, type, isolate());
688 __ mov(getter_address_reg, Operand(ref)); 547 __ mov(getter_address_reg, Operand(ref));
(...skipping 36 matching lines...)
725 // interceptor's holder has been compiled before (see a caller 584 // interceptor's holder has been compiled before (see a caller
726 // of this method.) 585 // of this method.)
727 CompileCallLoadPropertyWithInterceptor( 586 CompileCallLoadPropertyWithInterceptor(
728 masm(), receiver(), holder_reg, this->name(), holder(), 587 masm(), receiver(), holder_reg, this->name(), holder(),
729 IC::kLoadPropertyWithInterceptorOnly); 588 IC::kLoadPropertyWithInterceptorOnly);
730 589
731 // Check if interceptor provided a value for property. If it's 590 // Check if interceptor provided a value for property. If it's
732 // the case, return immediately. 591 // the case, return immediately.
733 Label interceptor_failed; 592 Label interceptor_failed;
734 __ LoadRoot(scratch1(), Heap::kNoInterceptorResultSentinelRootIndex); 593 __ LoadRoot(scratch1(), Heap::kNoInterceptorResultSentinelRootIndex);
735 __ cmp(r0, scratch1()); 594 __ cmp(r3, scratch1());
736 __ b(eq, &interceptor_failed); 595 __ beq(&interceptor_failed);
737 frame_scope.GenerateLeaveFrame(); 596 frame_scope.GenerateLeaveFrame();
738 __ Ret(); 597 __ Ret();
739 598
740 __ bind(&interceptor_failed); 599 __ bind(&interceptor_failed);
741 __ pop(this->name()); 600 __ pop(this->name());
742 __ pop(holder_reg); 601 __ pop(holder_reg);
743 if (must_preserve_receiver_reg) { 602 if (must_preserve_receiver_reg) {
744 __ pop(receiver()); 603 __ pop(receiver());
745 } 604 }
746 // Leave the internal frame. 605 // Leave the internal frame.
(...skipping 15 matching lines...)
762 __ TailCallExternalReference( 621 __ TailCallExternalReference(
763 ref, NamedLoadHandlerCompiler::kInterceptorArgsLength, 1); 622 ref, NamedLoadHandlerCompiler::kInterceptorArgsLength, 1);
764 } 623 }
765 624
766 625
767 Handle<Code> NamedStoreHandlerCompiler::CompileStoreCallback( 626 Handle<Code> NamedStoreHandlerCompiler::CompileStoreCallback(
768 Handle<JSObject> object, Handle<Name> name, 627 Handle<JSObject> object, Handle<Name> name,
769 Handle<ExecutableAccessorInfo> callback) { 628 Handle<ExecutableAccessorInfo> callback) {
770 Register holder_reg = Frontend(receiver(), name); 629 Register holder_reg = Frontend(receiver(), name);
771 630
772 __ push(receiver()); // receiver 631 __ Push(receiver(), holder_reg); // receiver
773 __ push(holder_reg); 632 __ mov(ip, Operand(callback)); // callback info
774 __ mov(ip, Operand(callback)); // callback info
775 __ push(ip); 633 __ push(ip);
776 __ mov(ip, Operand(name)); 634 __ mov(ip, Operand(name));
777 __ Push(ip, value()); 635 __ Push(ip, value());
778 636
779 // Do tail-call to the runtime system. 637 // Do tail-call to the runtime system.
780 ExternalReference store_callback_property = 638 ExternalReference store_callback_property =
781 ExternalReference(IC_Utility(IC::kStoreCallbackProperty), isolate()); 639 ExternalReference(IC_Utility(IC::kStoreCallbackProperty), isolate());
782 __ TailCallExternalReference(store_callback_property, 5, 1); 640 __ TailCallExternalReference(store_callback_property, 5, 1);
783 641
784 // Return the generated code. 642 // Return the generated code.
(...skipping 21 matching lines...)
806 664
807 665
808 Handle<Code> NamedLoadHandlerCompiler::CompileLoadGlobal( 666 Handle<Code> NamedLoadHandlerCompiler::CompileLoadGlobal(
809 Handle<PropertyCell> cell, Handle<Name> name, bool is_configurable) { 667 Handle<PropertyCell> cell, Handle<Name> name, bool is_configurable) {
810 Label miss; 668 Label miss;
811 FrontendHeader(receiver(), name, &miss); 669 FrontendHeader(receiver(), name, &miss);
812 670
813 // Get the value from the cell. 671 // Get the value from the cell.
814 Register result = StoreDescriptor::ValueRegister(); 672 Register result = StoreDescriptor::ValueRegister();
815 __ mov(result, Operand(cell)); 673 __ mov(result, Operand(cell));
816 __ ldr(result, FieldMemOperand(result, Cell::kValueOffset)); 674 __ LoadP(result, FieldMemOperand(result, Cell::kValueOffset));
817 675
818 // Check for deleted property if property can actually be deleted. 676 // Check for deleted property if property can actually be deleted.
819 if (is_configurable) { 677 if (is_configurable) {
820 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); 678 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
821 __ cmp(result, ip); 679 __ cmp(result, ip);
822 __ b(eq, &miss); 680 __ beq(&miss);
823 } 681 }
824 682
825 Counters* counters = isolate()->counters(); 683 Counters* counters = isolate()->counters();
826 __ IncrementCounter(counters->named_load_global_stub(), 1, r1, r3); 684 __ IncrementCounter(counters->named_load_global_stub(), 1, r4, r6);
827 __ Ret(); 685 __ Ret();
828 686
829 FrontendFooter(name, &miss); 687 FrontendFooter(name, &miss);
830 688
831 // Return the generated code. 689 // Return the generated code.
832 return GetCode(kind(), Code::NORMAL, name); 690 return GetCode(kind(), Code::NORMAL, name);
833 } 691 }
834 692
835 693
836 #undef __ 694 #undef __
837 } 695 }
838 } // namespace v8::internal 696 } // namespace v8::internal
839 697
840 #endif // V8_TARGET_ARCH_ARM 698 #endif // V8_TARGET_ARCH_ARM
