Chromium Code Reviews

Side by Side Diff: src/arm/stub-cache-arm.cc

Issue 430503007: Rename ASSERT* to DCHECK*. (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: REBASE and fixes | Created 6 years, 4 months ago
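
Context for the diff below: DCHECK* keeps the semantics of the old ASSERT* macros; the condition is evaluated and enforced only in debug builds, and it compiles away entirely in release builds. A simplified sketch of the idea, for illustration only (the real definitions live in V8's base logging header and report through the actual fatal-error machinery; V8_Fatal below stands in for that):

// Forward declaration of the fatal-error reporter assumed by this sketch.
void V8_Fatal(const char* file, int line, const char* format, ...);

#ifdef DEBUG
// Debug builds: evaluate the condition and abort with a message on failure.
#define DCHECK(condition)                                                \
  do {                                                                   \
    if (!(condition)) {                                                  \
      V8_Fatal(__FILE__, __LINE__, "Check failed: %s.", #condition);     \
    }                                                                    \
  } while (false)
#else
// Release builds: the condition is not evaluated at all.
#define DCHECK(condition) ((void) 0)
#endif

// The comparison variants in the DCHECK* family follow the same pattern:
#define DCHECK_EQ(lhs, rhs) DCHECK((lhs) == (rhs))
#define DCHECK_NE(lhs, rhs) DCHECK((lhs) != (rhs))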
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "src/v8.h" 5 #include "src/v8.h"
6 6
7 #if V8_TARGET_ARCH_ARM 7 #if V8_TARGET_ARCH_ARM
8 8
9 #include "src/codegen.h" 9 #include "src/codegen.h"
10 #include "src/ic-inl.h" 10 #include "src/ic-inl.h"
(...skipping 18 matching lines...)
29 Register offset_scratch) { 29 Register offset_scratch) {
30 ExternalReference key_offset(isolate->stub_cache()->key_reference(table)); 30 ExternalReference key_offset(isolate->stub_cache()->key_reference(table));
31 ExternalReference value_offset(isolate->stub_cache()->value_reference(table)); 31 ExternalReference value_offset(isolate->stub_cache()->value_reference(table));
32 ExternalReference map_offset(isolate->stub_cache()->map_reference(table)); 32 ExternalReference map_offset(isolate->stub_cache()->map_reference(table));
33 33
34 uint32_t key_off_addr = reinterpret_cast<uint32_t>(key_offset.address()); 34 uint32_t key_off_addr = reinterpret_cast<uint32_t>(key_offset.address());
35 uint32_t value_off_addr = reinterpret_cast<uint32_t>(value_offset.address()); 35 uint32_t value_off_addr = reinterpret_cast<uint32_t>(value_offset.address());
36 uint32_t map_off_addr = reinterpret_cast<uint32_t>(map_offset.address()); 36 uint32_t map_off_addr = reinterpret_cast<uint32_t>(map_offset.address());
37 37
38 // Check the relative positions of the address fields. 38 // Check the relative positions of the address fields.
39 ASSERT(value_off_addr > key_off_addr); 39 DCHECK(value_off_addr > key_off_addr);
40 ASSERT((value_off_addr - key_off_addr) % 4 == 0); 40 DCHECK((value_off_addr - key_off_addr) % 4 == 0);
41 ASSERT((value_off_addr - key_off_addr) < (256 * 4)); 41 DCHECK((value_off_addr - key_off_addr) < (256 * 4));
42 ASSERT(map_off_addr > key_off_addr); 42 DCHECK(map_off_addr > key_off_addr);
43 ASSERT((map_off_addr - key_off_addr) % 4 == 0); 43 DCHECK((map_off_addr - key_off_addr) % 4 == 0);
44 ASSERT((map_off_addr - key_off_addr) < (256 * 4)); 44 DCHECK((map_off_addr - key_off_addr) < (256 * 4));
45 45
46 Label miss; 46 Label miss;
47 Register base_addr = scratch; 47 Register base_addr = scratch;
48 scratch = no_reg; 48 scratch = no_reg;
49 49
50 // Multiply by 3 because there are 3 fields per entry (name, code, map). 50 // Multiply by 3 because there are 3 fields per entry (name, code, map).
51 __ add(offset_scratch, offset, Operand(offset, LSL, 1)); 51 __ add(offset_scratch, offset, Operand(offset, LSL, 1));
52 52
53 // Calculate the base address of the entry. 53 // Calculate the base address of the entry.
54 __ mov(base_addr, Operand(key_offset)); 54 __ mov(base_addr, Operand(key_offset));
(...skipping 15 matching lines...)
70 scratch2 = no_reg; 70 scratch2 = no_reg;
71 __ ldr(code, MemOperand(base_addr, value_off_addr - key_off_addr)); 71 __ ldr(code, MemOperand(base_addr, value_off_addr - key_off_addr));
72 72
73 // Check that the flags match what we're looking for. 73 // Check that the flags match what we're looking for.
74 Register flags_reg = base_addr; 74 Register flags_reg = base_addr;
75 base_addr = no_reg; 75 base_addr = no_reg;
76 __ ldr(flags_reg, FieldMemOperand(code, Code::kFlagsOffset)); 76 __ ldr(flags_reg, FieldMemOperand(code, Code::kFlagsOffset));
77 // It's a nice optimization if this constant is encodable in the bic insn. 77 // It's a nice optimization if this constant is encodable in the bic insn.
78 78
79 uint32_t mask = Code::kFlagsNotUsedInLookup; 79 uint32_t mask = Code::kFlagsNotUsedInLookup;
80 ASSERT(__ ImmediateFitsAddrMode1Instruction(mask)); 80 DCHECK(__ ImmediateFitsAddrMode1Instruction(mask));
81 __ bic(flags_reg, flags_reg, Operand(mask)); 81 __ bic(flags_reg, flags_reg, Operand(mask));
82 __ cmp(flags_reg, Operand(flags)); 82 __ cmp(flags_reg, Operand(flags));
83 __ b(ne, &miss); 83 __ b(ne, &miss);
84 84
85 #ifdef DEBUG 85 #ifdef DEBUG
86 if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) { 86 if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) {
87 __ jmp(&miss); 87 __ jmp(&miss);
88 } else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) { 88 } else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) {
89 __ jmp(&miss); 89 __ jmp(&miss);
90 } 90 }
91 #endif 91 #endif
92 92
93 // Jump to the first instruction in the code stub. 93 // Jump to the first instruction in the code stub.
94 __ add(pc, code, Operand(Code::kHeaderSize - kHeapObjectTag)); 94 __ add(pc, code, Operand(Code::kHeaderSize - kHeapObjectTag));
95 95
96 // Miss: fall through. 96 // Miss: fall through.
97 __ bind(&miss); 97 __ bind(&miss);
98 } 98 }
99 99
100 100
101 void PropertyHandlerCompiler::GenerateDictionaryNegativeLookup( 101 void PropertyHandlerCompiler::GenerateDictionaryNegativeLookup(
102 MacroAssembler* masm, Label* miss_label, Register receiver, 102 MacroAssembler* masm, Label* miss_label, Register receiver,
103 Handle<Name> name, Register scratch0, Register scratch1) { 103 Handle<Name> name, Register scratch0, Register scratch1) {
104 ASSERT(name->IsUniqueName()); 104 DCHECK(name->IsUniqueName());
105 ASSERT(!receiver.is(scratch0)); 105 DCHECK(!receiver.is(scratch0));
106 Counters* counters = masm->isolate()->counters(); 106 Counters* counters = masm->isolate()->counters();
107 __ IncrementCounter(counters->negative_lookups(), 1, scratch0, scratch1); 107 __ IncrementCounter(counters->negative_lookups(), 1, scratch0, scratch1);
108 __ IncrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1); 108 __ IncrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);
109 109
110 Label done; 110 Label done;
111 111
112 const int kInterceptorOrAccessCheckNeededMask = 112 const int kInterceptorOrAccessCheckNeededMask =
113 (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded); 113 (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded);
114 114
115 // Bail out if the receiver has a named interceptor or requires access checks. 115 // Bail out if the receiver has a named interceptor or requires access checks.
(...skipping 40 matching lines...)
156 Register name, 156 Register name,
157 Register scratch, 157 Register scratch,
158 Register extra, 158 Register extra,
159 Register extra2, 159 Register extra2,
160 Register extra3) { 160 Register extra3) {
161 Isolate* isolate = masm->isolate(); 161 Isolate* isolate = masm->isolate();
162 Label miss; 162 Label miss;
163 163
164 // Make sure that code is valid. The multiplying code relies on the 164 // Make sure that code is valid. The multiplying code relies on the
165 // entry size being 12. 165 // entry size being 12.
166 ASSERT(sizeof(Entry) == 12); 166 DCHECK(sizeof(Entry) == 12);
167 167
168 // Make sure the flags does not name a specific type. 168 // Make sure the flags does not name a specific type.
169 ASSERT(Code::ExtractTypeFromFlags(flags) == 0); 169 DCHECK(Code::ExtractTypeFromFlags(flags) == 0);
170 170
171 // Make sure that there are no register conflicts. 171 // Make sure that there are no register conflicts.
172 ASSERT(!scratch.is(receiver)); 172 DCHECK(!scratch.is(receiver));
173 ASSERT(!scratch.is(name)); 173 DCHECK(!scratch.is(name));
174 ASSERT(!extra.is(receiver)); 174 DCHECK(!extra.is(receiver));
175 ASSERT(!extra.is(name)); 175 DCHECK(!extra.is(name));
176 ASSERT(!extra.is(scratch)); 176 DCHECK(!extra.is(scratch));
177 ASSERT(!extra2.is(receiver)); 177 DCHECK(!extra2.is(receiver));
178 ASSERT(!extra2.is(name)); 178 DCHECK(!extra2.is(name));
179 ASSERT(!extra2.is(scratch)); 179 DCHECK(!extra2.is(scratch));
180 ASSERT(!extra2.is(extra)); 180 DCHECK(!extra2.is(extra));
181 181
182 // Check scratch, extra and extra2 registers are valid. 182 // Check scratch, extra and extra2 registers are valid.
183 ASSERT(!scratch.is(no_reg)); 183 DCHECK(!scratch.is(no_reg));
184 ASSERT(!extra.is(no_reg)); 184 DCHECK(!extra.is(no_reg));
185 ASSERT(!extra2.is(no_reg)); 185 DCHECK(!extra2.is(no_reg));
186 ASSERT(!extra3.is(no_reg)); 186 DCHECK(!extra3.is(no_reg));
187 187
188 Counters* counters = masm->isolate()->counters(); 188 Counters* counters = masm->isolate()->counters();
189 __ IncrementCounter(counters->megamorphic_stub_cache_probes(), 1, 189 __ IncrementCounter(counters->megamorphic_stub_cache_probes(), 1,
190 extra2, extra3); 190 extra2, extra3);
191 191
192 // Check that the receiver isn't a smi. 192 // Check that the receiver isn't a smi.
193 __ JumpIfSmi(receiver, &miss); 193 __ JumpIfSmi(receiver, &miss);
194 194
195 // Get the map of the receiver and compute the hash. 195 // Get the map of the receiver and compute the hash.
196 __ ldr(scratch, FieldMemOperand(name, Name::kHashFieldOffset)); 196 __ ldr(scratch, FieldMemOperand(name, Name::kHashFieldOffset));
(...skipping 80 matching lines...)
277 } 277 }
278 278
279 279
280 // Generate code to check that a global property cell is empty. Create 280 // Generate code to check that a global property cell is empty. Create
281 // the property cell at compilation time if no cell exists for the 281 // the property cell at compilation time if no cell exists for the
282 // property. 282 // property.
283 void PropertyHandlerCompiler::GenerateCheckPropertyCell( 283 void PropertyHandlerCompiler::GenerateCheckPropertyCell(
284 MacroAssembler* masm, Handle<JSGlobalObject> global, Handle<Name> name, 284 MacroAssembler* masm, Handle<JSGlobalObject> global, Handle<Name> name,
285 Register scratch, Label* miss) { 285 Register scratch, Label* miss) {
286 Handle<Cell> cell = JSGlobalObject::EnsurePropertyCell(global, name); 286 Handle<Cell> cell = JSGlobalObject::EnsurePropertyCell(global, name);
287 ASSERT(cell->value()->IsTheHole()); 287 DCHECK(cell->value()->IsTheHole());
288 __ mov(scratch, Operand(cell)); 288 __ mov(scratch, Operand(cell));
289 __ ldr(scratch, FieldMemOperand(scratch, Cell::kValueOffset)); 289 __ ldr(scratch, FieldMemOperand(scratch, Cell::kValueOffset));
290 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); 290 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
291 __ cmp(scratch, ip); 291 __ cmp(scratch, ip);
292 __ b(ne, miss); 292 __ b(ne, miss);
293 } 293 }
294 294
295 295
296 static void PushInterceptorArguments(MacroAssembler* masm, Register receiver, 296 static void PushInterceptorArguments(MacroAssembler* masm, Register receiver,
297 Register holder, Register name, 297 Register holder, Register name,
298 Handle<JSObject> holder_obj) { 298 Handle<JSObject> holder_obj) {
299 STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsNameIndex == 0); 299 STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsNameIndex == 0);
300 STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsInfoIndex == 1); 300 STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsInfoIndex == 1);
301 STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsThisIndex == 2); 301 STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsThisIndex == 2);
302 STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsHolderIndex == 3); 302 STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsHolderIndex == 3);
303 STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsLength == 4); 303 STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsLength == 4);
304 __ push(name); 304 __ push(name);
305 Handle<InterceptorInfo> interceptor(holder_obj->GetNamedInterceptor()); 305 Handle<InterceptorInfo> interceptor(holder_obj->GetNamedInterceptor());
306 ASSERT(!masm->isolate()->heap()->InNewSpace(*interceptor)); 306 DCHECK(!masm->isolate()->heap()->InNewSpace(*interceptor));
307 Register scratch = name; 307 Register scratch = name;
308 __ mov(scratch, Operand(interceptor)); 308 __ mov(scratch, Operand(interceptor));
309 __ push(scratch); 309 __ push(scratch);
310 __ push(receiver); 310 __ push(receiver);
311 __ push(holder); 311 __ push(holder);
312 } 312 }
313 313
314 314
315 static void CompileCallLoadPropertyWithInterceptor( 315 static void CompileCallLoadPropertyWithInterceptor(
316 MacroAssembler* masm, Register receiver, Register holder, Register name, 316 MacroAssembler* masm, Register receiver, Register holder, Register name,
317 Handle<JSObject> holder_obj, IC::UtilityId id) { 317 Handle<JSObject> holder_obj, IC::UtilityId id) {
318 PushInterceptorArguments(masm, receiver, holder, name, holder_obj); 318 PushInterceptorArguments(masm, receiver, holder, name, holder_obj);
319 __ CallExternalReference(ExternalReference(IC_Utility(id), masm->isolate()), 319 __ CallExternalReference(ExternalReference(IC_Utility(id), masm->isolate()),
320 NamedLoadHandlerCompiler::kInterceptorArgsLength); 320 NamedLoadHandlerCompiler::kInterceptorArgsLength);
321 } 321 }
322 322
323 323
324 // Generate call to api function. 324 // Generate call to api function.
325 void PropertyHandlerCompiler::GenerateFastApiCall( 325 void PropertyHandlerCompiler::GenerateFastApiCall(
326 MacroAssembler* masm, const CallOptimization& optimization, 326 MacroAssembler* masm, const CallOptimization& optimization,
327 Handle<Map> receiver_map, Register receiver, Register scratch_in, 327 Handle<Map> receiver_map, Register receiver, Register scratch_in,
328 bool is_store, int argc, Register* values) { 328 bool is_store, int argc, Register* values) {
329 ASSERT(!receiver.is(scratch_in)); 329 DCHECK(!receiver.is(scratch_in));
330 __ push(receiver); 330 __ push(receiver);
331 // Write the arguments to stack frame. 331 // Write the arguments to stack frame.
332 for (int i = 0; i < argc; i++) { 332 for (int i = 0; i < argc; i++) {
333 Register arg = values[argc - 1 - i]; 333 Register arg = values[argc - 1 - i];
334 ASSERT(!receiver.is(arg)); 334 DCHECK(!receiver.is(arg));
335 ASSERT(!scratch_in.is(arg)); 335 DCHECK(!scratch_in.is(arg));
336 __ push(arg); 336 __ push(arg);
337 } 337 }
338 ASSERT(optimization.is_simple_api_call()); 338 DCHECK(optimization.is_simple_api_call());
339 339
340 // Abi for CallApiFunctionStub. 340 // Abi for CallApiFunctionStub.
341 Register callee = r0; 341 Register callee = r0;
342 Register call_data = r4; 342 Register call_data = r4;
343 Register holder = r2; 343 Register holder = r2;
344 Register api_function_address = r1; 344 Register api_function_address = r1;
345 345
346 // Put holder in place. 346 // Put holder in place.
347 CallOptimization::HolderLookup holder_lookup; 347 CallOptimization::HolderLookup holder_lookup;
348 Handle<JSObject> api_holder = 348 Handle<JSObject> api_holder =
(...skipping 70 matching lines...)
419 Handle<Map> transition, Handle<Name> name, Register receiver_reg, 419 Handle<Map> transition, Handle<Name> name, Register receiver_reg,
420 Register storage_reg, Register value_reg, Register scratch1, 420 Register storage_reg, Register value_reg, Register scratch1,
421 Register scratch2, Register scratch3, Label* miss_label, Label* slow) { 421 Register scratch2, Register scratch3, Label* miss_label, Label* slow) {
422 // r0 : value 422 // r0 : value
423 Label exit; 423 Label exit;
424 424
425 int descriptor = transition->LastAdded(); 425 int descriptor = transition->LastAdded();
426 DescriptorArray* descriptors = transition->instance_descriptors(); 426 DescriptorArray* descriptors = transition->instance_descriptors();
427 PropertyDetails details = descriptors->GetDetails(descriptor); 427 PropertyDetails details = descriptors->GetDetails(descriptor);
428 Representation representation = details.representation(); 428 Representation representation = details.representation();
429 ASSERT(!representation.IsNone()); 429 DCHECK(!representation.IsNone());
430 430
431 if (details.type() == CONSTANT) { 431 if (details.type() == CONSTANT) {
432 Handle<Object> constant(descriptors->GetValue(descriptor), isolate()); 432 Handle<Object> constant(descriptors->GetValue(descriptor), isolate());
433 __ Move(scratch1, constant); 433 __ Move(scratch1, constant);
434 __ cmp(value_reg, scratch1); 434 __ cmp(value_reg, scratch1);
435 __ b(ne, miss_label); 435 __ b(ne, miss_label);
436 } else if (representation.IsSmi()) { 436 } else if (representation.IsSmi()) {
437 __ JumpIfNotSmi(value_reg, miss_label); 437 __ JumpIfNotSmi(value_reg, miss_label);
438 } else if (representation.IsHeapObject()) { 438 } else if (representation.IsHeapObject()) {
439 __ JumpIfSmi(value_reg, miss_label); 439 __ JumpIfSmi(value_reg, miss_label);
(...skipping 28 matching lines...)
468 __ bind(&heap_number); 468 __ bind(&heap_number);
469 __ CheckMap(value_reg, scratch1, Heap::kHeapNumberMapRootIndex, 469 __ CheckMap(value_reg, scratch1, Heap::kHeapNumberMapRootIndex,
470 miss_label, DONT_DO_SMI_CHECK); 470 miss_label, DONT_DO_SMI_CHECK);
471 __ vldr(d0, FieldMemOperand(value_reg, HeapNumber::kValueOffset)); 471 __ vldr(d0, FieldMemOperand(value_reg, HeapNumber::kValueOffset));
472 472
473 __ bind(&do_store); 473 __ bind(&do_store);
474 __ vstr(d0, FieldMemOperand(storage_reg, HeapNumber::kValueOffset)); 474 __ vstr(d0, FieldMemOperand(storage_reg, HeapNumber::kValueOffset));
475 } 475 }
476 476
477 // Stub never generated for objects that require access checks. 477 // Stub never generated for objects that require access checks.
478 ASSERT(!transition->is_access_check_needed()); 478 DCHECK(!transition->is_access_check_needed());
479 479
480 // Perform map transition for the receiver if necessary. 480 // Perform map transition for the receiver if necessary.
481 if (details.type() == FIELD && 481 if (details.type() == FIELD &&
482 Map::cast(transition->GetBackPointer())->unused_property_fields() == 0) { 482 Map::cast(transition->GetBackPointer())->unused_property_fields() == 0) {
483 // The properties must be extended before we can store the value. 483 // The properties must be extended before we can store the value.
484 // We jump to a runtime call that extends the properties array. 484 // We jump to a runtime call that extends the properties array.
485 __ push(receiver_reg); 485 __ push(receiver_reg);
486 __ mov(r2, Operand(transition)); 486 __ mov(r2, Operand(transition));
487 __ Push(r2, r0); 487 __ Push(r2, r0);
488 __ TailCallExternalReference( 488 __ TailCallExternalReference(
(...skipping 11 matching lines...)
500 __ RecordWriteField(receiver_reg, 500 __ RecordWriteField(receiver_reg,
501 HeapObject::kMapOffset, 501 HeapObject::kMapOffset,
502 scratch1, 502 scratch1,
503 scratch2, 503 scratch2,
504 kLRHasNotBeenSaved, 504 kLRHasNotBeenSaved,
505 kDontSaveFPRegs, 505 kDontSaveFPRegs,
506 OMIT_REMEMBERED_SET, 506 OMIT_REMEMBERED_SET,
507 OMIT_SMI_CHECK); 507 OMIT_SMI_CHECK);
508 508
509 if (details.type() == CONSTANT) { 509 if (details.type() == CONSTANT) {
510 ASSERT(value_reg.is(r0)); 510 DCHECK(value_reg.is(r0));
511 __ Ret(); 511 __ Ret();
512 return; 512 return;
513 } 513 }
514 514
515 int index = transition->instance_descriptors()->GetFieldIndex( 515 int index = transition->instance_descriptors()->GetFieldIndex(
516 transition->LastAdded()); 516 transition->LastAdded());
517 517
518 // Adjust for the number of properties stored in the object. Even in the 518 // Adjust for the number of properties stored in the object. Even in the
519 // face of a transition we can use the old map here because the size of the 519 // face of a transition we can use the old map here because the size of the
520 // object and the number of in-object properties is not going to change. 520 // object and the number of in-object properties is not going to change.
(...skipping 47 matching lines...)
568 storage_reg, 568 storage_reg,
569 receiver_reg, 569 receiver_reg,
570 kLRHasNotBeenSaved, 570 kLRHasNotBeenSaved,
571 kDontSaveFPRegs, 571 kDontSaveFPRegs,
572 EMIT_REMEMBERED_SET, 572 EMIT_REMEMBERED_SET,
573 smi_check); 573 smi_check);
574 } 574 }
575 } 575 }
576 576
577 // Return the value (register r0). 577 // Return the value (register r0).
578 ASSERT(value_reg.is(r0)); 578 DCHECK(value_reg.is(r0));
579 __ bind(&exit); 579 __ bind(&exit);
580 __ Ret(); 580 __ Ret();
581 } 581 }
582 582
583 583
584 // Generate StoreField code, value is passed in r0 register. 584 // Generate StoreField code, value is passed in r0 register.
585 // When leaving generated code after success, the receiver_reg and name_reg 585 // When leaving generated code after success, the receiver_reg and name_reg
586 // may be clobbered. Upon branch to miss_label, the receiver and name 586 // may be clobbered. Upon branch to miss_label, the receiver and name
587 // registers have their original values. 587 // registers have their original values.
588 void NamedStoreHandlerCompiler::GenerateStoreField( 588 void NamedStoreHandlerCompiler::GenerateStoreField(
589 Handle<JSObject> object, LookupResult* lookup, Register receiver_reg, 589 Handle<JSObject> object, LookupResult* lookup, Register receiver_reg,
590 Register name_reg, Register value_reg, Register scratch1, Register scratch2, 590 Register name_reg, Register value_reg, Register scratch1, Register scratch2,
591 Label* miss_label) { 591 Label* miss_label) {
592 // r0 : value 592 // r0 : value
593 Label exit; 593 Label exit;
594 594
595 // Stub never generated for objects that require access checks. 595 // Stub never generated for objects that require access checks.
596 ASSERT(!object->IsAccessCheckNeeded()); 596 DCHECK(!object->IsAccessCheckNeeded());
597 ASSERT(!object->IsJSGlobalProxy()); 597 DCHECK(!object->IsJSGlobalProxy());
598 598
599 FieldIndex index = lookup->GetFieldIndex(); 599 FieldIndex index = lookup->GetFieldIndex();
600 600
601 Representation representation = lookup->representation(); 601 Representation representation = lookup->representation();
602 ASSERT(!representation.IsNone()); 602 DCHECK(!representation.IsNone());
603 if (representation.IsSmi()) { 603 if (representation.IsSmi()) {
604 __ JumpIfNotSmi(value_reg, miss_label); 604 __ JumpIfNotSmi(value_reg, miss_label);
605 } else if (representation.IsHeapObject()) { 605 } else if (representation.IsHeapObject()) {
606 __ JumpIfSmi(value_reg, miss_label); 606 __ JumpIfSmi(value_reg, miss_label);
607 HeapType* field_type = lookup->GetFieldType(); 607 HeapType* field_type = lookup->GetFieldType();
608 HeapType::Iterator<Map> it = field_type->Classes(); 608 HeapType::Iterator<Map> it = field_type->Classes();
609 if (!it.Done()) { 609 if (!it.Done()) {
610 __ ldr(scratch1, FieldMemOperand(value_reg, HeapObject::kMapOffset)); 610 __ ldr(scratch1, FieldMemOperand(value_reg, HeapObject::kMapOffset));
611 Label do_store; 611 Label do_store;
612 while (true) { 612 while (true) {
(...skipping 26 matching lines...)
639 __ jmp(&do_store); 639 __ jmp(&do_store);
640 640
641 __ bind(&heap_number); 641 __ bind(&heap_number);
642 __ CheckMap(value_reg, scratch2, Heap::kHeapNumberMapRootIndex, 642 __ CheckMap(value_reg, scratch2, Heap::kHeapNumberMapRootIndex,
643 miss_label, DONT_DO_SMI_CHECK); 643 miss_label, DONT_DO_SMI_CHECK);
644 __ vldr(d0, FieldMemOperand(value_reg, HeapNumber::kValueOffset)); 644 __ vldr(d0, FieldMemOperand(value_reg, HeapNumber::kValueOffset));
645 645
646 __ bind(&do_store); 646 __ bind(&do_store);
647 __ vstr(d0, FieldMemOperand(scratch1, HeapNumber::kValueOffset)); 647 __ vstr(d0, FieldMemOperand(scratch1, HeapNumber::kValueOffset));
648 // Return the value (register r0). 648 // Return the value (register r0).
649 ASSERT(value_reg.is(r0)); 649 DCHECK(value_reg.is(r0));
650 __ Ret(); 650 __ Ret();
651 return; 651 return;
652 } 652 }
653 653
654 // TODO(verwaest): Share this code as a code stub. 654 // TODO(verwaest): Share this code as a code stub.
655 SmiCheck smi_check = representation.IsTagged() 655 SmiCheck smi_check = representation.IsTagged()
656 ? INLINE_SMI_CHECK : OMIT_SMI_CHECK; 656 ? INLINE_SMI_CHECK : OMIT_SMI_CHECK;
657 if (index.is_inobject()) { 657 if (index.is_inobject()) {
658 // Set the property straight into the object. 658 // Set the property straight into the object.
659 __ str(value_reg, FieldMemOperand(receiver_reg, index.offset())); 659 __ str(value_reg, FieldMemOperand(receiver_reg, index.offset()));
(...skipping 33 matching lines...)
693 name_reg, 693 name_reg,
694 receiver_reg, 694 receiver_reg,
695 kLRHasNotBeenSaved, 695 kLRHasNotBeenSaved,
696 kDontSaveFPRegs, 696 kDontSaveFPRegs,
697 EMIT_REMEMBERED_SET, 697 EMIT_REMEMBERED_SET,
698 smi_check); 698 smi_check);
699 } 699 }
700 } 700 }
701 701
702 // Return the value (register r0). 702 // Return the value (register r0).
703 ASSERT(value_reg.is(r0)); 703 DCHECK(value_reg.is(r0));
704 __ bind(&exit); 704 __ bind(&exit);
705 __ Ret(); 705 __ Ret();
706 } 706 }
707 707
708 708
709 Register PropertyHandlerCompiler::CheckPrototypes( 709 Register PropertyHandlerCompiler::CheckPrototypes(
710 Register object_reg, Register holder_reg, Register scratch1, 710 Register object_reg, Register holder_reg, Register scratch1,
711 Register scratch2, Handle<Name> name, Label* miss, 711 Register scratch2, Handle<Name> name, Label* miss,
712 PrototypeCheckType check) { 712 PrototypeCheckType check) {
713 Handle<Map> receiver_map(IC::TypeToMap(*type(), isolate())); 713 Handle<Map> receiver_map(IC::TypeToMap(*type(), isolate()));
714 714
715 // Make sure there's no overlap between holder and object registers. 715 // Make sure there's no overlap between holder and object registers.
716 ASSERT(!scratch1.is(object_reg) && !scratch1.is(holder_reg)); 716 DCHECK(!scratch1.is(object_reg) && !scratch1.is(holder_reg));
717 ASSERT(!scratch2.is(object_reg) && !scratch2.is(holder_reg) 717 DCHECK(!scratch2.is(object_reg) && !scratch2.is(holder_reg)
718 && !scratch2.is(scratch1)); 718 && !scratch2.is(scratch1));
719 719
720 // Keep track of the current object in register reg. 720 // Keep track of the current object in register reg.
721 Register reg = object_reg; 721 Register reg = object_reg;
722 int depth = 0; 722 int depth = 0;
723 723
724 Handle<JSObject> current = Handle<JSObject>::null(); 724 Handle<JSObject> current = Handle<JSObject>::null();
725 if (type()->IsConstant()) { 725 if (type()->IsConstant()) {
726 current = Handle<JSObject>::cast(type()->AsConstant()->Value()); 726 current = Handle<JSObject>::cast(type()->AsConstant()->Value());
727 } 727 }
728 Handle<JSObject> prototype = Handle<JSObject>::null(); 728 Handle<JSObject> prototype = Handle<JSObject>::null();
729 Handle<Map> current_map = receiver_map; 729 Handle<Map> current_map = receiver_map;
730 Handle<Map> holder_map(holder()->map()); 730 Handle<Map> holder_map(holder()->map());
731 // Traverse the prototype chain and check the maps in the prototype chain for 731 // Traverse the prototype chain and check the maps in the prototype chain for
732 // fast and global objects or do negative lookup for normal objects. 732 // fast and global objects or do negative lookup for normal objects.
733 while (!current_map.is_identical_to(holder_map)) { 733 while (!current_map.is_identical_to(holder_map)) {
734 ++depth; 734 ++depth;
735 735
736 // Only global objects and objects that do not require access 736 // Only global objects and objects that do not require access
737 // checks are allowed in stubs. 737 // checks are allowed in stubs.
738 ASSERT(current_map->IsJSGlobalProxyMap() || 738 DCHECK(current_map->IsJSGlobalProxyMap() ||
739 !current_map->is_access_check_needed()); 739 !current_map->is_access_check_needed());
740 740
741 prototype = handle(JSObject::cast(current_map->prototype())); 741 prototype = handle(JSObject::cast(current_map->prototype()));
742 if (current_map->is_dictionary_map() && 742 if (current_map->is_dictionary_map() &&
743 !current_map->IsJSGlobalObjectMap() && 743 !current_map->IsJSGlobalObjectMap() &&
744 !current_map->IsJSGlobalProxyMap()) { 744 !current_map->IsJSGlobalProxyMap()) {
745 if (!name->IsUniqueName()) { 745 if (!name->IsUniqueName()) {
746 ASSERT(name->IsString()); 746 DCHECK(name->IsString());
747 name = factory()->InternalizeString(Handle<String>::cast(name)); 747 name = factory()->InternalizeString(Handle<String>::cast(name));
748 } 748 }
749 ASSERT(current.is_null() || 749 DCHECK(current.is_null() ||
750 current->property_dictionary()->FindEntry(name) == 750 current->property_dictionary()->FindEntry(name) ==
751 NameDictionary::kNotFound); 751 NameDictionary::kNotFound);
752 752
753 GenerateDictionaryNegativeLookup(masm(), miss, reg, name, 753 GenerateDictionaryNegativeLookup(masm(), miss, reg, name,
754 scratch1, scratch2); 754 scratch1, scratch2);
755 755
756 __ ldr(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset)); 756 __ ldr(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
757 reg = holder_reg; // From now on the object will be in holder_reg. 757 reg = holder_reg; // From now on the object will be in holder_reg.
758 __ ldr(reg, FieldMemOperand(scratch1, Map::kPrototypeOffset)); 758 __ ldr(reg, FieldMemOperand(scratch1, Map::kPrototypeOffset));
759 } else { 759 } else {
(...skipping 38 matching lines...)
798 798
799 // Log the check depth. 799 // Log the check depth.
800 LOG(isolate(), IntEvent("check-maps-depth", depth + 1)); 800 LOG(isolate(), IntEvent("check-maps-depth", depth + 1));
801 801
802 if (depth != 0 || check == CHECK_ALL_MAPS) { 802 if (depth != 0 || check == CHECK_ALL_MAPS) {
803 // Check the holder map. 803 // Check the holder map.
804 __ CheckMap(reg, scratch1, current_map, miss, DONT_DO_SMI_CHECK); 804 __ CheckMap(reg, scratch1, current_map, miss, DONT_DO_SMI_CHECK);
805 } 805 }
806 806
807 // Perform security check for access to the global object. 807 // Perform security check for access to the global object.
808 ASSERT(current_map->IsJSGlobalProxyMap() || 808 DCHECK(current_map->IsJSGlobalProxyMap() ||
809 !current_map->is_access_check_needed()); 809 !current_map->is_access_check_needed());
810 if (current_map->IsJSGlobalProxyMap()) { 810 if (current_map->IsJSGlobalProxyMap()) {
811 __ CheckAccessGlobalProxy(reg, scratch1, miss); 811 __ CheckAccessGlobalProxy(reg, scratch1, miss);
812 } 812 }
813 813
814 // Return the register containing the holder. 814 // Return the register containing the holder.
815 return reg; 815 return reg;
816 } 816 }
817 817
818 818
(...skipping 20 matching lines...)
839 839
840 840
841 Register NamedLoadHandlerCompiler::CallbackFrontend(Register object_reg, 841 Register NamedLoadHandlerCompiler::CallbackFrontend(Register object_reg,
842 Handle<Name> name, 842 Handle<Name> name,
843 Handle<Object> callback) { 843 Handle<Object> callback) {
844 Label miss; 844 Label miss;
845 845
846 Register reg = FrontendHeader(object_reg, name, &miss); 846 Register reg = FrontendHeader(object_reg, name, &miss);
847 847
848 if (!holder()->HasFastProperties()) { 848 if (!holder()->HasFastProperties()) {
849 ASSERT(!holder()->IsGlobalObject()); 849 DCHECK(!holder()->IsGlobalObject());
850 ASSERT(!reg.is(scratch2())); 850 DCHECK(!reg.is(scratch2()));
851 ASSERT(!reg.is(scratch3())); 851 DCHECK(!reg.is(scratch3()));
852 ASSERT(!reg.is(scratch4())); 852 DCHECK(!reg.is(scratch4()));
853 853
854 // Load the properties dictionary. 854 // Load the properties dictionary.
855 Register dictionary = scratch4(); 855 Register dictionary = scratch4();
856 __ ldr(dictionary, FieldMemOperand(reg, JSObject::kPropertiesOffset)); 856 __ ldr(dictionary, FieldMemOperand(reg, JSObject::kPropertiesOffset));
857 857
858 // Probe the dictionary. 858 // Probe the dictionary.
859 Label probe_done; 859 Label probe_done;
860 NameDictionaryLookupStub::GeneratePositiveLookup(masm(), 860 NameDictionaryLookupStub::GeneratePositiveLookup(masm(),
861 &miss, 861 &miss,
862 &probe_done, 862 &probe_done,
(...skipping 38 matching lines...)
901 Register reg, Handle<ExecutableAccessorInfo> callback) { 901 Register reg, Handle<ExecutableAccessorInfo> callback) {
902 // Build AccessorInfo::args_ list on the stack and push property name below 902 // Build AccessorInfo::args_ list on the stack and push property name below
903 // the exit frame to make GC aware of them and store pointers to them. 903 // the exit frame to make GC aware of them and store pointers to them.
904 STATIC_ASSERT(PropertyCallbackArguments::kHolderIndex == 0); 904 STATIC_ASSERT(PropertyCallbackArguments::kHolderIndex == 0);
905 STATIC_ASSERT(PropertyCallbackArguments::kIsolateIndex == 1); 905 STATIC_ASSERT(PropertyCallbackArguments::kIsolateIndex == 1);
906 STATIC_ASSERT(PropertyCallbackArguments::kReturnValueDefaultValueIndex == 2); 906 STATIC_ASSERT(PropertyCallbackArguments::kReturnValueDefaultValueIndex == 2);
907 STATIC_ASSERT(PropertyCallbackArguments::kReturnValueOffset == 3); 907 STATIC_ASSERT(PropertyCallbackArguments::kReturnValueOffset == 3);
908 STATIC_ASSERT(PropertyCallbackArguments::kDataIndex == 4); 908 STATIC_ASSERT(PropertyCallbackArguments::kDataIndex == 4);
909 STATIC_ASSERT(PropertyCallbackArguments::kThisIndex == 5); 909 STATIC_ASSERT(PropertyCallbackArguments::kThisIndex == 5);
910 STATIC_ASSERT(PropertyCallbackArguments::kArgsLength == 6); 910 STATIC_ASSERT(PropertyCallbackArguments::kArgsLength == 6);
911 ASSERT(!scratch2().is(reg)); 911 DCHECK(!scratch2().is(reg));
912 ASSERT(!scratch3().is(reg)); 912 DCHECK(!scratch3().is(reg));
913 ASSERT(!scratch4().is(reg)); 913 DCHECK(!scratch4().is(reg));
914 __ push(receiver()); 914 __ push(receiver());
915 if (heap()->InNewSpace(callback->data())) { 915 if (heap()->InNewSpace(callback->data())) {
916 __ Move(scratch3(), callback); 916 __ Move(scratch3(), callback);
917 __ ldr(scratch3(), FieldMemOperand(scratch3(), 917 __ ldr(scratch3(), FieldMemOperand(scratch3(),
918 ExecutableAccessorInfo::kDataOffset)); 918 ExecutableAccessorInfo::kDataOffset));
919 } else { 919 } else {
920 __ Move(scratch3(), Handle<Object>(callback->data(), isolate())); 920 __ Move(scratch3(), Handle<Object>(callback->data(), isolate()));
921 } 921 }
922 __ push(scratch3()); 922 __ push(scratch3());
923 __ LoadRoot(scratch3(), Heap::kUndefinedValueRootIndex); 923 __ LoadRoot(scratch3(), Heap::kUndefinedValueRootIndex);
(...skipping 15 matching lines...)
939 __ mov(getter_address_reg, Operand(ref)); 939 __ mov(getter_address_reg, Operand(ref));
940 940
941 CallApiGetterStub stub(isolate()); 941 CallApiGetterStub stub(isolate());
942 __ TailCallStub(&stub); 942 __ TailCallStub(&stub);
943 } 943 }
944 944
945 945
946 void NamedLoadHandlerCompiler::GenerateLoadInterceptor(Register holder_reg, 946 void NamedLoadHandlerCompiler::GenerateLoadInterceptor(Register holder_reg,
947 LookupResult* lookup, 947 LookupResult* lookup,
948 Handle<Name> name) { 948 Handle<Name> name) {
949 ASSERT(holder()->HasNamedInterceptor()); 949 DCHECK(holder()->HasNamedInterceptor());
950 ASSERT(!holder()->GetNamedInterceptor()->getter()->IsUndefined()); 950 DCHECK(!holder()->GetNamedInterceptor()->getter()->IsUndefined());
951 951
952 // So far the most popular follow ups for interceptor loads are FIELD 952 // So far the most popular follow ups for interceptor loads are FIELD
953 // and CALLBACKS, so inline only them, other cases may be added 953 // and CALLBACKS, so inline only them, other cases may be added
954 // later. 954 // later.
955 bool compile_followup_inline = false; 955 bool compile_followup_inline = false;
956 if (lookup->IsFound() && lookup->IsCacheable()) { 956 if (lookup->IsFound() && lookup->IsCacheable()) {
957 if (lookup->IsField()) { 957 if (lookup->IsField()) {
958 compile_followup_inline = true; 958 compile_followup_inline = true;
959 } else if (lookup->type() == CALLBACKS && 959 } else if (lookup->type() == CALLBACKS &&
960 lookup->GetCallbackObject()->IsExecutableAccessorInfo()) { 960 lookup->GetCallbackObject()->IsExecutableAccessorInfo()) {
961 Handle<ExecutableAccessorInfo> callback( 961 Handle<ExecutableAccessorInfo> callback(
962 ExecutableAccessorInfo::cast(lookup->GetCallbackObject())); 962 ExecutableAccessorInfo::cast(lookup->GetCallbackObject()));
963 compile_followup_inline = 963 compile_followup_inline =
964 callback->getter() != NULL && 964 callback->getter() != NULL &&
965 ExecutableAccessorInfo::IsCompatibleReceiverType(isolate(), callback, 965 ExecutableAccessorInfo::IsCompatibleReceiverType(isolate(), callback,
966 type()); 966 type());
967 } 967 }
968 } 968 }
969 969
970 if (compile_followup_inline) { 970 if (compile_followup_inline) {
971 // Compile the interceptor call, followed by inline code to load the 971 // Compile the interceptor call, followed by inline code to load the
972 // property from further up the prototype chain if the call fails. 972 // property from further up the prototype chain if the call fails.
973 // Check that the maps haven't changed. 973 // Check that the maps haven't changed.
974 ASSERT(holder_reg.is(receiver()) || holder_reg.is(scratch1())); 974 DCHECK(holder_reg.is(receiver()) || holder_reg.is(scratch1()));
975 975
976 // Preserve the receiver register explicitly whenever it is different from 976 // Preserve the receiver register explicitly whenever it is different from
977 // the holder and it is needed should the interceptor return without any 977 // the holder and it is needed should the interceptor return without any
978 // result. The CALLBACKS case needs the receiver to be passed into C++ code, 978 // result. The CALLBACKS case needs the receiver to be passed into C++ code,
979 // the FIELD case might cause a miss during the prototype check. 979 // the FIELD case might cause a miss during the prototype check.
980 bool must_perfrom_prototype_check = *holder() != lookup->holder(); 980 bool must_perfrom_prototype_check = *holder() != lookup->holder();
981 bool must_preserve_receiver_reg = !receiver().is(holder_reg) && 981 bool must_preserve_receiver_reg = !receiver().is(holder_reg) &&
982 (lookup->type() == CALLBACKS || must_perfrom_prototype_check); 982 (lookup->type() == CALLBACKS || must_perfrom_prototype_check);
983 983
984 // Save necessary data before invoking an interceptor. 984 // Save necessary data before invoking an interceptor.
(...skipping 136 matching lines...)
1121 Register name = LoadIC::NameRegister(); 1121 Register name = LoadIC::NameRegister();
1122 static Register registers[] = { receiver, name, r3, r0, r4, r5 }; 1122 static Register registers[] = { receiver, name, r3, r0, r4, r5 };
1123 return registers; 1123 return registers;
1124 } 1124 }
1125 1125
1126 1126
1127 Register* PropertyAccessCompiler::store_calling_convention() { 1127 Register* PropertyAccessCompiler::store_calling_convention() {
1128 // receiver, name, scratch1, scratch2, scratch3. 1128 // receiver, name, scratch1, scratch2, scratch3.
1129 Register receiver = StoreIC::ReceiverRegister(); 1129 Register receiver = StoreIC::ReceiverRegister();
1130 Register name = StoreIC::NameRegister(); 1130 Register name = StoreIC::NameRegister();
1131 ASSERT(r3.is(KeyedStoreIC::MapRegister())); 1131 DCHECK(r3.is(KeyedStoreIC::MapRegister()));
1132 static Register registers[] = { receiver, name, r3, r4, r5 }; 1132 static Register registers[] = { receiver, name, r3, r4, r5 };
1133 return registers; 1133 return registers;
1134 } 1134 }
1135 1135
1136 1136
1137 Register NamedStoreHandlerCompiler::value() { return StoreIC::ValueRegister(); } 1137 Register NamedStoreHandlerCompiler::value() { return StoreIC::ValueRegister(); }
1138 1138
1139 1139
1140 #undef __ 1140 #undef __
1141 #define __ ACCESS_MASM(masm) 1141 #define __ ACCESS_MASM(masm)
(...skipping 85 matching lines...)
1227 __ b(ne, &miss); 1227 __ b(ne, &miss);
1228 } 1228 }
1229 } 1229 }
1230 1230
1231 Label number_case; 1231 Label number_case;
1232 Label* smi_target = IncludesNumberType(types) ? &number_case : &miss; 1232 Label* smi_target = IncludesNumberType(types) ? &number_case : &miss;
1233 __ JumpIfSmi(receiver(), smi_target); 1233 __ JumpIfSmi(receiver(), smi_target);
1234 1234
1235 // Polymorphic keyed stores may use the map register 1235 // Polymorphic keyed stores may use the map register
1236 Register map_reg = scratch1(); 1236 Register map_reg = scratch1();
1237 ASSERT(kind() != Code::KEYED_STORE_IC || 1237 DCHECK(kind() != Code::KEYED_STORE_IC ||
1238 map_reg.is(KeyedStoreIC::MapRegister())); 1238 map_reg.is(KeyedStoreIC::MapRegister()));
1239 1239
1240 int receiver_count = types->length(); 1240 int receiver_count = types->length();
1241 int number_of_handled_maps = 0; 1241 int number_of_handled_maps = 0;
1242 __ ldr(map_reg, FieldMemOperand(receiver(), HeapObject::kMapOffset)); 1242 __ ldr(map_reg, FieldMemOperand(receiver(), HeapObject::kMapOffset));
1243 for (int current = 0; current < receiver_count; ++current) { 1243 for (int current = 0; current < receiver_count; ++current) {
1244 Handle<HeapType> type = types->at(current); 1244 Handle<HeapType> type = types->at(current);
1245 Handle<Map> map = IC::TypeToMap(*type, isolate()); 1245 Handle<Map> map = IC::TypeToMap(*type, isolate());
1246 if (!map->is_deprecated()) { 1246 if (!map->is_deprecated()) {
1247 number_of_handled_maps++; 1247 number_of_handled_maps++;
1248 __ mov(ip, Operand(map)); 1248 __ mov(ip, Operand(map));
1249 __ cmp(map_reg, ip); 1249 __ cmp(map_reg, ip);
1250 if (type->Is(HeapType::Number())) { 1250 if (type->Is(HeapType::Number())) {
1251 ASSERT(!number_case.is_unused()); 1251 DCHECK(!number_case.is_unused());
1252 __ bind(&number_case); 1252 __ bind(&number_case);
1253 } 1253 }
1254 __ Jump(handlers->at(current), RelocInfo::CODE_TARGET, eq); 1254 __ Jump(handlers->at(current), RelocInfo::CODE_TARGET, eq);
1255 } 1255 }
1256 } 1256 }
1257 ASSERT(number_of_handled_maps != 0); 1257 DCHECK(number_of_handled_maps != 0);
1258 1258
1259 __ bind(&miss); 1259 __ bind(&miss);
1260 TailCallBuiltin(masm(), MissBuiltin(kind())); 1260 TailCallBuiltin(masm(), MissBuiltin(kind()));
1261 1261
1262 // Return the generated code. 1262 // Return the generated code.
1263 InlineCacheState state = 1263 InlineCacheState state =
1264 number_of_handled_maps > 1 ? POLYMORPHIC : MONOMORPHIC; 1264 number_of_handled_maps > 1 ? POLYMORPHIC : MONOMORPHIC;
1265 return GetCode(kind(), type, name, state); 1265 return GetCode(kind(), type, name, state);
1266 } 1266 }
1267 1267
(...skipping 32 matching lines...)
1300 #define __ ACCESS_MASM(masm) 1300 #define __ ACCESS_MASM(masm)
1301 1301
1302 1302
1303 void ElementHandlerCompiler::GenerateLoadDictionaryElement( 1303 void ElementHandlerCompiler::GenerateLoadDictionaryElement(
1304 MacroAssembler* masm) { 1304 MacroAssembler* masm) {
1305 // The return address is in lr. 1305 // The return address is in lr.
1306 Label slow, miss; 1306 Label slow, miss;
1307 1307
1308 Register key = LoadIC::NameRegister(); 1308 Register key = LoadIC::NameRegister();
1309 Register receiver = LoadIC::ReceiverRegister(); 1309 Register receiver = LoadIC::ReceiverRegister();
1310 ASSERT(receiver.is(r1)); 1310 DCHECK(receiver.is(r1));
1311 ASSERT(key.is(r2)); 1311 DCHECK(key.is(r2));
1312 1312
1313 __ UntagAndJumpIfNotSmi(r6, key, &miss); 1313 __ UntagAndJumpIfNotSmi(r6, key, &miss);
1314 __ ldr(r4, FieldMemOperand(receiver, JSObject::kElementsOffset)); 1314 __ ldr(r4, FieldMemOperand(receiver, JSObject::kElementsOffset));
1315 __ LoadFromNumberDictionary(&slow, r4, key, r0, r6, r3, r5); 1315 __ LoadFromNumberDictionary(&slow, r4, key, r0, r6, r3, r5);
1316 __ Ret(); 1316 __ Ret();
1317 1317
1318 __ bind(&slow); 1318 __ bind(&slow);
1319 __ IncrementCounter( 1319 __ IncrementCounter(
1320 masm->isolate()->counters()->keyed_load_external_array_slow(), 1320 masm->isolate()->counters()->keyed_load_external_array_slow(),
1321 1, r2, r3); 1321 1, r2, r3);
1322 1322
1323 TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Slow); 1323 TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Slow);
1324 1324
1325 // Miss case, call the runtime. 1325 // Miss case, call the runtime.
1326 __ bind(&miss); 1326 __ bind(&miss);
1327 1327
1328 TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Miss); 1328 TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Miss);
1329 } 1329 }
1330 1330
1331 1331
1332 #undef __ 1332 #undef __
1333 1333
1334 } } // namespace v8::internal 1334 } } // namespace v8::internal
1335 1335
1336 #endif // V8_TARGET_ARCH_ARM 1336 #endif // V8_TARGET_ARCH_ARM
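
One structural fact the ARM probe code above leans on, flagged by DCHECK(sizeof(Entry) == 12) and the "Multiply by 3 because there are 3 fields per entry" comment: a stub-cache entry is three word-sized fields (name, code, map), so it is 12 bytes on 32-bit ARM, and an entry index is scaled to a table offset by multiplying by 3, which the single add with an LSL #1 shifted operand implements. A minimal sketch of that shape, with illustrative names only (not the actual declarations in V8's stub-cache header):

// Three word-sized fields per entry: the probed name, the handler code
// object, and the receiver map. On a 32-bit target this is 12 bytes.
struct Entry {
  void* key;    // Name* in the real table
  void* value;  // Code* in the real table
  void* map;    // Map* in the real table
};
static_assert(sizeof(Entry) == 3 * sizeof(void*), "three fields per entry");

// Mirrors the shift-add idiom in ProbeTable: offset * 3 == offset + (offset << 1).
inline unsigned ScaleEntryOffset(unsigned offset) {
  return offset + (offset << 1);
}

Because the key, value, and map references passed to ProbeTable point into the same entry array, their address deltas are just these small intra-entry field offsets, which is what the alignment and range DCHECKs at the top of the function verify.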