Chromium Code Reviews

Side by Side Diff: src/x64/stub-cache-x64.cc

Issue 6685088: Merge isolates to bleeding_edge. (Closed) Base URL: http://v8.googlecode.com/svn/branches/bleeding_edge/
Patch Set: '' Created 9 years, 9 months ago
1 // Copyright 2011 the V8 project authors. All rights reserved. 1 // Copyright 2011 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 21 matching lines...)
32 #include "ic-inl.h" 32 #include "ic-inl.h"
33 #include "codegen-inl.h" 33 #include "codegen-inl.h"
34 #include "stub-cache.h" 34 #include "stub-cache.h"
35 35
36 namespace v8 { 36 namespace v8 {
37 namespace internal { 37 namespace internal {
38 38
39 #define __ ACCESS_MASM(masm) 39 #define __ ACCESS_MASM(masm)
40 40
41 41
42 static void ProbeTable(MacroAssembler* masm, 42 static void ProbeTable(Isolate* isolate,
43 MacroAssembler* masm,
43 Code::Flags flags, 44 Code::Flags flags,
44 StubCache::Table table, 45 StubCache::Table table,
45 Register name, 46 Register name,
46 Register offset) { 47 Register offset) {
47 ASSERT_EQ(8, kPointerSize); 48 ASSERT_EQ(8, kPointerSize);
48 ASSERT_EQ(16, sizeof(StubCache::Entry)); 49 ASSERT_EQ(16, sizeof(StubCache::Entry));
49 // The offset register holds the entry offset times four (due to masking 50 // The offset register holds the entry offset times four (due to masking
50 // and shifting optimizations). 51 // and shifting optimizations).
51 ExternalReference key_offset(SCTableReference::keyReference(table)); 52 ExternalReference key_offset(isolate->stub_cache()->key_reference(table));
52 Label miss; 53 Label miss;
53 54
54 __ movq(kScratchRegister, key_offset); 55 __ movq(kScratchRegister, key_offset);
55 // Check that the key in the entry matches the name. 56 // Check that the key in the entry matches the name.
56 // Multiply entry offset by 16 to get the entry address. Since the 57 // Multiply entry offset by 16 to get the entry address. Since the
57 // offset register already holds the entry offset times four, multiply 58 // offset register already holds the entry offset times four, multiply
58 // by a further four. 59 // by a further four.
59 __ cmpl(name, Operand(kScratchRegister, offset, times_4, 0)); 60 __ cmpl(name, Operand(kScratchRegister, offset, times_4, 0));
60 __ j(not_equal, &miss); 61 __ j(not_equal, &miss);
61 // Get the code entry from the cache. 62 // Get the code entry from the cache.
(...skipping 19 matching lines...)
81 // must always call a backup property check that is complete. 82 // must always call a backup property check that is complete.
82 // This function is safe to call if the receiver has fast properties. 83 // This function is safe to call if the receiver has fast properties.
83 // Name must be a symbol and receiver must be a heap object. 84 // Name must be a symbol and receiver must be a heap object.
84 static void GenerateDictionaryNegativeLookup(MacroAssembler* masm, 85 static void GenerateDictionaryNegativeLookup(MacroAssembler* masm,
85 Label* miss_label, 86 Label* miss_label,
86 Register receiver, 87 Register receiver,
87 String* name, 88 String* name,
88 Register r0, 89 Register r0,
89 Register r1) { 90 Register r1) {
90 ASSERT(name->IsSymbol()); 91 ASSERT(name->IsSymbol());
91 __ IncrementCounter(&Counters::negative_lookups, 1); 92 __ IncrementCounter(COUNTERS->negative_lookups(), 1);
92 __ IncrementCounter(&Counters::negative_lookups_miss, 1); 93 __ IncrementCounter(COUNTERS->negative_lookups_miss(), 1);
93 94
94 Label done; 95 Label done;
95 __ movq(r0, FieldOperand(receiver, HeapObject::kMapOffset)); 96 __ movq(r0, FieldOperand(receiver, HeapObject::kMapOffset));
96 97
97 const int kInterceptorOrAccessCheckNeededMask = 98 const int kInterceptorOrAccessCheckNeededMask =
98 (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded); 99 (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded);
99 100
100 // Bail out if the receiver has a named interceptor or requires access checks. 101 // Bail out if the receiver has a named interceptor or requires access checks.
101 __ testb(FieldOperand(r0, Map::kBitFieldOffset), 102 __ testb(FieldOperand(r0, Map::kBitFieldOffset),
102 Immediate(kInterceptorOrAccessCheckNeededMask)); 103 Immediate(kInterceptorOrAccessCheckNeededMask));
(...skipping 41 matching lines...)
144 145
145 // Scale the index by multiplying by the entry size. 146 // Scale the index by multiplying by the entry size.
146 ASSERT(StringDictionary::kEntrySize == 3); 147 ASSERT(StringDictionary::kEntrySize == 3);
147 __ lea(index, Operand(index, index, times_2, 0)); // index *= 3. 148 __ lea(index, Operand(index, index, times_2, 0)); // index *= 3.
148 149
149 Register entity_name = r1; 150 Register entity_name = r1;
150 // Having undefined at this place means the name is not contained. 151 // Having undefined at this place means the name is not contained.
151 ASSERT_EQ(kSmiTagSize, 1); 152 ASSERT_EQ(kSmiTagSize, 1);
152 __ movq(entity_name, Operand(properties, index, times_pointer_size, 153 __ movq(entity_name, Operand(properties, index, times_pointer_size,
153 kElementsStartOffset - kHeapObjectTag)); 154 kElementsStartOffset - kHeapObjectTag));
154 __ Cmp(entity_name, Factory::undefined_value()); 155 __ Cmp(entity_name, FACTORY->undefined_value());
155 // __ jmp(miss_label); 156 // __ jmp(miss_label);
156 if (i != kProbes - 1) { 157 if (i != kProbes - 1) {
157 __ j(equal, &done); 158 __ j(equal, &done);
158 159
159 // Stop if found the property. 160 // Stop if found the property.
160 __ Cmp(entity_name, Handle<String>(name)); 161 __ Cmp(entity_name, Handle<String>(name));
161 __ j(equal, miss_label); 162 __ j(equal, miss_label);
162 163
163 // Check if the entry name is not a symbol. 164 // Check if the entry name is not a symbol.
164 __ movq(entity_name, FieldOperand(entity_name, HeapObject::kMapOffset)); 165 __ movq(entity_name, FieldOperand(entity_name, HeapObject::kMapOffset));
165 __ testb(FieldOperand(entity_name, Map::kInstanceTypeOffset), 166 __ testb(FieldOperand(entity_name, Map::kInstanceTypeOffset),
166 Immediate(kIsSymbolMask)); 167 Immediate(kIsSymbolMask));
167 __ j(zero, miss_label); 168 __ j(zero, miss_label);
168 } else { 169 } else {
169 // Give up probing if still not found the undefined value. 170 // Give up probing if still not found the undefined value.
170 __ j(not_equal, miss_label); 171 __ j(not_equal, miss_label);
171 } 172 }
172 } 173 }
173 174
174 __ bind(&done); 175 __ bind(&done);
175 __ DecrementCounter(&Counters::negative_lookups_miss, 1); 176 __ DecrementCounter(COUNTERS->negative_lookups_miss(), 1);
176 } 177 }
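For reference, the negative-lookup probe above only has to prove absence of the name. Below is a minimal sketch of the decision it makes, written as plain C++; the loop shape, the flat elements array and the probe_offset helper are assumptions for illustration and are not the V8 implementation, while kProbes and StringDictionary::kEntrySize mirror the constants used by the generated code.

    // Sketch only: what a successful negative lookup establishes.
    static bool NameProvablyAbsent(Object** elements, uint32_t capacity_mask,
                                   uint32_t hash, String* name,
                                   Object* undefined_value) {
      for (int i = 0; i < kProbes; i++) {
        // probe_offset(i) is a hypothetical stand-in for the probing schedule.
        uint32_t entry = (hash + probe_offset(i)) & capacity_mask;
        Object* key = elements[entry * StringDictionary::kEntrySize];
        if (key == undefined_value) return true;  // empty slot: the name cannot be present
        if (key == name) return false;            // name found: must take the miss path
        if (!key->IsSymbol()) return false;       // non-symbol key: pointer comparison is unsafe
      }
      return false;  // out of probes without proof of absence: also a miss
    }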
177 178
178 179
179 void StubCache::GenerateProbe(MacroAssembler* masm, 180 void StubCache::GenerateProbe(MacroAssembler* masm,
180 Code::Flags flags, 181 Code::Flags flags,
181 Register receiver, 182 Register receiver,
182 Register name, 183 Register name,
183 Register scratch, 184 Register scratch,
184 Register extra, 185 Register extra,
185 Register extra2) { 186 Register extra2) {
187 Isolate* isolate = Isolate::Current();
186 Label miss; 188 Label miss;
187 USE(extra); // The register extra is not used on the X64 platform. 189 USE(extra); // The register extra is not used on the X64 platform.
188 USE(extra2); // The register extra2 is not used on the X64 platform. 190 USE(extra2); // The register extra2 is not used on the X64 platform.
189 // Make sure that code is valid. The shifting code relies on the 191 // Make sure that code is valid. The shifting code relies on the
190 // entry size being 16. 192 // entry size being 16.
191 ASSERT(sizeof(Entry) == 16); 193 ASSERT(sizeof(Entry) == 16);
192 194
193 // Make sure the flags do not name a specific type. 195 // Make sure the flags do not name a specific type.
194 ASSERT(Code::ExtractTypeFromFlags(flags) == 0); 196 ASSERT(Code::ExtractTypeFromFlags(flags) == 0);
195 197
196 // Make sure that there are no register conflicts. 198 // Make sure that there are no register conflicts.
197 ASSERT(!scratch.is(receiver)); 199 ASSERT(!scratch.is(receiver));
198 ASSERT(!scratch.is(name)); 200 ASSERT(!scratch.is(name));
199 201
200 // Check scratch register is valid, extra and extra2 are unused. 202 // Check scratch register is valid, extra and extra2 are unused.
201 ASSERT(!scratch.is(no_reg)); 203 ASSERT(!scratch.is(no_reg));
202 ASSERT(extra2.is(no_reg)); 204 ASSERT(extra2.is(no_reg));
203 205
204 // Check that the receiver isn't a smi. 206 // Check that the receiver isn't a smi.
205 __ JumpIfSmi(receiver, &miss); 207 __ JumpIfSmi(receiver, &miss);
206 208
207 // Get the map of the receiver and compute the hash. 209 // Get the map of the receiver and compute the hash.
208 __ movl(scratch, FieldOperand(name, String::kHashFieldOffset)); 210 __ movl(scratch, FieldOperand(name, String::kHashFieldOffset));
209 // Use only the low 32 bits of the map pointer. 211 // Use only the low 32 bits of the map pointer.
210 __ addl(scratch, FieldOperand(receiver, HeapObject::kMapOffset)); 212 __ addl(scratch, FieldOperand(receiver, HeapObject::kMapOffset));
211 __ xor_(scratch, Immediate(flags)); 213 __ xor_(scratch, Immediate(flags));
212 __ and_(scratch, Immediate((kPrimaryTableSize - 1) << kHeapObjectTagSize)); 214 __ and_(scratch, Immediate((kPrimaryTableSize - 1) << kHeapObjectTagSize));
213 215
214 // Probe the primary table. 216 // Probe the primary table.
215 ProbeTable(masm, flags, kPrimary, name, scratch); 217 ProbeTable(isolate, masm, flags, kPrimary, name, scratch);
216 218
217 // Primary miss: Compute hash for secondary probe. 219 // Primary miss: Compute hash for secondary probe.
218 __ movl(scratch, FieldOperand(name, String::kHashFieldOffset)); 220 __ movl(scratch, FieldOperand(name, String::kHashFieldOffset));
219 __ addl(scratch, FieldOperand(receiver, HeapObject::kMapOffset)); 221 __ addl(scratch, FieldOperand(receiver, HeapObject::kMapOffset));
220 __ xor_(scratch, Immediate(flags)); 222 __ xor_(scratch, Immediate(flags));
221 __ and_(scratch, Immediate((kPrimaryTableSize - 1) << kHeapObjectTagSize)); 223 __ and_(scratch, Immediate((kPrimaryTableSize - 1) << kHeapObjectTagSize));
222 __ subl(scratch, name); 224 __ subl(scratch, name);
223 __ addl(scratch, Immediate(flags)); 225 __ addl(scratch, Immediate(flags));
224 __ and_(scratch, Immediate((kSecondaryTableSize - 1) << kHeapObjectTagSize)); 226 __ and_(scratch, Immediate((kSecondaryTableSize - 1) << kHeapObjectTagSize));
225 227
226 // Probe the secondary table. 228 // Probe the secondary table.
227 ProbeTable(masm, flags, kSecondary, name, scratch); 229 ProbeTable(isolate, masm, flags, kSecondary, name, scratch);
228 230
229 // Cache miss: Fall-through and let caller handle the miss by 231 // Cache miss: Fall-through and let caller handle the miss by
230 // entering the runtime system. 232 // entering the runtime system.
231 __ bind(&miss); 233 __ bind(&miss);
232 } 234 }
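For readers following the probe sequence, the hashing above can be modeled in plain C++ as follows. The two helper functions and integer types are illustrative only and not part of this patch; kPrimaryTableSize, kSecondaryTableSize and kHeapObjectTagSize are assumed to be the stub-cache.h constants the generated code references, and every operand is the low 32 bits of the corresponding word, matching the movl/addl/xor_/and_ sequence.

    #include <stdint.h>

    // Sketch only: primary offset = ((name hash + receiver map) ^ flags),
    // masked so it stays pre-scaled by the heap-object tag size.
    static uint32_t PrimaryProbeOffset(uint32_t name_hash_field,
                                       uint32_t receiver_map,
                                       uint32_t flags) {
      uint32_t key = (name_hash_field + receiver_map) ^ flags;
      return key & ((kPrimaryTableSize - 1) << kHeapObjectTagSize);
    }

    // Sketch only: the secondary probe reuses the masked primary offset,
    // subtracts the name pointer and re-adds the flags before masking
    // with the smaller secondary table.
    static uint32_t SecondaryProbeOffset(uint32_t primary_offset,
                                         uint32_t name,
                                         uint32_t flags) {
      uint32_t key = primary_offset - name + flags;
      return key & ((kSecondaryTableSize - 1) << kHeapObjectTagSize);
    }

ProbeTable then scales this offset, which already encodes the entry index times four, by a further times_4 to reach the 16-byte StubCache::Entry, as noted in its comments.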
233 235
234 236
235 void StubCompiler::GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm, 237 void StubCompiler::GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm,
236 int index, 238 int index,
237 Register prototype) { 239 Register prototype) {
238 // Load the global or builtins object from the current context. 240 // Load the global or builtins object from the current context.
239 __ movq(prototype, 241 __ movq(prototype,
240 Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX))); 242 Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
241 // Load the global context from the global or builtins object. 243 // Load the global context from the global or builtins object.
242 __ movq(prototype, 244 __ movq(prototype,
243 FieldOperand(prototype, GlobalObject::kGlobalContextOffset)); 245 FieldOperand(prototype, GlobalObject::kGlobalContextOffset));
244 // Load the function from the global context. 246 // Load the function from the global context.
245 __ movq(prototype, Operand(prototype, Context::SlotOffset(index))); 247 __ movq(prototype, Operand(prototype, Context::SlotOffset(index)));
246 // Load the initial map. The global functions all have initial maps. 248 // Load the initial map. The global functions all have initial maps.
247 __ movq(prototype, 249 __ movq(prototype,
248 FieldOperand(prototype, JSFunction::kPrototypeOrInitialMapOffset)); 250 FieldOperand(prototype, JSFunction::kPrototypeOrInitialMapOffset));
249 // Load the prototype from the initial map. 251 // Load the prototype from the initial map.
250 __ movq(prototype, FieldOperand(prototype, Map::kPrototypeOffset)); 252 __ movq(prototype, FieldOperand(prototype, Map::kPrototypeOffset));
251 } 253 }
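A hedged sketch of the same dereference chain in ordinary C++, to make the five movq instructions easier to follow; the accessor names are assumed from the surrounding V8 headers rather than introduced by this patch.

    // Sketch only: context -> global object -> global context -> function
    //              -> initial map -> prototype.
    static Object* LoadGlobalFunctionPrototype(Context* context, int index) {
      GlobalObject* global =
          GlobalObject::cast(context->get(Context::GLOBAL_INDEX));
      Context* global_context = global->global_context();
      JSFunction* function = JSFunction::cast(global_context->get(index));
      Map* initial_map = function->initial_map();  // global functions always have one
      return initial_map->prototype();             // e.g. Array.prototype for the Array function
    }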
252 254
253 255
254 void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype( 256 void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype(
255 MacroAssembler* masm, int index, Register prototype, Label* miss) { 257 MacroAssembler* masm, int index, Register prototype, Label* miss) {
256 // Check we're still in the same context. 258 // Check we're still in the same context.
257 __ Move(prototype, Top::global()); 259 __ Move(prototype, Isolate::Current()->global());
258 __ cmpq(Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)), 260 __ cmpq(Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)),
259 prototype); 261 prototype);
260 __ j(not_equal, miss); 262 __ j(not_equal, miss);
261 // Get the global function with the given index. 263 // Get the global function with the given index.
262 JSFunction* function = JSFunction::cast(Top::global_context()->get(index)); 264 JSFunction* function = JSFunction::cast(
265 Isolate::Current()->global_context()->get(index));
263 // Load its initial map. The global functions all have initial maps. 266 // Load its initial map. The global functions all have initial maps.
264 __ Move(prototype, Handle<Map>(function->initial_map())); 267 __ Move(prototype, Handle<Map>(function->initial_map()));
265 // Load the prototype from the initial map. 268 // Load the prototype from the initial map.
266 __ movq(prototype, FieldOperand(prototype, Map::kPrototypeOffset)); 269 __ movq(prototype, FieldOperand(prototype, Map::kPrototypeOffset));
267 } 270 }
268 271
269 272
270 void StubCompiler::GenerateLoadArrayLength(MacroAssembler* masm, 273 void StubCompiler::GenerateLoadArrayLength(MacroAssembler* masm,
271 Register receiver, 274 Register receiver,
272 Register scratch, 275 Register scratch,
(...skipping 95 matching lines...)
368 } 371 }
369 372
370 373
371 static void PushInterceptorArguments(MacroAssembler* masm, 374 static void PushInterceptorArguments(MacroAssembler* masm,
372 Register receiver, 375 Register receiver,
373 Register holder, 376 Register holder,
374 Register name, 377 Register name,
375 JSObject* holder_obj) { 378 JSObject* holder_obj) {
376 __ push(name); 379 __ push(name);
377 InterceptorInfo* interceptor = holder_obj->GetNamedInterceptor(); 380 InterceptorInfo* interceptor = holder_obj->GetNamedInterceptor();
378 ASSERT(!Heap::InNewSpace(interceptor)); 381 ASSERT(!HEAP->InNewSpace(interceptor));
379 __ Move(kScratchRegister, Handle<Object>(interceptor)); 382 __ Move(kScratchRegister, Handle<Object>(interceptor));
380 __ push(kScratchRegister); 383 __ push(kScratchRegister);
381 __ push(receiver); 384 __ push(receiver);
382 __ push(holder); 385 __ push(holder);
383 __ push(FieldOperand(kScratchRegister, InterceptorInfo::kDataOffset)); 386 __ push(FieldOperand(kScratchRegister, InterceptorInfo::kDataOffset));
384 } 387 }
385 388
386 389
387 static void CompileCallLoadPropertyWithInterceptor(MacroAssembler* masm, 390 static void CompileCallLoadPropertyWithInterceptor(MacroAssembler* masm,
388 Register receiver, 391 Register receiver,
(...skipping 70 matching lines...)
459 // ----------------------------------- 462 // -----------------------------------
460 // Get the function and setup the context. 463 // Get the function and setup the context.
461 JSFunction* function = optimization.constant_function(); 464 JSFunction* function = optimization.constant_function();
462 __ Move(rdi, Handle<JSFunction>(function)); 465 __ Move(rdi, Handle<JSFunction>(function));
463 __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset)); 466 __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
464 467
465 // Pass the additional arguments. 468 // Pass the additional arguments.
466 __ movq(Operand(rsp, 2 * kPointerSize), rdi); 469 __ movq(Operand(rsp, 2 * kPointerSize), rdi);
467 Object* call_data = optimization.api_call_info()->data(); 470 Object* call_data = optimization.api_call_info()->data();
468 Handle<CallHandlerInfo> api_call_info_handle(optimization.api_call_info()); 471 Handle<CallHandlerInfo> api_call_info_handle(optimization.api_call_info());
469 if (Heap::InNewSpace(call_data)) { 472 if (HEAP->InNewSpace(call_data)) {
470 __ Move(rcx, api_call_info_handle); 473 __ Move(rcx, api_call_info_handle);
471 __ movq(rbx, FieldOperand(rcx, CallHandlerInfo::kDataOffset)); 474 __ movq(rbx, FieldOperand(rcx, CallHandlerInfo::kDataOffset));
472 __ movq(Operand(rsp, 3 * kPointerSize), rbx); 475 __ movq(Operand(rsp, 3 * kPointerSize), rbx);
473 } else { 476 } else {
474 __ Move(Operand(rsp, 3 * kPointerSize), Handle<Object>(call_data)); 477 __ Move(Operand(rsp, 3 * kPointerSize), Handle<Object>(call_data));
475 } 478 }
476 479
477 // Prepare arguments. 480 // Prepare arguments.
478 __ lea(rbx, Operand(rsp, 3 * kPointerSize)); 481 __ lea(rbx, Operand(rsp, 3 * kPointerSize));
479 482
(...skipping 74 matching lines...)
554 } else { 557 } else {
555 CompileRegular(masm, 558 CompileRegular(masm,
556 object, 559 object,
557 receiver, 560 receiver,
558 scratch1, 561 scratch1,
559 scratch2, 562 scratch2,
560 scratch3, 563 scratch3,
561 name, 564 name,
562 holder, 565 holder,
563 miss); 566 miss);
564 return Heap::undefined_value(); // Success. 567 return HEAP->undefined_value(); // Success.
565 } 568 }
566 } 569 }
567 570
568 private: 571 private:
569 MaybeObject* CompileCacheable(MacroAssembler* masm, 572 MaybeObject* CompileCacheable(MacroAssembler* masm,
570 JSObject* object, 573 JSObject* object,
571 Register receiver, 574 Register receiver,
572 Register scratch1, 575 Register scratch1,
573 Register scratch2, 576 Register scratch2,
574 Register scratch3, 577 Register scratch3,
(...skipping 15 matching lines...)
590 interceptor_holder); 593 interceptor_holder);
591 if (depth1 == kInvalidProtoDepth) { 594 if (depth1 == kInvalidProtoDepth) {
592 depth2 = 595 depth2 =
593 optimization.GetPrototypeDepthOfExpectedType(interceptor_holder, 596 optimization.GetPrototypeDepthOfExpectedType(interceptor_holder,
594 lookup->holder()); 597 lookup->holder());
595 } 598 }
596 can_do_fast_api_call = (depth1 != kInvalidProtoDepth) || 599 can_do_fast_api_call = (depth1 != kInvalidProtoDepth) ||
597 (depth2 != kInvalidProtoDepth); 600 (depth2 != kInvalidProtoDepth);
598 } 601 }
599 602
600 __ IncrementCounter(&Counters::call_const_interceptor, 1); 603 __ IncrementCounter(COUNTERS->call_const_interceptor(), 1);
601 604
602 if (can_do_fast_api_call) { 605 if (can_do_fast_api_call) {
603 __ IncrementCounter(&Counters::call_const_interceptor_fast_api, 1); 606 __ IncrementCounter(COUNTERS->call_const_interceptor_fast_api(), 1);
604 ReserveSpaceForFastApiCall(masm, scratch1); 607 ReserveSpaceForFastApiCall(masm, scratch1);
605 } 608 }
606 609
607 // Check that the maps from receiver to interceptor's holder 610 // Check that the maps from receiver to interceptor's holder
608 // haven't changed and thus we can invoke interceptor. 611 // haven't changed and thus we can invoke interceptor.
609 Label miss_cleanup; 612 Label miss_cleanup;
610 Label* miss = can_do_fast_api_call ? &miss_cleanup : miss_label; 613 Label* miss = can_do_fast_api_call ? &miss_cleanup : miss_label;
611 Register holder = 614 Register holder =
612 stub_compiler_->CheckPrototypes(object, receiver, 615 stub_compiler_->CheckPrototypes(object, receiver,
613 interceptor_holder, scratch1, 616 interceptor_holder, scratch1,
(...skipping 39 matching lines...)
653 FreeSpaceForFastApiCall(masm, scratch1); 656 FreeSpaceForFastApiCall(masm, scratch1);
654 __ jmp(miss_label); 657 __ jmp(miss_label);
655 } 658 }
656 659
657 // Invoke a regular function. 660 // Invoke a regular function.
658 __ bind(&regular_invoke); 661 __ bind(&regular_invoke);
659 if (can_do_fast_api_call) { 662 if (can_do_fast_api_call) {
660 FreeSpaceForFastApiCall(masm, scratch1); 663 FreeSpaceForFastApiCall(masm, scratch1);
661 } 664 }
662 665
663 return Heap::undefined_value(); // Success. 666 return HEAP->undefined_value(); // Success.
664 } 667 }
665 668
666 void CompileRegular(MacroAssembler* masm, 669 void CompileRegular(MacroAssembler* masm,
667 JSObject* object, 670 JSObject* object,
668 Register receiver, 671 Register receiver,
669 Register scratch1, 672 Register scratch1,
670 Register scratch2, 673 Register scratch2,
671 Register scratch3, 674 Register scratch3,
672 String* name, 675 String* name,
673 JSObject* interceptor_holder, 676 JSObject* interceptor_holder,
(...skipping 48 matching lines...)
722 StubCompiler* stub_compiler_; 725 StubCompiler* stub_compiler_;
723 const ParameterCount& arguments_; 726 const ParameterCount& arguments_;
724 Register name_; 727 Register name_;
725 }; 728 };
726 729
727 730
728 void StubCompiler::GenerateLoadMiss(MacroAssembler* masm, Code::Kind kind) { 731 void StubCompiler::GenerateLoadMiss(MacroAssembler* masm, Code::Kind kind) {
729 ASSERT(kind == Code::LOAD_IC || kind == Code::KEYED_LOAD_IC); 732 ASSERT(kind == Code::LOAD_IC || kind == Code::KEYED_LOAD_IC);
730 Code* code = NULL; 733 Code* code = NULL;
731 if (kind == Code::LOAD_IC) { 734 if (kind == Code::LOAD_IC) {
732 code = Builtins::builtin(Builtins::LoadIC_Miss); 735 code = Isolate::Current()->builtins()->builtin(Builtins::LoadIC_Miss);
733 } else { 736 } else {
734 code = Builtins::builtin(Builtins::KeyedLoadIC_Miss); 737 code = Isolate::Current()->builtins()->builtin(Builtins::KeyedLoadIC_Miss);
735 } 738 }
736 739
737 Handle<Code> ic(code); 740 Handle<Code> ic(code);
738 __ Jump(ic, RelocInfo::CODE_TARGET); 741 __ Jump(ic, RelocInfo::CODE_TARGET);
739 } 742 }
740 743
741 744
742 // Both name_reg and receiver_reg are preserved on jumps to miss_label, 745 // Both name_reg and receiver_reg are preserved on jumps to miss_label,
743 // but may be destroyed if store is successful. 746 // but may be destroyed if store is successful.
744 void StubCompiler::GenerateStoreField(MacroAssembler* masm, 747 void StubCompiler::GenerateStoreField(MacroAssembler* masm,
(...skipping 84 matching lines...)
829 Register scratch, 832 Register scratch,
830 Label* miss) { 833 Label* miss) {
831 Object* probe; 834 Object* probe;
832 { MaybeObject* maybe_probe = global->EnsurePropertyCell(name); 835 { MaybeObject* maybe_probe = global->EnsurePropertyCell(name);
833 if (!maybe_probe->ToObject(&probe)) return maybe_probe; 836 if (!maybe_probe->ToObject(&probe)) return maybe_probe;
834 } 837 }
835 JSGlobalPropertyCell* cell = JSGlobalPropertyCell::cast(probe); 838 JSGlobalPropertyCell* cell = JSGlobalPropertyCell::cast(probe);
836 ASSERT(cell->value()->IsTheHole()); 839 ASSERT(cell->value()->IsTheHole());
837 __ Move(scratch, Handle<Object>(cell)); 840 __ Move(scratch, Handle<Object>(cell));
838 __ Cmp(FieldOperand(scratch, JSGlobalPropertyCell::kValueOffset), 841 __ Cmp(FieldOperand(scratch, JSGlobalPropertyCell::kValueOffset),
839 Factory::the_hole_value()); 842 FACTORY->the_hole_value());
840 __ j(not_equal, miss); 843 __ j(not_equal, miss);
841 return cell; 844 return cell;
842 } 845 }
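In other words, the stub bakes in a pointer to the property's global cell and remains valid only while that cell still holds the hole. A one-line sketch of the invariant, with the accessor assumed from the surrounding code rather than introduced here:

    // Sketch only: the generated check jumps to miss as soon as the property
    // is actually added to the global object.
    static bool GlobalStillLacksProperty(JSGlobalPropertyCell* cell,
                                         Object* the_hole) {
      return cell->value() == the_hole;
    }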
843 846
844 847
845 #undef __ 848 #undef __
846 #define __ ACCESS_MASM((masm())) 849 #define __ ACCESS_MASM((masm()))
847 850
848 851
849 Register StubCompiler::CheckPrototypes(JSObject* object, 852 Register StubCompiler::CheckPrototypes(JSObject* object,
(...skipping 28 matching lines...)
878 881
879 // Only global objects and objects that do not require access 882 // Only global objects and objects that do not require access
880 // checks are allowed in stubs. 883 // checks are allowed in stubs.
881 ASSERT(current->IsJSGlobalProxy() || !current->IsAccessCheckNeeded()); 884 ASSERT(current->IsJSGlobalProxy() || !current->IsAccessCheckNeeded());
882 885
883 JSObject* prototype = JSObject::cast(current->GetPrototype()); 886 JSObject* prototype = JSObject::cast(current->GetPrototype());
884 if (!current->HasFastProperties() && 887 if (!current->HasFastProperties() &&
885 !current->IsJSGlobalObject() && 888 !current->IsJSGlobalObject() &&
886 !current->IsJSGlobalProxy()) { 889 !current->IsJSGlobalProxy()) {
887 if (!name->IsSymbol()) { 890 if (!name->IsSymbol()) {
888 MaybeObject* lookup_result = Heap::LookupSymbol(name); 891 MaybeObject* lookup_result = HEAP->LookupSymbol(name);
889 if (lookup_result->IsFailure()) { 892 if (lookup_result->IsFailure()) {
890 set_failure(Failure::cast(lookup_result)); 893 set_failure(Failure::cast(lookup_result));
891 return reg; 894 return reg;
892 } else { 895 } else {
893 name = String::cast(lookup_result->ToObjectUnchecked()); 896 name = String::cast(lookup_result->ToObjectUnchecked());
894 } 897 }
895 } 898 }
896 ASSERT(current->property_dictionary()->FindEntry(name) == 899 ASSERT(current->property_dictionary()->FindEntry(name) ==
897 StringDictionary::kNotFound); 900 StringDictionary::kNotFound);
898 901
899 GenerateDictionaryNegativeLookup(masm(), 902 GenerateDictionaryNegativeLookup(masm(),
900 miss, 903 miss,
901 reg, 904 reg,
902 name, 905 name,
903 scratch1, 906 scratch1,
904 scratch2); 907 scratch2);
905 __ movq(scratch1, FieldOperand(reg, HeapObject::kMapOffset)); 908 __ movq(scratch1, FieldOperand(reg, HeapObject::kMapOffset));
906 reg = holder_reg; // from now the object is in holder_reg 909 reg = holder_reg; // from now the object is in holder_reg
907 __ movq(reg, FieldOperand(scratch1, Map::kPrototypeOffset)); 910 __ movq(reg, FieldOperand(scratch1, Map::kPrototypeOffset));
908 } else if (Heap::InNewSpace(prototype)) { 911 } else if (HEAP->InNewSpace(prototype)) {
909 // Get the map of the current object. 912 // Get the map of the current object.
910 __ movq(scratch1, FieldOperand(reg, HeapObject::kMapOffset)); 913 __ movq(scratch1, FieldOperand(reg, HeapObject::kMapOffset));
911 __ Cmp(scratch1, Handle<Map>(current->map())); 914 __ Cmp(scratch1, Handle<Map>(current->map()));
912 // Branch on the result of the map check. 915 // Branch on the result of the map check.
913 __ j(not_equal, miss); 916 __ j(not_equal, miss);
914 // Check access rights to the global object. This has to happen 917 // Check access rights to the global object. This has to happen
915 // after the map check so that we know that the object is 918 // after the map check so that we know that the object is
916 // actually a global object. 919 // actually a global object.
917 if (current->IsJSGlobalProxy()) { 920 if (current->IsJSGlobalProxy()) {
918 __ CheckAccessGlobalProxy(reg, scratch1, miss); 921 __ CheckAccessGlobalProxy(reg, scratch1, miss);
(...skipping 30 matching lines...)
949 952
950 // Go to the next object in the prototype chain. 953 // Go to the next object in the prototype chain.
951 current = prototype; 954 current = prototype;
952 } 955 }
953 956
954 // Check the holder map. 957 // Check the holder map.
955 __ Cmp(FieldOperand(reg, HeapObject::kMapOffset), Handle<Map>(holder->map())); 958 __ Cmp(FieldOperand(reg, HeapObject::kMapOffset), Handle<Map>(holder->map()));
956 __ j(not_equal, miss); 959 __ j(not_equal, miss);
957 960
958 // Log the check depth. 961 // Log the check depth.
959 LOG(IntEvent("check-maps-depth", depth + 1)); 962 LOG(isolate(), IntEvent("check-maps-depth", depth + 1));
960 963
961 // Perform security check for access to the global object and return 964 // Perform security check for access to the global object and return
962 // the holder register. 965 // the holder register.
963 ASSERT(current == holder); 966 ASSERT(current == holder);
964 ASSERT(current->IsJSGlobalProxy() || !current->IsAccessCheckNeeded()); 967 ASSERT(current->IsJSGlobalProxy() || !current->IsAccessCheckNeeded());
965 if (current->IsJSGlobalProxy()) { 968 if (current->IsJSGlobalProxy()) {
966 __ CheckAccessGlobalProxy(reg, scratch1, miss); 969 __ CheckAccessGlobalProxy(reg, scratch1, miss);
967 } 970 }
968 971
969 // If we've skipped any global objects, it's not enough to verify 972 // If we've skipped any global objects, it's not enough to verify
(...skipping 62 matching lines...)
1032 scratch2, scratch3, name, miss); 1035 scratch2, scratch3, name, miss);
1033 1036
1034 Handle<AccessorInfo> callback_handle(callback); 1037 Handle<AccessorInfo> callback_handle(callback);
1035 1038
1036 // Insert additional parameters into the stack frame above return address. 1039 // Insert additional parameters into the stack frame above return address.
1037 ASSERT(!scratch2.is(reg)); 1040 ASSERT(!scratch2.is(reg));
1038 __ pop(scratch2); // Get return address to place it below. 1041 __ pop(scratch2); // Get return address to place it below.
1039 1042
1040 __ push(receiver); // receiver 1043 __ push(receiver); // receiver
1041 __ push(reg); // holder 1044 __ push(reg); // holder
1042 if (Heap::InNewSpace(callback_handle->data())) { 1045 if (HEAP->InNewSpace(callback_handle->data())) {
1043 __ Move(scratch1, callback_handle); 1046 __ Move(scratch1, callback_handle);
1044 __ push(FieldOperand(scratch1, AccessorInfo::kDataOffset)); // data 1047 __ push(FieldOperand(scratch1, AccessorInfo::kDataOffset)); // data
1045 } else { 1048 } else {
1046 __ Push(Handle<Object>(callback_handle->data())); 1049 __ Push(Handle<Object>(callback_handle->data()));
1047 } 1050 }
1048 __ push(name_reg); // name 1051 __ push(name_reg); // name
1049 // Save a pointer to where we pushed the arguments pointer. 1052 // Save a pointer to where we pushed the arguments pointer.
1050 // This will be passed as the const AccessorInfo& to the C++ callback. 1053 // This will be passed as the const AccessorInfo& to the C++ callback.
1051 1054
1052 #ifdef _WIN64 1055 #ifdef _WIN64
(...skipping 231 matching lines...)
1284 1287
1285 1288
1286 void CallStubCompiler::GenerateLoadFunctionFromCell(JSGlobalPropertyCell* cell, 1289 void CallStubCompiler::GenerateLoadFunctionFromCell(JSGlobalPropertyCell* cell,
1287 JSFunction* function, 1290 JSFunction* function,
1288 Label* miss) { 1291 Label* miss) {
1289 // Get the value from the cell. 1292 // Get the value from the cell.
1290 __ Move(rdi, Handle<JSGlobalPropertyCell>(cell)); 1293 __ Move(rdi, Handle<JSGlobalPropertyCell>(cell));
1291 __ movq(rdi, FieldOperand(rdi, JSGlobalPropertyCell::kValueOffset)); 1294 __ movq(rdi, FieldOperand(rdi, JSGlobalPropertyCell::kValueOffset));
1292 1295
1293 // Check that the cell contains the same function. 1296 // Check that the cell contains the same function.
1294 if (Heap::InNewSpace(function)) { 1297 if (HEAP->InNewSpace(function)) {
1295 // We can't embed a pointer to a function in new space so we have 1298 // We can't embed a pointer to a function in new space so we have
1296 // to verify that the shared function info is unchanged. This has 1299 // to verify that the shared function info is unchanged. This has
1297 // the nice side effect that multiple closures based on the same 1300 // the nice side effect that multiple closures based on the same
1298 // function can all use this call IC. Before we load through the 1301 // function can all use this call IC. Before we load through the
1299 // function, we have to verify that it still is a function. 1302 // function, we have to verify that it still is a function.
1300 __ JumpIfSmi(rdi, miss); 1303 __ JumpIfSmi(rdi, miss);
1301 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rax); 1304 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rax);
1302 __ j(not_equal, miss); 1305 __ j(not_equal, miss);
1303 1306
1304 // Check the shared function info. Make sure it hasn't changed. 1307 // Check the shared function info. Make sure it hasn't changed.
1305 __ Move(rax, Handle<SharedFunctionInfo>(function->shared())); 1308 __ Move(rax, Handle<SharedFunctionInfo>(function->shared()));
1306 __ cmpq(FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset), rax); 1309 __ cmpq(FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset), rax);
1307 __ j(not_equal, miss); 1310 __ j(not_equal, miss);
1308 } else { 1311 } else {
1309 __ Cmp(rdi, Handle<JSFunction>(function)); 1312 __ Cmp(rdi, Handle<JSFunction>(function));
1310 __ j(not_equal, miss); 1313 __ j(not_equal, miss);
1311 } 1314 }
1312 } 1315 }
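The branch on HEAP->InNewSpace(function) encodes a general rule worth spelling out: a minimal sketch, with the helper name chosen here for illustration only.

    // Sketch only: generated code may embed a direct pointer to an object only
    // if the scavenger cannot move it; new-space objects move, so the stub
    // compares the (old-space) SharedFunctionInfo instead of the closure itself.
    static bool CanEmbedDirectPointer(Heap* heap, Object* target) {
      return !heap->InNewSpace(target);
    }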
1313 1316
1314 1317
1315 MaybeObject* CallStubCompiler::GenerateMissBranch() { 1318 MaybeObject* CallStubCompiler::GenerateMissBranch() {
1316 MaybeObject* maybe_obj = StubCache::ComputeCallMiss(arguments().immediate(), 1319 MaybeObject* maybe_obj = Isolate::Current()->stub_cache()->ComputeCallMiss(
1317 kind_); 1320 arguments().immediate(), kind_);
1318 Object* obj; 1321 Object* obj;
1319 if (!maybe_obj->ToObject(&obj)) return maybe_obj; 1322 if (!maybe_obj->ToObject(&obj)) return maybe_obj;
1320 __ Jump(Handle<Code>(Code::cast(obj)), RelocInfo::CODE_TARGET); 1323 __ Jump(Handle<Code>(Code::cast(obj)), RelocInfo::CODE_TARGET);
1321 return obj; 1324 return obj;
1322 } 1325 }
1323 1326
1324 1327
1325 MaybeObject* CallStubCompiler::CompileCallField(JSObject* object, 1328 MaybeObject* CallStubCompiler::CompileCallField(JSObject* object,
1326 JSObject* holder, 1329 JSObject* holder,
1327 int index, 1330 int index,
(...skipping 58 matching lines...)
1386 String* name) { 1389 String* name) {
1387 // ----------- S t a t e ------------- 1390 // ----------- S t a t e -------------
1388 // -- rcx : name 1391 // -- rcx : name
1389 // -- rsp[0] : return address 1392 // -- rsp[0] : return address
1390 // -- rsp[(argc - n) * 8] : arg[n] (zero-based) 1393 // -- rsp[(argc - n) * 8] : arg[n] (zero-based)
1391 // -- ... 1394 // -- ...
1392 // -- rsp[(argc + 1) * 8] : receiver 1395 // -- rsp[(argc + 1) * 8] : receiver
1393 // ----------------------------------- 1396 // -----------------------------------
1394 1397
1395 // If object is not an array, bail out to regular call. 1398 // If object is not an array, bail out to regular call.
1396 if (!object->IsJSArray() || cell != NULL) return Heap::undefined_value(); 1399 if (!object->IsJSArray() || cell != NULL) return HEAP->undefined_value();
1397 1400
1398 Label miss; 1401 Label miss;
1399 1402
1400 GenerateNameCheck(name, &miss); 1403 GenerateNameCheck(name, &miss);
1401 1404
1402 // Get the receiver from the stack. 1405 // Get the receiver from the stack.
1403 const int argc = arguments().immediate(); 1406 const int argc = arguments().immediate();
1404 __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize)); 1407 __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
1405 1408
1406 // Check that the receiver isn't a smi. 1409 // Check that the receiver isn't a smi.
(...skipping 13 matching lines...)
1420 __ movq(rax, FieldOperand(rdx, JSArray::kLengthOffset)); 1423 __ movq(rax, FieldOperand(rdx, JSArray::kLengthOffset));
1421 __ ret((argc + 1) * kPointerSize); 1424 __ ret((argc + 1) * kPointerSize);
1422 } else { 1425 } else {
1423 Label call_builtin; 1426 Label call_builtin;
1424 1427
1425 // Get the elements array of the object. 1428 // Get the elements array of the object.
1426 __ movq(rbx, FieldOperand(rdx, JSArray::kElementsOffset)); 1429 __ movq(rbx, FieldOperand(rdx, JSArray::kElementsOffset));
1427 1430
1428 // Check that the elements are in fast mode and writable. 1431 // Check that the elements are in fast mode and writable.
1429 __ Cmp(FieldOperand(rbx, HeapObject::kMapOffset), 1432 __ Cmp(FieldOperand(rbx, HeapObject::kMapOffset),
1430 Factory::fixed_array_map()); 1433 FACTORY->fixed_array_map());
1431 __ j(not_equal, &call_builtin); 1434 __ j(not_equal, &call_builtin);
1432 1435
1433 if (argc == 1) { // Otherwise fall through to call builtin. 1436 if (argc == 1) { // Otherwise fall through to call builtin.
1434 Label exit, with_write_barrier, attempt_to_grow_elements; 1437 Label exit, with_write_barrier, attempt_to_grow_elements;
1435 1438
1436 // Get the array's length into rax and calculate new length. 1439 // Get the array's length into rax and calculate new length.
1437 __ SmiToInteger32(rax, FieldOperand(rdx, JSArray::kLengthOffset)); 1440 __ SmiToInteger32(rax, FieldOperand(rdx, JSArray::kLengthOffset));
1438 STATIC_ASSERT(FixedArray::kMaxLength < Smi::kMaxValue); 1441 STATIC_ASSERT(FixedArray::kMaxLength < Smi::kMaxValue);
1439 __ addl(rax, Immediate(argc)); 1442 __ addl(rax, Immediate(argc));
1440 1443
(...skipping 108 matching lines...)
1549 String* name) { 1552 String* name) {
1550 // ----------- S t a t e ------------- 1553 // ----------- S t a t e -------------
1551 // -- rcx : name 1554 // -- rcx : name
1552 // -- rsp[0] : return address 1555 // -- rsp[0] : return address
1553 // -- rsp[(argc - n) * 8] : arg[n] (zero-based) 1556 // -- rsp[(argc - n) * 8] : arg[n] (zero-based)
1554 // -- ... 1557 // -- ...
1555 // -- rsp[(argc + 1) * 8] : receiver 1558 // -- rsp[(argc + 1) * 8] : receiver
1556 // ----------------------------------- 1559 // -----------------------------------
1557 1560
1558 // If object is not an array, bail out to regular call. 1561 // If object is not an array, bail out to regular call.
1559 if (!object->IsJSArray() || cell != NULL) return Heap::undefined_value(); 1562 if (!object->IsJSArray() || cell != NULL) return HEAP->undefined_value();
1560 1563
1561 Label miss, return_undefined, call_builtin; 1564 Label miss, return_undefined, call_builtin;
1562 1565
1563 GenerateNameCheck(name, &miss); 1566 GenerateNameCheck(name, &miss);
1564 1567
1565 // Get the receiver from the stack. 1568 // Get the receiver from the stack.
1566 const int argc = arguments().immediate(); 1569 const int argc = arguments().immediate();
1567 __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize)); 1570 __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
1568 1571
1569 // Check that the receiver isn't a smi. 1572 // Check that the receiver isn't a smi.
(...skipping 64 matching lines...)
1634 String* name) { 1637 String* name) {
1635 // ----------- S t a t e ------------- 1638 // ----------- S t a t e -------------
1636 // -- rcx : function name 1639 // -- rcx : function name
1637 // -- rsp[0] : return address 1640 // -- rsp[0] : return address
1638 // -- rsp[(argc - n) * 8] : arg[n] (zero-based) 1641 // -- rsp[(argc - n) * 8] : arg[n] (zero-based)
1639 // -- ... 1642 // -- ...
1640 // -- rsp[(argc + 1) * 8] : receiver 1643 // -- rsp[(argc + 1) * 8] : receiver
1641 // ----------------------------------- 1644 // -----------------------------------
1642 1645
1643 // If object is not a string, bail out to regular call. 1646 // If object is not a string, bail out to regular call.
1644 if (!object->IsString() || cell != NULL) return Heap::undefined_value(); 1647 if (!object->IsString() || cell != NULL) return HEAP->undefined_value();
1645 1648
1646 const int argc = arguments().immediate(); 1649 const int argc = arguments().immediate();
1647 1650
1648 Label miss; 1651 Label miss;
1649 Label name_miss; 1652 Label name_miss;
1650 Label index_out_of_range; 1653 Label index_out_of_range;
1651 Label* index_out_of_range_label = &index_out_of_range; 1654 Label* index_out_of_range_label = &index_out_of_range;
1652 1655
1653 if (kind_ == Code::CALL_IC && extra_ic_state_ == DEFAULT_STRING_STUB) { 1656 if (kind_ == Code::CALL_IC && extra_ic_state_ == DEFAULT_STRING_STUB) {
1654 index_out_of_range_label = &miss; 1657 index_out_of_range_label = &miss;
(...skipping 63 matching lines...)
1718 String* name) { 1721 String* name) {
1719 // ----------- S t a t e ------------- 1722 // ----------- S t a t e -------------
1720 // -- rcx : function name 1723 // -- rcx : function name
1721 // -- rsp[0] : return address 1724 // -- rsp[0] : return address
1722 // -- rsp[(argc - n) * 8] : arg[n] (zero-based) 1725 // -- rsp[(argc - n) * 8] : arg[n] (zero-based)
1723 // -- ... 1726 // -- ...
1724 // -- rsp[(argc + 1) * 8] : receiver 1727 // -- rsp[(argc + 1) * 8] : receiver
1725 // ----------------------------------- 1728 // -----------------------------------
1726 1729
1727 // If object is not a string, bail out to regular call. 1730 // If object is not a string, bail out to regular call.
1728 if (!object->IsString() || cell != NULL) return Heap::undefined_value(); 1731 if (!object->IsString() || cell != NULL) return HEAP->undefined_value();
1729 1732
1730 const int argc = arguments().immediate(); 1733 const int argc = arguments().immediate();
1731 1734
1732 Label miss; 1735 Label miss;
1733 Label name_miss; 1736 Label name_miss;
1734 Label index_out_of_range; 1737 Label index_out_of_range;
1735 Label* index_out_of_range_label = &index_out_of_range; 1738 Label* index_out_of_range_label = &index_out_of_range;
1736 1739
1737 if (kind_ == Code::CALL_IC && extra_ic_state_ == DEFAULT_STRING_STUB) { 1740 if (kind_ == Code::CALL_IC && extra_ic_state_ == DEFAULT_STRING_STUB) {
1738 index_out_of_range_label = &miss; 1741 index_out_of_range_label = &miss;
(...skipping 68 matching lines...)
1807 // -- rsp[0] : return address 1810 // -- rsp[0] : return address
1808 // -- rsp[(argc - n) * 8] : arg[n] (zero-based) 1811 // -- rsp[(argc - n) * 8] : arg[n] (zero-based)
1809 // -- ... 1812 // -- ...
1810 // -- rsp[(argc + 1) * 8] : receiver 1813 // -- rsp[(argc + 1) * 8] : receiver
1811 // ----------------------------------- 1814 // -----------------------------------
1812 1815
1813 const int argc = arguments().immediate(); 1816 const int argc = arguments().immediate();
1814 1817
1815 // If the object is not a JSObject or we got an unexpected number of 1818 // If the object is not a JSObject or we got an unexpected number of
1816 // arguments, bail out to the regular call. 1819 // arguments, bail out to the regular call.
1817 if (!object->IsJSObject() || argc != 1) return Heap::undefined_value(); 1820 if (!object->IsJSObject() || argc != 1) return HEAP->undefined_value();
1818 1821
1819 Label miss; 1822 Label miss;
1820 GenerateNameCheck(name, &miss); 1823 GenerateNameCheck(name, &miss);
1821 1824
1822 if (cell == NULL) { 1825 if (cell == NULL) {
1823 __ movq(rdx, Operand(rsp, 2 * kPointerSize)); 1826 __ movq(rdx, Operand(rsp, 2 * kPointerSize));
1824 1827
1825 __ JumpIfSmi(rdx, &miss); 1828 __ JumpIfSmi(rdx, &miss);
1826 1829
1827 CheckPrototypes(JSObject::cast(object), rdx, holder, rbx, rax, rdi, name, 1830 CheckPrototypes(JSObject::cast(object), rdx, holder, rbx, rax, rdi, name,
(...skipping 38 matching lines...)
1866 return (cell == NULL) ? GetCode(function) : GetCode(NORMAL, name); 1869 return (cell == NULL) ? GetCode(function) : GetCode(NORMAL, name);
1867 } 1870 }
1868 1871
1869 1872
1870 MaybeObject* CallStubCompiler::CompileMathFloorCall(Object* object, 1873 MaybeObject* CallStubCompiler::CompileMathFloorCall(Object* object,
1871 JSObject* holder, 1874 JSObject* holder,
1872 JSGlobalPropertyCell* cell, 1875 JSGlobalPropertyCell* cell,
1873 JSFunction* function, 1876 JSFunction* function,
1874 String* name) { 1877 String* name) {
1875 // TODO(872): implement this. 1878 // TODO(872): implement this.
1876 return Heap::undefined_value(); 1879 return HEAP->undefined_value();
1877 } 1880 }
1878 1881
1879 1882
1880 MaybeObject* CallStubCompiler::CompileMathAbsCall(Object* object, 1883 MaybeObject* CallStubCompiler::CompileMathAbsCall(Object* object,
1881 JSObject* holder, 1884 JSObject* holder,
1882 JSGlobalPropertyCell* cell, 1885 JSGlobalPropertyCell* cell,
1883 JSFunction* function, 1886 JSFunction* function,
1884 String* name) { 1887 String* name) {
1885 // ----------- S t a t e ------------- 1888 // ----------- S t a t e -------------
1886 // -- rcx : function name 1889 // -- rcx : function name
1887 // -- rsp[0] : return address 1890 // -- rsp[0] : return address
1888 // -- rsp[(argc - n) * 8] : arg[n] (zero-based) 1891 // -- rsp[(argc - n) * 8] : arg[n] (zero-based)
1889 // -- ... 1892 // -- ...
1890 // -- rsp[(argc + 1) * 8] : receiver 1893 // -- rsp[(argc + 1) * 8] : receiver
1891 // ----------------------------------- 1894 // -----------------------------------
1892 1895
1893 const int argc = arguments().immediate(); 1896 const int argc = arguments().immediate();
1894 1897
1895 // If the object is not a JSObject or we got an unexpected number of 1898 // If the object is not a JSObject or we got an unexpected number of
1896 // arguments, bail out to the regular call. 1899 // arguments, bail out to the regular call.
1897 if (!object->IsJSObject() || argc != 1) return Heap::undefined_value(); 1900 if (!object->IsJSObject() || argc != 1) return HEAP->undefined_value();
1898 1901
1899 Label miss; 1902 Label miss;
1900 GenerateNameCheck(name, &miss); 1903 GenerateNameCheck(name, &miss);
1901 1904
1902 if (cell == NULL) { 1905 if (cell == NULL) {
1903 __ movq(rdx, Operand(rsp, 2 * kPointerSize)); 1906 __ movq(rdx, Operand(rsp, 2 * kPointerSize));
1904 1907
1905 __ JumpIfSmi(rdx, &miss); 1908 __ JumpIfSmi(rdx, &miss);
1906 1909
1907 CheckPrototypes(JSObject::cast(object), rdx, holder, rbx, rax, rdi, name, 1910 CheckPrototypes(JSObject::cast(object), rdx, holder, rbx, rax, rdi, name,
(...skipping 28 matching lines...)
1936 // This only happens for the most negative smi. 1939 // This only happens for the most negative smi.
1937 Label slow; 1940 Label slow;
1938 __ j(negative, &slow); 1941 __ j(negative, &slow);
1939 1942
1940 // Smi case done. 1943 // Smi case done.
1941 __ Integer32ToSmi(rax, rax); 1944 __ Integer32ToSmi(rax, rax);
1942 __ ret(2 * kPointerSize); 1945 __ ret(2 * kPointerSize);
1943 1946
1944 // Check if the argument is a heap number and load its value. 1947 // Check if the argument is a heap number and load its value.
1945 __ bind(&not_smi); 1948 __ bind(&not_smi);
1946 __ CheckMap(rax, Factory::heap_number_map(), &slow, true); 1949 __ CheckMap(rax, FACTORY->heap_number_map(), &slow, true);
1947 __ movq(rbx, FieldOperand(rax, HeapNumber::kValueOffset)); 1950 __ movq(rbx, FieldOperand(rax, HeapNumber::kValueOffset));
1948 1951
1949 // Check the sign of the argument. If the argument is positive, 1952 // Check the sign of the argument. If the argument is positive,
1950 // just return it. 1953 // just return it.
1951 Label negative_sign; 1954 Label negative_sign;
1952 const int sign_mask_shift = 1955 const int sign_mask_shift =
1953 (HeapNumber::kExponentOffset - HeapNumber::kValueOffset) * kBitsPerByte; 1956 (HeapNumber::kExponentOffset - HeapNumber::kValueOffset) * kBitsPerByte;
1954 __ movq(rdi, static_cast<int64_t>(HeapNumber::kSignMask) << sign_mask_shift, 1957 __ movq(rdi, static_cast<int64_t>(HeapNumber::kSignMask) << sign_mask_shift,
1955 RelocInfo::NONE); 1958 RelocInfo::NONE);
1956 __ testq(rbx, rdi); 1959 __ testq(rbx, rdi);
(...skipping 28 matching lines...)
1985 MaybeObject* CallStubCompiler::CompileFastApiCall( 1988 MaybeObject* CallStubCompiler::CompileFastApiCall(
1986 const CallOptimization& optimization, 1989 const CallOptimization& optimization,
1987 Object* object, 1990 Object* object,
1988 JSObject* holder, 1991 JSObject* holder,
1989 JSGlobalPropertyCell* cell, 1992 JSGlobalPropertyCell* cell,
1990 JSFunction* function, 1993 JSFunction* function,
1991 String* name) { 1994 String* name) {
1992 ASSERT(optimization.is_simple_api_call()); 1995 ASSERT(optimization.is_simple_api_call());
1993 // Bail out if object is a global object as we don't want to 1996 // Bail out if object is a global object as we don't want to
1994 // repatch it to global receiver. 1997 // repatch it to global receiver.
1995 if (object->IsGlobalObject()) return Heap::undefined_value(); 1998 if (object->IsGlobalObject()) return HEAP->undefined_value();
1996 if (cell != NULL) return Heap::undefined_value(); 1999 if (cell != NULL) return HEAP->undefined_value();
1997 int depth = optimization.GetPrototypeDepthOfExpectedType( 2000 int depth = optimization.GetPrototypeDepthOfExpectedType(
1998 JSObject::cast(object), holder); 2001 JSObject::cast(object), holder);
1999 if (depth == kInvalidProtoDepth) return Heap::undefined_value(); 2002 if (depth == kInvalidProtoDepth) return HEAP->undefined_value();
2000 2003
2001 Label miss, miss_before_stack_reserved; 2004 Label miss, miss_before_stack_reserved;
2002 2005
2003 GenerateNameCheck(name, &miss_before_stack_reserved); 2006 GenerateNameCheck(name, &miss_before_stack_reserved);
2004 2007
2005 // Get the receiver from the stack. 2008 // Get the receiver from the stack.
2006 const int argc = arguments().immediate(); 2009 const int argc = arguments().immediate();
2007 __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize)); 2010 __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
2008 2011
2009 // Check that the receiver isn't a smi. 2012 // Check that the receiver isn't a smi.
2010 __ JumpIfSmi(rdx, &miss_before_stack_reserved); 2013 __ JumpIfSmi(rdx, &miss_before_stack_reserved);
2011 2014
2012 __ IncrementCounter(&Counters::call_const, 1); 2015 __ IncrementCounter(COUNTERS->call_const(), 1);
2013 __ IncrementCounter(&Counters::call_const_fast_api, 1); 2016 __ IncrementCounter(COUNTERS->call_const_fast_api(), 1);
2014 2017
2015 // Allocate space for v8::Arguments implicit values. Must be initialized 2018 // Allocate space for v8::Arguments implicit values. Must be initialized
2016 // before calling any runtime function. 2019 // before calling any runtime function.
2017 __ subq(rsp, Immediate(kFastApiCallArguments * kPointerSize)); 2020 __ subq(rsp, Immediate(kFastApiCallArguments * kPointerSize));
2018 2021
2019 // Check that the maps haven't changed and find a Holder as a side effect. 2022 // Check that the maps haven't changed and find a Holder as a side effect.
2020 CheckPrototypes(JSObject::cast(object), rdx, holder, 2023 CheckPrototypes(JSObject::cast(object), rdx, holder,
2021 rbx, rax, rdi, name, depth, &miss); 2024 rbx, rax, rdi, name, depth, &miss);
2022 2025
2023 // Move the return address on top of the stack. 2026 // Move the return address on top of the stack.
(...skipping 54 matching lines...)
2078 __ JumpIfSmi(rdx, &miss); 2081 __ JumpIfSmi(rdx, &miss);
2079 } 2082 }
2080 2083
2081 // Make sure that it's okay not to patch the on stack receiver 2084 // Make sure that it's okay not to patch the on stack receiver
2082 // unless we're doing a receiver map check. 2085 // unless we're doing a receiver map check.
2083 ASSERT(!object->IsGlobalObject() || check == RECEIVER_MAP_CHECK); 2086 ASSERT(!object->IsGlobalObject() || check == RECEIVER_MAP_CHECK);
2084 2087
2085 SharedFunctionInfo* function_info = function->shared(); 2088 SharedFunctionInfo* function_info = function->shared();
2086 switch (check) { 2089 switch (check) {
2087 case RECEIVER_MAP_CHECK: 2090 case RECEIVER_MAP_CHECK:
2088 __ IncrementCounter(&Counters::call_const, 1); 2091 __ IncrementCounter(COUNTERS->call_const(), 1);
2089 2092
2090 // Check that the maps haven't changed. 2093 // Check that the maps haven't changed.
2091 CheckPrototypes(JSObject::cast(object), rdx, holder, 2094 CheckPrototypes(JSObject::cast(object), rdx, holder,
2092 rbx, rax, rdi, name, &miss); 2095 rbx, rax, rdi, name, &miss);
2093 2096
2094 // Patch the receiver on the stack with the global proxy if 2097 // Patch the receiver on the stack with the global proxy if
2095 // necessary. 2098 // necessary.
2096 if (object->IsGlobalObject()) { 2099 if (object->IsGlobalObject()) {
2097 __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalReceiverOffset)); 2100 __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalReceiverOffset));
2098 __ movq(Operand(rsp, (argc + 1) * kPointerSize), rdx); 2101 __ movq(Operand(rsp, (argc + 1) * kPointerSize), rdx);
(...skipping 185 matching lines...)
2284 // Patch the receiver on the stack with the global proxy. 2287 // Patch the receiver on the stack with the global proxy.
2285 if (object->IsGlobalObject()) { 2288 if (object->IsGlobalObject()) {
2286 __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalReceiverOffset)); 2289 __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalReceiverOffset));
2287 __ movq(Operand(rsp, (argc + 1) * kPointerSize), rdx); 2290 __ movq(Operand(rsp, (argc + 1) * kPointerSize), rdx);
2288 } 2291 }
2289 2292
2290 // Setup the context (function already in rdi). 2293 // Setup the context (function already in rdi).
2291 __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset)); 2294 __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
2292 2295
2293 // Jump to the cached code (tail call). 2296 // Jump to the cached code (tail call).
2294 __ IncrementCounter(&Counters::call_global_inline, 1); 2297 __ IncrementCounter(COUNTERS->call_global_inline(), 1);
2295 ASSERT(function->is_compiled()); 2298 ASSERT(function->is_compiled());
2296 ParameterCount expected(function->shared()->formal_parameter_count()); 2299 ParameterCount expected(function->shared()->formal_parameter_count());
2297 if (V8::UseCrankshaft()) { 2300 if (V8::UseCrankshaft()) {
2298 // TODO(kasperl): For now, we always call indirectly through the 2301 // TODO(kasperl): For now, we always call indirectly through the
2299 // code field in the function to allow recompilation to take effect 2302 // code field in the function to allow recompilation to take effect
2300 // without changing any of the call sites. 2303 // without changing any of the call sites.
2301 __ movq(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset)); 2304 __ movq(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
2302 __ InvokeCode(rdx, expected, arguments(), JUMP_FUNCTION); 2305 __ InvokeCode(rdx, expected, arguments(), JUMP_FUNCTION);
2303 } else { 2306 } else {
2304 Handle<Code> code(function->code()); 2307 Handle<Code> code(function->code());
2305 __ InvokeCode(code, expected, arguments(), 2308 __ InvokeCode(code, expected, arguments(),
2306 RelocInfo::CODE_TARGET, JUMP_FUNCTION); 2309 RelocInfo::CODE_TARGET, JUMP_FUNCTION);
2307 } 2310 }
2308 // Handle call cache miss. 2311 // Handle call cache miss.
2309 __ bind(&miss); 2312 __ bind(&miss);
2310 __ IncrementCounter(&Counters::call_global_inline_miss, 1); 2313 __ IncrementCounter(COUNTERS->call_global_inline_miss(), 1);
2311 Object* obj; 2314 Object* obj;
2312 { MaybeObject* maybe_obj = GenerateMissBranch(); 2315 { MaybeObject* maybe_obj = GenerateMissBranch();
2313 if (!maybe_obj->ToObject(&obj)) return maybe_obj; 2316 if (!maybe_obj->ToObject(&obj)) return maybe_obj;
2314 } 2317 }
2315 2318
2316 // Return the generated code. 2319 // Return the generated code.
2317 return GetCode(NORMAL, name); 2320 return GetCode(NORMAL, name);
2318 } 2321 }
2319 2322
2320 2323
(...skipping 12 matching lines...)
2333 // Generate store field code. Preserves receiver and name on jump to miss. 2336 // Generate store field code. Preserves receiver and name on jump to miss.
2334 GenerateStoreField(masm(), 2337 GenerateStoreField(masm(),
2335 object, 2338 object,
2336 index, 2339 index,
2337 transition, 2340 transition,
2338 rdx, rcx, rbx, 2341 rdx, rcx, rbx,
2339 &miss); 2342 &miss);
2340 2343
2341 // Handle store cache miss. 2344 // Handle store cache miss.
2342 __ bind(&miss); 2345 __ bind(&miss);
2343 Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss)); 2346 Handle<Code> ic(Isolate::Current()->builtins()->builtin(
2347 Builtins::StoreIC_Miss));
2344 __ Jump(ic, RelocInfo::CODE_TARGET); 2348 __ Jump(ic, RelocInfo::CODE_TARGET);
2345 2349
2346 // Return the generated code. 2350 // Return the generated code.
2347 return GetCode(transition == NULL ? FIELD : MAP_TRANSITION, name); 2351 return GetCode(transition == NULL ? FIELD : MAP_TRANSITION, name);
2348 } 2352 }
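
GenerateStoreField above serves two cases: when transition is NULL the stub just writes into an existing field slot (a FIELD stub), and when a transition map is supplied the store also installs the new map that adds the property (a MAP_TRANSITION stub), which is why GetCode picks the stub kind from the transition argument. Below is a rough, self-contained sketch of that distinction, using invented types (ToyMap, ToyObject) rather than V8's.

    #include <cstdio>

    // Toy model of FIELD vs. MAP_TRANSITION stores: a transitioning store swaps
    // the object's map (its layout descriptor) before writing the new field.
    struct ToyMap {
      int property_count;
    };

    struct ToyObject {
      const ToyMap* map;
      int fields[8];
    };

    void StoreField(ToyObject* object, int index, int value,
                    const ToyMap* transition) {
      if (transition != nullptr) {
        object->map = transition;  // MAP_TRANSITION: adopt the extended layout
      }
      object->fields[index] = value;  // FIELD: the plain in-object write
    }

    int main() {
      ToyMap one_prop{1}, two_props{2};
      ToyObject o{&one_prop, {0}};
      StoreField(&o, 0, 11, nullptr);     // FIELD store into an existing slot
      StoreField(&o, 1, 22, &two_props);  // MAP_TRANSITION store adding a slot
      std::printf("props=%d f0=%d f1=%d\n",
                  o.map->property_count, o.fields[0], o.fields[1]);
      return 0;
    }
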
2349 2353
2350 2354
2351 MaybeObject* StoreStubCompiler::CompileStoreCallback(JSObject* object, 2355 MaybeObject* StoreStubCompiler::CompileStoreCallback(JSObject* object,
2352 AccessorInfo* callback, 2356 AccessorInfo* callback,
2353 String* name) { 2357 String* name) {
(...skipping 29 matching lines...)
2383 __ push(rax); // value 2387 __ push(rax); // value
2384 __ push(rbx); // restore return address 2388 __ push(rbx); // restore return address
2385 2389
2386 // Do tail-call to the runtime system. 2390 // Do tail-call to the runtime system.
2387 ExternalReference store_callback_property = 2391 ExternalReference store_callback_property =
2388 ExternalReference(IC_Utility(IC::kStoreCallbackProperty)); 2392 ExternalReference(IC_Utility(IC::kStoreCallbackProperty));
2389 __ TailCallExternalReference(store_callback_property, 4, 1); 2393 __ TailCallExternalReference(store_callback_property, 4, 1);
2390 2394
2391 // Handle store cache miss. 2395 // Handle store cache miss.
2392 __ bind(&miss); 2396 __ bind(&miss);
2393 Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss)); 2397 Handle<Code> ic(Isolate::Current()->builtins()->builtin(
2398 Builtins::StoreIC_Miss));
2394 __ Jump(ic, RelocInfo::CODE_TARGET); 2399 __ Jump(ic, RelocInfo::CODE_TARGET);
2395 2400
2396 // Return the generated code. 2401 // Return the generated code.
2397 return GetCode(CALLBACKS, name); 2402 return GetCode(CALLBACKS, name);
2398 } 2403 }
2399 2404
2400 2405
2401 MaybeObject* StoreStubCompiler::CompileStoreInterceptor(JSObject* receiver, 2406 MaybeObject* StoreStubCompiler::CompileStoreInterceptor(JSObject* receiver,
2402 String* name) { 2407 String* name) {
2403 // ----------- S t a t e ------------- 2408 // ----------- S t a t e -------------
(...skipping 28 matching lines...)
2432 __ Push(Smi::FromInt(strict_mode_)); 2437 __ Push(Smi::FromInt(strict_mode_));
2433 __ push(rbx); // restore return address 2438 __ push(rbx); // restore return address
2434 2439
2435 // Do tail-call to the runtime system. 2440 // Do tail-call to the runtime system.
2436 ExternalReference store_ic_property = 2441 ExternalReference store_ic_property =
2437 ExternalReference(IC_Utility(IC::kStoreInterceptorProperty)); 2442 ExternalReference(IC_Utility(IC::kStoreInterceptorProperty));
2438 __ TailCallExternalReference(store_ic_property, 4, 1); 2443 __ TailCallExternalReference(store_ic_property, 4, 1);
2439 2444
2440 // Handle store cache miss. 2445 // Handle store cache miss.
2441 __ bind(&miss); 2446 __ bind(&miss);
2442 Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss)); 2447 Handle<Code> ic(Isolate::Current()->builtins()->builtin(
2448 Builtins::StoreIC_Miss));
2443 __ Jump(ic, RelocInfo::CODE_TARGET); 2449 __ Jump(ic, RelocInfo::CODE_TARGET);
2444 2450
2445 // Return the generated code. 2451 // Return the generated code.
2446 return GetCode(INTERCEPTOR, name); 2452 return GetCode(INTERCEPTOR, name);
2447 } 2453 }
2448 2454
2449 2455
2450 MaybeObject* StoreStubCompiler::CompileStoreGlobal(GlobalObject* object, 2456 MaybeObject* StoreStubCompiler::CompileStoreGlobal(GlobalObject* object,
2451 JSGlobalPropertyCell* cell, 2457 JSGlobalPropertyCell* cell,
2452 String* name) { 2458 String* name) {
(...skipping 16 matching lines...)
2469 // global object. We bail out to the runtime system to do that. 2475 // global object. We bail out to the runtime system to do that.
2470 __ Move(rbx, Handle<JSGlobalPropertyCell>(cell)); 2476 __ Move(rbx, Handle<JSGlobalPropertyCell>(cell));
2471 __ CompareRoot(FieldOperand(rbx, JSGlobalPropertyCell::kValueOffset), 2477 __ CompareRoot(FieldOperand(rbx, JSGlobalPropertyCell::kValueOffset),
2472 Heap::kTheHoleValueRootIndex); 2478 Heap::kTheHoleValueRootIndex);
2473 __ j(equal, &miss); 2479 __ j(equal, &miss);
2474 2480
2475 // Store the value in the cell. 2481 // Store the value in the cell.
2476 __ movq(FieldOperand(rbx, JSGlobalPropertyCell::kValueOffset), rax); 2482 __ movq(FieldOperand(rbx, JSGlobalPropertyCell::kValueOffset), rax);
2477 2483
2478 // Return the value (register rax). 2484 // Return the value (register rax).
2479 __ IncrementCounter(&Counters::named_store_global_inline, 1); 2485 __ IncrementCounter(COUNTERS->named_store_global_inline(), 1);
2480 __ ret(0); 2486 __ ret(0);
2481 2487
2482 // Handle store cache miss. 2488 // Handle store cache miss.
2483 __ bind(&miss); 2489 __ bind(&miss);
2484 __ IncrementCounter(&Counters::named_store_global_inline_miss, 1); 2490 __ IncrementCounter(COUNTERS->named_store_global_inline_miss(), 1);
2485 Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss)); 2491 Handle<Code> ic(Isolate::Current()->builtins()->builtin(
2492 Builtins::StoreIC_Miss));
2486 __ Jump(ic, RelocInfo::CODE_TARGET); 2493 __ Jump(ic, RelocInfo::CODE_TARGET);
2487 2494
2488 // Return the generated code. 2495 // Return the generated code.
2489 return GetCode(NORMAL, name); 2496 return GetCode(NORMAL, name);
2490 } 2497 }
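
CompileStoreGlobal only takes the fast path while the property cell still holds a real value: the hole in the cell means the global was deleted, so the stub must miss and let the runtime reintroduce the property on the global object. A small stand-alone illustration of that check follows; PropertyCellModel and the use of an empty optional as "the hole" are assumptions of this sketch, not V8 types.

    #include <cstdio>
    #include <optional>

    // Toy model of a global property cell; an empty optional plays the role of
    // "the hole", i.e. the property was deleted since the stub was compiled.
    struct PropertyCellModel {
      std::optional<int> value;
    };

    // Returns true on a fast store, false for a miss that would fall back to
    // the generic store IC / runtime in the real stub.
    bool FastStoreGlobal(PropertyCellModel* cell, int new_value) {
      if (!cell->value.has_value()) return false;  // cell holds the hole
      cell->value = new_value;                     // store straight into the cell
      return true;
    }

    int main() {
      PropertyCellModel live{42};
      PropertyCellModel deleted{std::nullopt};
      std::printf("live=%d deleted=%d\n",
                  FastStoreGlobal(&live, 7), FastStoreGlobal(&deleted, 7));
      return 0;
    }
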
2491 2498
2492 2499
2493 MaybeObject* KeyedStoreStubCompiler::CompileStoreField(JSObject* object, 2500 MaybeObject* KeyedStoreStubCompiler::CompileStoreField(JSObject* object,
2494 int index, 2501 int index,
2495 Map* transition, 2502 Map* transition,
2496 String* name) { 2503 String* name) {
2497 // ----------- S t a t e ------------- 2504 // ----------- S t a t e -------------
2498 // -- rax : value 2505 // -- rax : value
2499 // -- rcx : key 2506 // -- rcx : key
2500 // -- rdx : receiver 2507 // -- rdx : receiver
2501 // -- rsp[0] : return address 2508 // -- rsp[0] : return address
2502 // ----------------------------------- 2509 // -----------------------------------
2503 Label miss; 2510 Label miss;
2504 2511
2505 __ IncrementCounter(&Counters::keyed_store_field, 1); 2512 __ IncrementCounter(COUNTERS->keyed_store_field(), 1);
2506 2513
2507 // Check that the name has not changed. 2514 // Check that the name has not changed.
2508 __ Cmp(rcx, Handle<String>(name)); 2515 __ Cmp(rcx, Handle<String>(name));
2509 __ j(not_equal, &miss); 2516 __ j(not_equal, &miss);
2510 2517
2511 // Generate store field code. Preserves receiver and name on jump to miss. 2518 // Generate store field code. Preserves receiver and name on jump to miss.
2512 GenerateStoreField(masm(), 2519 GenerateStoreField(masm(),
2513 object, 2520 object,
2514 index, 2521 index,
2515 transition, 2522 transition,
2516 rdx, rcx, rbx, 2523 rdx, rcx, rbx,
2517 &miss); 2524 &miss);
2518 2525
2519 // Handle store cache miss. 2526 // Handle store cache miss.
2520 __ bind(&miss); 2527 __ bind(&miss);
2521 __ DecrementCounter(&Counters::keyed_store_field, 1); 2528 __ DecrementCounter(COUNTERS->keyed_store_field(), 1);
2522 Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Miss)); 2529 Handle<Code> ic(Isolate::Current()->builtins()->builtin(
2530 Builtins::KeyedStoreIC_Miss));
2523 __ Jump(ic, RelocInfo::CODE_TARGET); 2531 __ Jump(ic, RelocInfo::CODE_TARGET);
2524 2532
2525 // Return the generated code. 2533 // Return the generated code.
2526 return GetCode(transition == NULL ? FIELD : MAP_TRANSITION, name); 2534 return GetCode(transition == NULL ? FIELD : MAP_TRANSITION, name);
2527 } 2535 }
2528 2536
2529 2537
2530 MaybeObject* KeyedStoreStubCompiler::CompileStoreSpecialized( 2538 MaybeObject* KeyedStoreStubCompiler::CompileStoreSpecialized(
2531 JSObject* receiver) { 2539 JSObject* receiver) {
2532 // ----------- S t a t e ------------- 2540 // ----------- S t a t e -------------
(...skipping 11 matching lines...)
2544 __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset), 2552 __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset),
2545 Handle<Map>(receiver->map())); 2553 Handle<Map>(receiver->map()));
2546 __ j(not_equal, &miss); 2554 __ j(not_equal, &miss);
2547 2555
2548 // Check that the key is a smi. 2556 // Check that the key is a smi.
2549 __ JumpIfNotSmi(rcx, &miss); 2557 __ JumpIfNotSmi(rcx, &miss);
2550 2558
2551 // Get the elements array and make sure it is a fast element array, not 'cow'. 2559 // Get the elements array and make sure it is a fast element array, not 'cow'.
2552 __ movq(rdi, FieldOperand(rdx, JSObject::kElementsOffset)); 2560 __ movq(rdi, FieldOperand(rdx, JSObject::kElementsOffset));
2553 __ Cmp(FieldOperand(rdi, HeapObject::kMapOffset), 2561 __ Cmp(FieldOperand(rdi, HeapObject::kMapOffset),
2554 Factory::fixed_array_map()); 2562 FACTORY->fixed_array_map());
2555 __ j(not_equal, &miss); 2563 __ j(not_equal, &miss);
2556 2564
2557 // Check that the key is within bounds. 2565 // Check that the key is within bounds.
2558 if (receiver->IsJSArray()) { 2566 if (receiver->IsJSArray()) {
2559 __ SmiCompare(rcx, FieldOperand(rdx, JSArray::kLengthOffset)); 2567 __ SmiCompare(rcx, FieldOperand(rdx, JSArray::kLengthOffset));
2560 __ j(above_equal, &miss); 2568 __ j(above_equal, &miss);
2561 } else { 2569 } else {
2562 __ SmiCompare(rcx, FieldOperand(rdi, FixedArray::kLengthOffset)); 2570 __ SmiCompare(rcx, FieldOperand(rdi, FixedArray::kLengthOffset));
2563 __ j(above_equal, &miss); 2571 __ j(above_equal, &miss);
2564 } 2572 }
2565 2573
2566 // Do the store and update the write barrier. Make sure to preserve 2574 // Do the store and update the write barrier. Make sure to preserve
2567 // the value in register rax. 2575 // the value in register rax.
2568 __ movq(rdx, rax); 2576 __ movq(rdx, rax);
2569 __ SmiToInteger32(rcx, rcx); 2577 __ SmiToInteger32(rcx, rcx);
2570 __ movq(FieldOperand(rdi, rcx, times_pointer_size, FixedArray::kHeaderSize), 2578 __ movq(FieldOperand(rdi, rcx, times_pointer_size, FixedArray::kHeaderSize),
2571 rax); 2579 rax);
2572 __ RecordWrite(rdi, 0, rdx, rcx); 2580 __ RecordWrite(rdi, 0, rdx, rcx);
2573 2581
2574 // Done. 2582 // Done.
2575 __ ret(0); 2583 __ ret(0);
2576 2584
2577 // Handle store cache miss. 2585 // Handle store cache miss.
2578 __ bind(&miss); 2586 __ bind(&miss);
2579 Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Miss)); 2587 Handle<Code> ic(Isolate::Current()->builtins()->builtin(
2588 Builtins::KeyedStoreIC_Miss));
2580 __ jmp(ic, RelocInfo::CODE_TARGET); 2589 __ jmp(ic, RelocInfo::CODE_TARGET);
2581 2590
2582 // Return the generated code. 2591 // Return the generated code.
2583 return GetCode(NORMAL, NULL); 2592 return GetCode(NORMAL, NULL);
2584 } 2593 }
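
The specialized keyed store above checks the receiver map, requires a smi key, refuses copy-on-write element arrays, and bounds-checks the key against the JSArray length (or, for other receivers, against the backing-store length) before writing and recording the write barrier. The sketch below models just the bounds rule; ToyElementsStore and its fields are invented for the example, and the write barrier appears only as a comment because plain ints need none.

    #include <cstdio>
    #include <vector>

    // Toy receiver: 'length' mimics JSArray length, which can be smaller than
    // the capacity of the elements backing store (the FixedArray in V8).
    struct ToyElementsStore {
      std::vector<int> elements;  // backing store
      size_t length;              // JSArray length (ignored for non-arrays)
      bool is_js_array;
    };

    // Returns true on a fast in-bounds store, false when the stub would miss.
    bool FastKeyedStore(ToyElementsStore* receiver, size_t key, int value) {
      size_t limit = receiver->is_js_array ? receiver->length
                                           : receiver->elements.size();
      if (key >= limit) return false;   // out of bounds: miss to the runtime
      receiver->elements[key] = value;  // the store itself
      // The real stub now runs the write barrier, since the stored value may be
      // a pointer into the heap; this toy stores plain ints, so nothing to do.
      return true;
    }

    int main() {
      ToyElementsStore a{{0, 0, 0, 0}, 2, true};
      std::printf("in=%d out=%d\n",
                  FastKeyedStore(&a, 1, 5), FastKeyedStore(&a, 3, 5));
      return 0;
    }
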
2585 2594
2586 2595
2587 MaybeObject* LoadStubCompiler::CompileLoadNonexistent(String* name, 2596 MaybeObject* LoadStubCompiler::CompileLoadNonexistent(String* name,
2588 JSObject* object, 2597 JSObject* object,
2589 JSObject* last) { 2598 JSObject* last) {
(...skipping 28 matching lines...)
2618 2627
2619 // Return undefined if maps of the full prototype chain are still the 2628 // Return undefined if maps of the full prototype chain are still the
2620 // same and no global property with this name contains a value. 2629 // same and no global property with this name contains a value.
2621 __ LoadRoot(rax, Heap::kUndefinedValueRootIndex); 2630 __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
2622 __ ret(0); 2631 __ ret(0);
2623 2632
2624 __ bind(&miss); 2633 __ bind(&miss);
2625 GenerateLoadMiss(masm(), Code::LOAD_IC); 2634 GenerateLoadMiss(masm(), Code::LOAD_IC);
2626 2635
2627 // Return the generated code. 2636 // Return the generated code.
2628 return GetCode(NONEXISTENT, Heap::empty_string()); 2637 return GetCode(NONEXISTENT, HEAP->empty_string());
2629 } 2638 }
2630 2639
2631 2640
2632 MaybeObject* LoadStubCompiler::CompileLoadField(JSObject* object, 2641 MaybeObject* LoadStubCompiler::CompileLoadField(JSObject* object,
2633 JSObject* holder, 2642 JSObject* holder,
2634 int index, 2643 int index,
2635 String* name) { 2644 String* name) {
2636 // ----------- S t a t e ------------- 2645 // ----------- S t a t e -------------
2637 // -- rax : receiver 2646 // -- rax : receiver
2638 // -- rcx : name 2647 // -- rcx : name
(...skipping 118 matching lines...)
2757 2766
2758 // Check for deleted property if property can actually be deleted. 2767 // Check for deleted property if property can actually be deleted.
2759 if (!is_dont_delete) { 2768 if (!is_dont_delete) {
2760 __ CompareRoot(rbx, Heap::kTheHoleValueRootIndex); 2769 __ CompareRoot(rbx, Heap::kTheHoleValueRootIndex);
2761 __ j(equal, &miss); 2770 __ j(equal, &miss);
2762 } else if (FLAG_debug_code) { 2771 } else if (FLAG_debug_code) {
2763 __ CompareRoot(rbx, Heap::kTheHoleValueRootIndex); 2772 __ CompareRoot(rbx, Heap::kTheHoleValueRootIndex);
2764 __ Check(not_equal, "DontDelete cells can't contain the hole"); 2773 __ Check(not_equal, "DontDelete cells can't contain the hole");
2765 } 2774 }
2766 2775
2767 __ IncrementCounter(&Counters::named_load_global_stub, 1); 2776 __ IncrementCounter(COUNTERS->named_load_global_stub(), 1);
2768 __ movq(rax, rbx); 2777 __ movq(rax, rbx);
2769 __ ret(0); 2778 __ ret(0);
2770 2779
2771 __ bind(&miss); 2780 __ bind(&miss);
2772 __ IncrementCounter(&Counters::named_load_global_stub_miss, 1); 2781 __ IncrementCounter(COUNTERS->named_load_global_stub_miss(), 1);
2773 GenerateLoadMiss(masm(), Code::LOAD_IC); 2782 GenerateLoadMiss(masm(), Code::LOAD_IC);
2774 2783
2775 // Return the generated code. 2784 // Return the generated code.
2776 return GetCode(NORMAL, name); 2785 return GetCode(NORMAL, name);
2777 } 2786 }
2778 2787
2779 2788
2780 MaybeObject* KeyedLoadStubCompiler::CompileLoadField(String* name, 2789 MaybeObject* KeyedLoadStubCompiler::CompileLoadField(String* name,
2781 JSObject* receiver, 2790 JSObject* receiver,
2782 JSObject* holder, 2791 JSObject* holder,
2783 int index) { 2792 int index) {
2784 // ----------- S t a t e ------------- 2793 // ----------- S t a t e -------------
2785 // -- rax : key 2794 // -- rax : key
2786 // -- rdx : receiver 2795 // -- rdx : receiver
2787 // -- rsp[0] : return address 2796 // -- rsp[0] : return address
2788 // ----------------------------------- 2797 // -----------------------------------
2789 Label miss; 2798 Label miss;
2790 2799
2791 __ IncrementCounter(&Counters::keyed_load_field, 1); 2800 __ IncrementCounter(COUNTERS->keyed_load_field(), 1);
2792 2801
2793 // Check that the name has not changed. 2802 // Check that the name has not changed.
2794 __ Cmp(rax, Handle<String>(name)); 2803 __ Cmp(rax, Handle<String>(name));
2795 __ j(not_equal, &miss); 2804 __ j(not_equal, &miss);
2796 2805
2797 GenerateLoadField(receiver, holder, rdx, rbx, rcx, rdi, index, name, &miss); 2806 GenerateLoadField(receiver, holder, rdx, rbx, rcx, rdi, index, name, &miss);
2798 2807
2799 __ bind(&miss); 2808 __ bind(&miss);
2800 __ DecrementCounter(&Counters::keyed_load_field, 1); 2809 __ DecrementCounter(COUNTERS->keyed_load_field(), 1);
2801 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC); 2810 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
2802 2811
2803 // Return the generated code. 2812 // Return the generated code.
2804 return GetCode(FIELD, name); 2813 return GetCode(FIELD, name);
2805 } 2814 }
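
Each of these keyed load stubs is compiled for one specific property name, so the prologue compares the incoming key against that exact string and misses if it differs; only then does the specialized field, callback, constant, or interceptor load run. Below is a compact sketch of that shape, using an invented KeyedLoadStubModel in place of the generated code.

    #include <cstdio>
    #include <string>

    // Toy model of a keyed load stub specialized for a single property name.
    struct KeyedLoadStubModel {
      std::string compiled_for;  // the name baked into the stub
      int field_value;           // stands in for the inlined fast load

      // Returns true and fills *out on a hit; false means "miss", i.e. jump to
      // the generic keyed load IC in the real code.
      bool TryLoad(const std::string& key, int* out) const {
        if (key != compiled_for) return false;  // name changed since compilation
        *out = field_value;                     // the specialized fast path
        return true;
      }
    };

    int main() {
      KeyedLoadStubModel stub{"length", 3};
      int result = 0;
      bool hit = stub.TryLoad("length", &result);   // matches the baked-in name
      bool second = stub.TryLoad("name", &result);  // different key -> miss
      std::printf("hit=%d value=%d second=%d\n", hit, result, second);
      return 0;
    }
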
2806 2815
2807 2816
2808 MaybeObject* KeyedLoadStubCompiler::CompileLoadCallback( 2817 MaybeObject* KeyedLoadStubCompiler::CompileLoadCallback(
2809 String* name, 2818 String* name,
2810 JSObject* receiver, 2819 JSObject* receiver,
2811 JSObject* holder, 2820 JSObject* holder,
2812 AccessorInfo* callback) { 2821 AccessorInfo* callback) {
2813 // ----------- S t a t e ------------- 2822 // ----------- S t a t e -------------
2814 // -- rax : key 2823 // -- rax : key
2815 // -- rdx : receiver 2824 // -- rdx : receiver
2816 // -- rsp[0] : return address 2825 // -- rsp[0] : return address
2817 // ----------------------------------- 2826 // -----------------------------------
2818 Label miss; 2827 Label miss;
2819 2828
2820 __ IncrementCounter(&Counters::keyed_load_callback, 1); 2829 __ IncrementCounter(COUNTERS->keyed_load_callback(), 1);
2821 2830
2822 // Check that the name has not changed. 2831 // Check that the name has not changed.
2823 __ Cmp(rax, Handle<String>(name)); 2832 __ Cmp(rax, Handle<String>(name));
2824 __ j(not_equal, &miss); 2833 __ j(not_equal, &miss);
2825 2834
2826 MaybeObject* result = GenerateLoadCallback(receiver, holder, rdx, rax, rbx, 2835 MaybeObject* result = GenerateLoadCallback(receiver, holder, rdx, rax, rbx,
2827 rcx, rdi, callback, name, &miss); 2836 rcx, rdi, callback, name, &miss);
2828 if (result->IsFailure()) { 2837 if (result->IsFailure()) {
2829 miss.Unuse(); 2838 miss.Unuse();
2830 return result; 2839 return result;
2831 } 2840 }
2832 2841
2833 __ bind(&miss); 2842 __ bind(&miss);
2834 2843
2835 __ DecrementCounter(&Counters::keyed_load_callback, 1); 2844 __ DecrementCounter(COUNTERS->keyed_load_callback(), 1);
2836 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC); 2845 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
2837 2846
2838 // Return the generated code. 2847 // Return the generated code.
2839 return GetCode(CALLBACKS, name); 2848 return GetCode(CALLBACKS, name);
2840 } 2849 }
2841 2850
2842 2851
2843 MaybeObject* KeyedLoadStubCompiler::CompileLoadConstant(String* name, 2852 MaybeObject* KeyedLoadStubCompiler::CompileLoadConstant(String* name,
2844 JSObject* receiver, 2853 JSObject* receiver,
2845 JSObject* holder, 2854 JSObject* holder,
2846 Object* value) { 2855 Object* value) {
2847 // ----------- S t a t e ------------- 2856 // ----------- S t a t e -------------
2848 // -- rax : key 2857 // -- rax : key
2849 // -- rdx : receiver 2858 // -- rdx : receiver
2850 // -- rsp[0] : return address 2859 // -- rsp[0] : return address
2851 // ----------------------------------- 2860 // -----------------------------------
2852 Label miss; 2861 Label miss;
2853 2862
2854 __ IncrementCounter(&Counters::keyed_load_constant_function, 1); 2863 __ IncrementCounter(COUNTERS->keyed_load_constant_function(), 1);
2855 2864
2856 // Check that the name has not changed. 2865 // Check that the name has not changed.
2857 __ Cmp(rax, Handle<String>(name)); 2866 __ Cmp(rax, Handle<String>(name));
2858 __ j(not_equal, &miss); 2867 __ j(not_equal, &miss);
2859 2868
2860 GenerateLoadConstant(receiver, holder, rdx, rbx, rcx, rdi, 2869 GenerateLoadConstant(receiver, holder, rdx, rbx, rcx, rdi,
2861 value, name, &miss); 2870 value, name, &miss);
2862 __ bind(&miss); 2871 __ bind(&miss);
2863 __ DecrementCounter(&Counters::keyed_load_constant_function, 1); 2872 __ DecrementCounter(COUNTERS->keyed_load_constant_function(), 1);
2864 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC); 2873 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
2865 2874
2866 // Return the generated code. 2875 // Return the generated code.
2867 return GetCode(CONSTANT_FUNCTION, name); 2876 return GetCode(CONSTANT_FUNCTION, name);
2868 } 2877 }
2869 2878
2870 2879
2871 MaybeObject* KeyedLoadStubCompiler::CompileLoadInterceptor(JSObject* receiver, 2880 MaybeObject* KeyedLoadStubCompiler::CompileLoadInterceptor(JSObject* receiver,
2872 JSObject* holder, 2881 JSObject* holder,
2873 String* name) { 2882 String* name) {
2874 // ----------- S t a t e ------------- 2883 // ----------- S t a t e -------------
2875 // -- rax : key 2884 // -- rax : key
2876 // -- rdx : receiver 2885 // -- rdx : receiver
2877 // -- rsp[0] : return address 2886 // -- rsp[0] : return address
2878 // ----------------------------------- 2887 // -----------------------------------
2879 Label miss; 2888 Label miss;
2880 2889
2881 __ IncrementCounter(&Counters::keyed_load_interceptor, 1); 2890 __ IncrementCounter(COUNTERS->keyed_load_interceptor(), 1);
2882 2891
2883 // Check that the name has not changed. 2892 // Check that the name has not changed.
2884 __ Cmp(rax, Handle<String>(name)); 2893 __ Cmp(rax, Handle<String>(name));
2885 __ j(not_equal, &miss); 2894 __ j(not_equal, &miss);
2886 2895
2887 LookupResult lookup; 2896 LookupResult lookup;
2888 LookupPostInterceptor(holder, name, &lookup); 2897 LookupPostInterceptor(holder, name, &lookup);
2889 GenerateLoadInterceptor(receiver, 2898 GenerateLoadInterceptor(receiver,
2890 holder, 2899 holder,
2891 &lookup, 2900 &lookup,
2892 rdx, 2901 rdx,
2893 rax, 2902 rax,
2894 rcx, 2903 rcx,
2895 rbx, 2904 rbx,
2896 rdi, 2905 rdi,
2897 name, 2906 name,
2898 &miss); 2907 &miss);
2899 __ bind(&miss); 2908 __ bind(&miss);
2900 __ DecrementCounter(&Counters::keyed_load_interceptor, 1); 2909 __ DecrementCounter(COUNTERS->keyed_load_interceptor(), 1);
2901 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC); 2910 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
2902 2911
2903 // Return the generated code. 2912 // Return the generated code.
2904 return GetCode(INTERCEPTOR, name); 2913 return GetCode(INTERCEPTOR, name);
2905 } 2914 }
2906 2915
2907 2916
2908 MaybeObject* KeyedLoadStubCompiler::CompileLoadArrayLength(String* name) { 2917 MaybeObject* KeyedLoadStubCompiler::CompileLoadArrayLength(String* name) {
2909 // ----------- S t a t e ------------- 2918 // ----------- S t a t e -------------
2910 // -- rax : key 2919 // -- rax : key
2911 // -- rdx : receiver 2920 // -- rdx : receiver
2912 // -- rsp[0] : return address 2921 // -- rsp[0] : return address
2913 // ----------------------------------- 2922 // -----------------------------------
2914 Label miss; 2923 Label miss;
2915 2924
2916 __ IncrementCounter(&Counters::keyed_load_array_length, 1); 2925 __ IncrementCounter(COUNTERS->keyed_load_array_length(), 1);
2917 2926
2918 // Check that the name has not changed. 2927 // Check that the name has not changed.
2919 __ Cmp(rax, Handle<String>(name)); 2928 __ Cmp(rax, Handle<String>(name));
2920 __ j(not_equal, &miss); 2929 __ j(not_equal, &miss);
2921 2930
2922 GenerateLoadArrayLength(masm(), rdx, rcx, &miss); 2931 GenerateLoadArrayLength(masm(), rdx, rcx, &miss);
2923 __ bind(&miss); 2932 __ bind(&miss);
2924 __ DecrementCounter(&Counters::keyed_load_array_length, 1); 2933 __ DecrementCounter(COUNTERS->keyed_load_array_length(), 1);
2925 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC); 2934 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
2926 2935
2927 // Return the generated code. 2936 // Return the generated code.
2928 return GetCode(CALLBACKS, name); 2937 return GetCode(CALLBACKS, name);
2929 } 2938 }
2930 2939
2931 2940
2932 MaybeObject* KeyedLoadStubCompiler::CompileLoadStringLength(String* name) { 2941 MaybeObject* KeyedLoadStubCompiler::CompileLoadStringLength(String* name) {
2933 // ----------- S t a t e ------------- 2942 // ----------- S t a t e -------------
2934 // -- rax : key 2943 // -- rax : key
2935 // -- rdx : receiver 2944 // -- rdx : receiver
2936 // -- rsp[0] : return address 2945 // -- rsp[0] : return address
2937 // ----------------------------------- 2946 // -----------------------------------
2938 Label miss; 2947 Label miss;
2939 2948
2940 __ IncrementCounter(&Counters::keyed_load_string_length, 1); 2949 __ IncrementCounter(COUNTERS->keyed_load_string_length(), 1);
2941 2950
2942 // Check that the name has not changed. 2951 // Check that the name has not changed.
2943 __ Cmp(rax, Handle<String>(name)); 2952 __ Cmp(rax, Handle<String>(name));
2944 __ j(not_equal, &miss); 2953 __ j(not_equal, &miss);
2945 2954
2946 GenerateLoadStringLength(masm(), rdx, rcx, rbx, &miss, true); 2955 GenerateLoadStringLength(masm(), rdx, rcx, rbx, &miss, true);
2947 __ bind(&miss); 2956 __ bind(&miss);
2948 __ DecrementCounter(&Counters::keyed_load_string_length, 1); 2957 __ DecrementCounter(COUNTERS->keyed_load_string_length(), 1);
2949 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC); 2958 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
2950 2959
2951 // Return the generated code. 2960 // Return the generated code.
2952 return GetCode(CALLBACKS, name); 2961 return GetCode(CALLBACKS, name);
2953 } 2962 }
2954 2963
2955 2964
2956 MaybeObject* KeyedLoadStubCompiler::CompileLoadFunctionPrototype(String* name) { 2965 MaybeObject* KeyedLoadStubCompiler::CompileLoadFunctionPrototype(String* name) {
2957 // ----------- S t a t e ------------- 2966 // ----------- S t a t e -------------
2958 // -- rax : key 2967 // -- rax : key
2959 // -- rdx : receiver 2968 // -- rdx : receiver
2960 // -- rsp[0] : return address 2969 // -- rsp[0] : return address
2961 // ----------------------------------- 2970 // -----------------------------------
2962 Label miss; 2971 Label miss;
2963 2972
2964 __ IncrementCounter(&Counters::keyed_load_function_prototype, 1); 2973 __ IncrementCounter(COUNTERS->keyed_load_function_prototype(), 1);
2965 2974
2966 // Check that the name has not changed. 2975 // Check that the name has not changed.
2967 __ Cmp(rax, Handle<String>(name)); 2976 __ Cmp(rax, Handle<String>(name));
2968 __ j(not_equal, &miss); 2977 __ j(not_equal, &miss);
2969 2978
2970 GenerateLoadFunctionPrototype(masm(), rdx, rcx, rbx, &miss); 2979 GenerateLoadFunctionPrototype(masm(), rdx, rcx, rbx, &miss);
2971 __ bind(&miss); 2980 __ bind(&miss);
2972 __ DecrementCounter(&Counters::keyed_load_function_prototype, 1); 2981 __ DecrementCounter(COUNTERS->keyed_load_function_prototype(), 1);
2973 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC); 2982 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
2974 2983
2975 // Return the generated code. 2984 // Return the generated code.
2976 return GetCode(CALLBACKS, name); 2985 return GetCode(CALLBACKS, name);
2977 } 2986 }
2978 2987
2979 2988
2980 MaybeObject* KeyedLoadStubCompiler::CompileLoadSpecialized(JSObject* receiver) { 2989 MaybeObject* KeyedLoadStubCompiler::CompileLoadSpecialized(JSObject* receiver) {
2981 // ----------- S t a t e ------------- 2990 // ----------- S t a t e -------------
2982 // -- rax : key 2991 // -- rax : key
(...skipping 45 matching lines...)
3028 MaybeObject* ConstructStubCompiler::CompileConstructStub(JSFunction* function) { 3037 MaybeObject* ConstructStubCompiler::CompileConstructStub(JSFunction* function) {
3029 // ----------- S t a t e ------------- 3038 // ----------- S t a t e -------------
3030 // -- rax : argc 3039 // -- rax : argc
3031 // -- rdi : constructor 3040 // -- rdi : constructor
3032 // -- rsp[0] : return address 3041 // -- rsp[0] : return address
3033 // -- rsp[4] : last argument 3042 // -- rsp[4] : last argument
3034 // ----------------------------------- 3043 // -----------------------------------
3035 Label generic_stub_call; 3044 Label generic_stub_call;
3036 3045
3037 // Use r8 for holding undefined which is used in several places below. 3046 // Use r8 for holding undefined which is used in several places below.
3038 __ Move(r8, Factory::undefined_value()); 3047 __ Move(r8, FACTORY->undefined_value());
3039 3048
3040 #ifdef ENABLE_DEBUGGER_SUPPORT 3049 #ifdef ENABLE_DEBUGGER_SUPPORT
3041 // Check to see whether there are any break points in the function code. If 3050 // Check to see whether there are any break points in the function code. If
3042 // there are jump to the generic constructor stub which calls the actual 3051 // there are jump to the generic constructor stub which calls the actual
3043 // code for the function thereby hitting the break points. 3052 // code for the function thereby hitting the break points.
3044 __ movq(rbx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset)); 3053 __ movq(rbx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
3045 __ movq(rbx, FieldOperand(rbx, SharedFunctionInfo::kDebugInfoOffset)); 3054 __ movq(rbx, FieldOperand(rbx, SharedFunctionInfo::kDebugInfoOffset));
3046 __ cmpq(rbx, r8); 3055 __ cmpq(rbx, r8);
3047 __ j(not_equal, &generic_stub_call); 3056 __ j(not_equal, &generic_stub_call);
3048 #endif 3057 #endif
(...skipping 23 matching lines...)
3072 rdx, 3081 rdx,
3073 rcx, 3082 rcx,
3074 no_reg, 3083 no_reg,
3075 &generic_stub_call, 3084 &generic_stub_call,
3076 NO_ALLOCATION_FLAGS); 3085 NO_ALLOCATION_FLAGS);
3077 3086
3078 // Allocated the JSObject, now initialize the fields and add the heap tag. 3087 // Allocated the JSObject, now initialize the fields and add the heap tag.
3079 // rbx: initial map 3088 // rbx: initial map
3080 // rdx: JSObject (untagged) 3089 // rdx: JSObject (untagged)
3081 __ movq(Operand(rdx, JSObject::kMapOffset), rbx); 3090 __ movq(Operand(rdx, JSObject::kMapOffset), rbx);
3082 __ Move(rbx, Factory::empty_fixed_array()); 3091 __ Move(rbx, FACTORY->empty_fixed_array());
3083 __ movq(Operand(rdx, JSObject::kPropertiesOffset), rbx); 3092 __ movq(Operand(rdx, JSObject::kPropertiesOffset), rbx);
3084 __ movq(Operand(rdx, JSObject::kElementsOffset), rbx); 3093 __ movq(Operand(rdx, JSObject::kElementsOffset), rbx);
3085 3094
3086 // rax: argc 3095 // rax: argc
3087 // rdx: JSObject (untagged) 3096 // rdx: JSObject (untagged)
3088 // Load the address of the first in-object property into r9. 3097 // Load the address of the first in-object property into r9.
3089 __ lea(r9, Operand(rdx, JSObject::kHeaderSize)); 3098 __ lea(r9, Operand(rdx, JSObject::kHeaderSize));
3090 // Calculate the location of the first argument. The stack contains only the 3099 // Calculate the location of the first argument. The stack contains only the
3091 // return address on top of the argc arguments. 3100 // return address on top of the argc arguments.
3092 __ lea(rcx, Operand(rsp, rax, times_pointer_size, 0)); 3101 __ lea(rcx, Operand(rsp, rax, times_pointer_size, 0));
(...skipping 38 matching lines...)
3131 __ movq(rbx, rax); 3140 __ movq(rbx, rax);
3132 __ movq(rax, rdx); 3141 __ movq(rax, rdx);
3133 __ or_(rax, Immediate(kHeapObjectTag)); 3142 __ or_(rax, Immediate(kHeapObjectTag));
3134 3143
3135 // rax: JSObject 3144 // rax: JSObject
3136 // rbx: argc 3145 // rbx: argc
3137 // Remove caller arguments and receiver from the stack and return. 3146 // Remove caller arguments and receiver from the stack and return.
3138 __ pop(rcx); 3147 __ pop(rcx);
3139 __ lea(rsp, Operand(rsp, rbx, times_pointer_size, 1 * kPointerSize)); 3148 __ lea(rsp, Operand(rsp, rbx, times_pointer_size, 1 * kPointerSize));
3140 __ push(rcx); 3149 __ push(rcx);
3141 __ IncrementCounter(&Counters::constructed_objects, 1); 3150 __ IncrementCounter(COUNTERS->constructed_objects(), 1);
3142 __ IncrementCounter(&Counters::constructed_objects_stub, 1); 3151 __ IncrementCounter(COUNTERS->constructed_objects_stub(), 1);
3143 __ ret(0); 3152 __ ret(0);
3144 3153
3145 // Jump to the generic stub in case the specialized code cannot handle the 3154 // Jump to the generic stub in case the specialized code cannot handle the
3146 // construction. 3155 // construction.
3147 __ bind(&generic_stub_call); 3156 __ bind(&generic_stub_call);
3148 Code* code = Builtins::builtin(Builtins::JSConstructStubGeneric); 3157 Code* code = Isolate::Current()->builtins()->builtin(
3158 Builtins::JSConstructStubGeneric);
3149 Handle<Code> generic_construct_stub(code); 3159 Handle<Code> generic_construct_stub(code);
3150 __ Jump(generic_construct_stub, RelocInfo::CODE_TARGET); 3160 __ Jump(generic_construct_stub, RelocInfo::CODE_TARGET);
3151 3161
3152 // Return the generated code. 3162 // Return the generated code.
3153 return GetCode(); 3163 return GetCode();
3154 } 3164 }
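
For simple constructors the specialized construct stub above allocates the object straight from the initial map and then fills the in-object fields, taking values from the incoming arguments where they are available and seeding the remaining fields with undefined (kept in r8 throughout). The sketch below models only that fill rule; the names and the use of -1 for undefined are assumptions of the example, not V8 behaviour in detail.

    #include <cstdio>
    #include <vector>

    // Toy model of in-object field initialization in a specialized construct
    // stub: fields come from the constructor arguments where available and are
    // otherwise seeded with undefined (represented as -1 in this sketch).
    std::vector<int> InitializeInObjectFields(const std::vector<int>& args,
                                              size_t field_count) {
      const int kUndefinedMarker = -1;
      std::vector<int> fields(field_count, kUndefinedMarker);
      for (size_t i = 0; i < field_count && i < args.size(); ++i) {
        fields[i] = args[i];  // argument-backed field
      }
      return fields;
    }

    int main() {
      for (int field : InitializeInObjectFields({10, 20}, 4)) {
        std::printf("%d ", field);  // prints: 10 20 -1 -1
      }
      std::printf("\n");
      return 0;
    }
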
3155 3165
3156 3166
3157 MaybeObject* ExternalArrayStubCompiler::CompileKeyedLoadStub( 3167 MaybeObject* ExternalArrayStubCompiler::CompileKeyedLoadStub(
3158 JSObject* receiver, ExternalArrayType array_type, Code::Flags flags) { 3168 JSObject* receiver, ExternalArrayType array_type, Code::Flags flags) {
(...skipping 94 matching lines...)
3253 __ movsd(FieldOperand(rcx, HeapNumber::kValueOffset), xmm0); 3263 __ movsd(FieldOperand(rcx, HeapNumber::kValueOffset), xmm0);
3254 __ movq(rax, rcx); 3264 __ movq(rax, rcx);
3255 __ ret(0); 3265 __ ret(0);
3256 } else { 3266 } else {
3257 __ Integer32ToSmi(rax, rcx); 3267 __ Integer32ToSmi(rax, rcx);
3258 __ ret(0); 3268 __ ret(0);
3259 } 3269 }
3260 3270
3261 // Slow case: Jump to runtime. 3271 // Slow case: Jump to runtime.
3262 __ bind(&slow); 3272 __ bind(&slow);
3263 __ IncrementCounter(&Counters::keyed_load_external_array_slow, 1); 3273 __ IncrementCounter(COUNTERS->keyed_load_external_array_slow(), 1);
3264 3274
3265 // ----------- S t a t e ------------- 3275 // ----------- S t a t e -------------
3266 // -- rax : key 3276 // -- rax : key
3267 // -- rdx : receiver 3277 // -- rdx : receiver
3268 // -- rsp[0] : return address 3278 // -- rsp[0] : return address
3269 // ----------------------------------- 3279 // -----------------------------------
3270 3280
3271 __ pop(rbx); 3281 __ pop(rbx);
3272 __ push(rdx); // receiver 3282 __ push(rdx); // receiver
3273 __ push(rax); // name 3283 __ push(rax); // name
(...skipping 170 matching lines...)
3444 __ TailCallRuntime(Runtime::kSetProperty, 5, 1); 3454 __ TailCallRuntime(Runtime::kSetProperty, 5, 1);
3445 3455
3446 return GetCode(flags); 3456 return GetCode(flags);
3447 } 3457 }
3448 3458
3449 #undef __ 3459 #undef __
3450 3460
3451 } } // namespace v8::internal 3461 } } // namespace v8::internal
3452 3462
3453 #endif // V8_TARGET_ARCH_X64 3463 #endif // V8_TARGET_ARCH_X64