Chromium Code Reviews

Side by Side Diff: src/x64/stub-cache-x64.cc

Issue 6085006: Reorder the functions in stub-cache-x64.cc, so they are in the same order as ... (Closed) Base URL: http://v8.googlecode.com/svn/branches/bleeding_edge/
Patch Set: '' Created 9 years, 11 months ago
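
Not part of the patch, just orientation for the diff below: GenerateFastPropertyLoad and GenerateStoreField, two of the moved helpers, decide between an in-object slot and the out-of-line properties FixedArray with the same offset arithmetic. A minimal C++ sketch of that computation; the helper name and local constants are illustrative stand-ins rather than V8 API:

    // Illustrative sketch only -- not part of this CL.
    static const int kPointerSize = 8;                          // x64, matches ASSERT_EQ(8, kPointerSize)
    static const int kFixedArrayHeaderSize = 2 * kPointerSize;  // map + length words (assumption)

    // Byte offset of property 'index' from the start of the holding store:
    // a negative adjusted index is an in-object slot, a non-negative one
    // lands in the separate properties FixedArray.
    static int FastPropertyOffset(int index,
                                  int inobject_properties,
                                  int instance_size) {
      index -= inobject_properties;
      if (index < 0) {
        return instance_size + index * kPointerSize;            // in-object slot
      }
      return kFixedArrayHeaderSize + index * kPointerSize;      // properties array slot
    }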
OLD | NEW
1 // Copyright 2010 the V8 project authors. All rights reserved. 1 // Copyright 2010 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution. 11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its 12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived 13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission. 14 // from this software without specific prior written permission.
15 // 15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT 19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT 21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 27
28
29 #include "v8.h" 28 #include "v8.h"
30 29
31 #if defined(V8_TARGET_ARCH_X64) 30 #if defined(V8_TARGET_ARCH_X64)
32 31
33 #include "ic-inl.h" 32 #include "ic-inl.h"
34 #include "code-stubs.h"
35 #include "codegen-inl.h" 33 #include "codegen-inl.h"
36 #include "stub-cache.h" 34 #include "stub-cache.h"
37 #include "macro-assembler.h"
38 35
39 namespace v8 { 36 namespace v8 {
40 namespace internal { 37 namespace internal {
41 38
42 //-----------------------------------------------------------------------------
43 // StubCompiler static helper functions
44
45 #define __ ACCESS_MASM(masm) 39 #define __ ACCESS_MASM(masm)
46 40
47 41
48 static void ProbeTable(MacroAssembler* masm, 42 static void ProbeTable(MacroAssembler* masm,
49 Code::Flags flags, 43 Code::Flags flags,
50 StubCache::Table table, 44 StubCache::Table table,
51 Register name, 45 Register name,
52 Register offset) { 46 Register offset) {
53 ASSERT_EQ(8, kPointerSize); 47 ASSERT_EQ(8, kPointerSize);
54 ASSERT_EQ(16, sizeof(StubCache::Entry)); 48 ASSERT_EQ(16, sizeof(StubCache::Entry));
(...skipping 120 matching lines...)
175 // Give up probing if still not found the undefined value. 169 // Give up probing if still not found the undefined value.
176 __ j(not_equal, miss_label); 170 __ j(not_equal, miss_label);
177 } 171 }
178 } 172 }
179 173
180 __ bind(&done); 174 __ bind(&done);
181 __ DecrementCounter(&Counters::negative_lookups_miss, 1); 175 __ DecrementCounter(&Counters::negative_lookups_miss, 1);
182 } 176 }
183 177
184 178
185 void StubCompiler::GenerateLoadMiss(MacroAssembler* masm, Code::Kind kind) {
186 ASSERT(kind == Code::LOAD_IC || kind == Code::KEYED_LOAD_IC);
187 Code* code = NULL;
188 if (kind == Code::LOAD_IC) {
189 code = Builtins::builtin(Builtins::LoadIC_Miss);
190 } else {
191 code = Builtins::builtin(Builtins::KeyedLoadIC_Miss);
192 }
193
194 Handle<Code> ic(code);
195 __ Jump(ic, RelocInfo::CODE_TARGET);
196 }
197
198
199 void StubCompiler::GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm,
200 int index,
201 Register prototype) {
202 // Load the global or builtins object from the current context.
203 __ movq(prototype,
204 Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
205 // Load the global context from the global or builtins object.
206 __ movq(prototype,
207 FieldOperand(prototype, GlobalObject::kGlobalContextOffset));
208 // Load the function from the global context.
209 __ movq(prototype, Operand(prototype, Context::SlotOffset(index)));
210 // Load the initial map. The global functions all have initial maps.
211 __ movq(prototype,
212 FieldOperand(prototype, JSFunction::kPrototypeOrInitialMapOffset));
213 // Load the prototype from the initial map.
214 __ movq(prototype, FieldOperand(prototype, Map::kPrototypeOffset));
215 }
216
217
218 void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype(
219 MacroAssembler* masm, int index, Register prototype, Label* miss) {
220 // Check we're still in the same context.
221 __ Move(prototype, Top::global());
222 __ cmpq(Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)),
223 prototype);
224 __ j(not_equal, miss);
225 // Get the global function with the given index.
226 JSFunction* function = JSFunction::cast(Top::global_context()->get(index));
227 // Load its initial map. The global functions all have initial maps.
228 __ Move(prototype, Handle<Map>(function->initial_map()));
229 // Load the prototype from the initial map.
230 __ movq(prototype, FieldOperand(prototype, Map::kPrototypeOffset));
231 }
232
233
234 // Load a fast property out of a holder object (src). In-object properties
235 // are loaded directly; otherwise the property is loaded from the properties
236 // fixed array.
237 void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm,
238 Register dst, Register src,
239 JSObject* holder, int index) {
240 // Adjust for the number of properties stored in the holder.
241 index -= holder->map()->inobject_properties();
242 if (index < 0) {
243 // Get the property straight out of the holder.
244 int offset = holder->map()->instance_size() + (index * kPointerSize);
245 __ movq(dst, FieldOperand(src, offset));
246 } else {
247 // Calculate the offset into the properties array.
248 int offset = index * kPointerSize + FixedArray::kHeaderSize;
249 __ movq(dst, FieldOperand(src, JSObject::kPropertiesOffset));
250 __ movq(dst, FieldOperand(dst, offset));
251 }
252 }
253
254
255 static void PushInterceptorArguments(MacroAssembler* masm,
256 Register receiver,
257 Register holder,
258 Register name,
259 JSObject* holder_obj) {
260 __ push(name);
261 InterceptorInfo* interceptor = holder_obj->GetNamedInterceptor();
262 ASSERT(!Heap::InNewSpace(interceptor));
263 __ Move(kScratchRegister, Handle<Object>(interceptor));
264 __ push(kScratchRegister);
265 __ push(receiver);
266 __ push(holder);
267 __ push(FieldOperand(kScratchRegister, InterceptorInfo::kDataOffset));
268 }
269
270
271 void StubCache::GenerateProbe(MacroAssembler* masm, 179 void StubCache::GenerateProbe(MacroAssembler* masm,
272 Code::Flags flags, 180 Code::Flags flags,
273 Register receiver, 181 Register receiver,
274 Register name, 182 Register name,
275 Register scratch, 183 Register scratch,
276 Register extra, 184 Register extra,
277 Register extra2) { 185 Register extra2) {
278 Label miss; 186 Label miss;
279 USE(extra); // The register extra is not used on the X64 platform. 187 USE(extra); // The register extra is not used on the X64 platform.
280 USE(extra2); // The register extra2 is not used on the X64 platform. 188 USE(extra2); // The register extra2 is not used on the X64 platform.
(...skipping 36 matching lines...)
317 225
318 // Probe the secondary table. 226 // Probe the secondary table.
319 ProbeTable(masm, flags, kSecondary, name, scratch); 227 ProbeTable(masm, flags, kSecondary, name, scratch);
320 228
321 // Cache miss: Fall-through and let caller handle the miss by 229 // Cache miss: Fall-through and let caller handle the miss by
322 // entering the runtime system. 230 // entering the runtime system.
323 __ bind(&miss); 231 __ bind(&miss);
324 } 232 }
325 233
326 234
327 // Both name_reg and receiver_reg are preserved on jumps to miss_label, 235 void StubCompiler::GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm,
328 // but may be destroyed if store is successful. 236 int index,
329 void StubCompiler::GenerateStoreField(MacroAssembler* masm, 237 Register prototype) {
330 JSObject* object, 238 // Load the global or builtins object from the current context.
331 int index, 239 __ movq(prototype,
332 Map* transition, 240 Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
333 Register receiver_reg, 241 // Load the global context from the global or builtins object.
334 Register name_reg, 242 __ movq(prototype,
335 Register scratch, 243 FieldOperand(prototype, GlobalObject::kGlobalContextOffset));
336 Label* miss_label) { 244 // Load the function from the global context.
337 // Check that the object isn't a smi. 245 __ movq(prototype, Operand(prototype, Context::SlotOffset(index)));
338 __ JumpIfSmi(receiver_reg, miss_label); 246 // Load the initial map. The global functions all have initial maps.
339 247 __ movq(prototype,
340 // Check that the map of the object hasn't changed. 248 FieldOperand(prototype, JSFunction::kPrototypeOrInitialMapOffset));
341 __ Cmp(FieldOperand(receiver_reg, HeapObject::kMapOffset), 249 // Load the prototype from the initial map.
342 Handle<Map>(object->map())); 250 __ movq(prototype, FieldOperand(prototype, Map::kPrototypeOffset));
343 __ j(not_equal, miss_label);
344
345 // Perform global security token check if needed.
346 if (object->IsJSGlobalProxy()) {
347 __ CheckAccessGlobalProxy(receiver_reg, scratch, miss_label);
348 }
349
350 // Stub never generated for non-global objects that require access
351 // checks.
352 ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
353
354 // Perform map transition for the receiver if necessary.
355 if ((transition != NULL) && (object->map()->unused_property_fields() == 0)) {
356 // The properties must be extended before we can store the value.
357 // We jump to a runtime call that extends the properties array.
358 __ pop(scratch); // Return address.
359 __ push(receiver_reg);
360 __ Push(Handle<Map>(transition));
361 __ push(rax);
362 __ push(scratch);
363 __ TailCallExternalReference(
364 ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage)), 3, 1);
365 return;
366 }
367
368 if (transition != NULL) {
369 // Update the map of the object; no write barrier updating is
370 // needed because the map is never in new space.
371 __ Move(FieldOperand(receiver_reg, HeapObject::kMapOffset),
372 Handle<Map>(transition));
373 }
374
375 // Adjust for the number of properties stored in the object. Even in the
376 // face of a transition we can use the old map here because the size of the
377 // object and the number of in-object properties is not going to change.
378 index -= object->map()->inobject_properties();
379
380 if (index < 0) {
381 // Set the property straight into the object.
382 int offset = object->map()->instance_size() + (index * kPointerSize);
383 __ movq(FieldOperand(receiver_reg, offset), rax);
384
385 // Update the write barrier for the array address.
386 // Pass the value being stored in the now unused name_reg.
387 __ movq(name_reg, rax);
388 __ RecordWrite(receiver_reg, offset, name_reg, scratch);
389 } else {
390 // Write to the properties array.
391 int offset = index * kPointerSize + FixedArray::kHeaderSize;
392 // Get the properties array (optimistically).
393 __ movq(scratch, FieldOperand(receiver_reg, JSObject::kPropertiesOffset));
394 __ movq(FieldOperand(scratch, offset), rax);
395
396 // Update the write barrier for the array address.
397 // Pass the value being stored in the now unused name_reg.
398 __ movq(name_reg, rax);
399 __ RecordWrite(scratch, offset, name_reg, receiver_reg);
400 }
401
402 // Return the value (register rax).
403 __ ret(0);
404 } 251 }
405 252
406 253
254 void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype(
255 MacroAssembler* masm, int index, Register prototype, Label* miss) {
256 // Check we're still in the same context.
257 __ Move(prototype, Top::global());
258 __ cmpq(Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)),
259 prototype);
260 __ j(not_equal, miss);
261 // Get the global function with the given index.
262 JSFunction* function = JSFunction::cast(Top::global_context()->get(index));
263 // Load its initial map. The global functions all have initial maps.
264 __ Move(prototype, Handle<Map>(function->initial_map()));
265 // Load the prototype from the initial map.
266 __ movq(prototype, FieldOperand(prototype, Map::kPrototypeOffset));
267 }
268
269
407 void StubCompiler::GenerateLoadArrayLength(MacroAssembler* masm, 270 void StubCompiler::GenerateLoadArrayLength(MacroAssembler* masm,
408 Register receiver, 271 Register receiver,
409 Register scratch, 272 Register scratch,
410 Label* miss_label) { 273 Label* miss_label) {
411 // Check that the receiver isn't a smi. 274 // Check that the receiver isn't a smi.
412 __ JumpIfSmi(receiver, miss_label); 275 __ JumpIfSmi(receiver, miss_label);
413 276
414 // Check that the object is a JS array. 277 // Check that the object is a JS array.
415 __ CmpObjectType(receiver, JS_ARRAY_TYPE, scratch); 278 __ CmpObjectType(receiver, JS_ARRAY_TYPE, scratch);
416 __ j(not_equal, miss_label); 279 __ j(not_equal, miss_label);
(...skipping 45 matching lines...)
462 325
463 // Check if the wrapped value is a string and load the length 326 // Check if the wrapped value is a string and load the length
464 // directly if it is. 327 // directly if it is.
465 __ movq(scratch2, FieldOperand(receiver, JSValue::kValueOffset)); 328 __ movq(scratch2, FieldOperand(receiver, JSValue::kValueOffset));
466 GenerateStringCheck(masm, scratch2, scratch1, miss, miss); 329 GenerateStringCheck(masm, scratch2, scratch1, miss, miss);
467 __ movq(rax, FieldOperand(scratch2, String::kLengthOffset)); 330 __ movq(rax, FieldOperand(scratch2, String::kLengthOffset));
468 __ ret(0); 331 __ ret(0);
469 } 332 }
470 333
471 334
335 void StubCompiler::GenerateLoadFunctionPrototype(MacroAssembler* masm,
336 Register receiver,
337 Register result,
338 Register scratch,
339 Label* miss_label) {
340 __ TryGetFunctionPrototype(receiver, result, miss_label);
341 if (!result.is(rax)) __ movq(rax, result);
342 __ ret(0);
343 }
344
345
346 // Load a fast property out of a holder object (src). In-object properties
347 // are loaded directly; otherwise the property is loaded from the properties
348 // fixed array.
349 void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm,
350 Register dst, Register src,
351 JSObject* holder, int index) {
352 // Adjust for the number of properties stored in the holder.
353 index -= holder->map()->inobject_properties();
354 if (index < 0) {
355 // Get the property straight out of the holder.
356 int offset = holder->map()->instance_size() + (index * kPointerSize);
357 __ movq(dst, FieldOperand(src, offset));
358 } else {
359 // Calculate the offset into the properties array.
360 int offset = index * kPointerSize + FixedArray::kHeaderSize;
361 __ movq(dst, FieldOperand(src, JSObject::kPropertiesOffset));
362 __ movq(dst, FieldOperand(dst, offset));
363 }
364 }
365
366
367 static void PushInterceptorArguments(MacroAssembler* masm,
368 Register receiver,
369 Register holder,
370 Register name,
371 JSObject* holder_obj) {
372 __ push(name);
373 InterceptorInfo* interceptor = holder_obj->GetNamedInterceptor();
374 ASSERT(!Heap::InNewSpace(interceptor));
375 __ Move(kScratchRegister, Handle<Object>(interceptor));
376 __ push(kScratchRegister);
377 __ push(receiver);
378 __ push(holder);
379 __ push(FieldOperand(kScratchRegister, InterceptorInfo::kDataOffset));
380 }
381
382
472 static void CompileCallLoadPropertyWithInterceptor(MacroAssembler* masm, 383 static void CompileCallLoadPropertyWithInterceptor(MacroAssembler* masm,
473 Register receiver, 384 Register receiver,
474 Register holder, 385 Register holder,
475 Register name, 386 Register name,
476 JSObject* holder_obj) { 387 JSObject* holder_obj) {
477 PushInterceptorArguments(masm, receiver, holder, name, holder_obj); 388 PushInterceptorArguments(masm, receiver, holder, name, holder_obj);
478 389
479 ExternalReference ref = 390 ExternalReference ref =
480 ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorOnly)); 391 ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorOnly));
481 __ movq(rax, Immediate(5)); 392 __ movq(rax, Immediate(5));
482 __ movq(rbx, ref); 393 __ movq(rbx, ref);
483 394
484 CEntryStub stub(1); 395 CEntryStub stub(1);
485 __ CallStub(&stub); 396 __ CallStub(&stub);
486 } 397 }
487 398
488 399
489
490 void StubCompiler::GenerateLoadFunctionPrototype(MacroAssembler* masm,
491 Register receiver,
492 Register result,
493 Register scratch,
494 Label* miss_label) {
495 __ TryGetFunctionPrototype(receiver, result, miss_label);
496 if (!result.is(rax)) __ movq(rax, result);
497 __ ret(0);
498 }
499
500 // Number of pointers to be reserved on stack for fast API call. 400 // Number of pointers to be reserved on stack for fast API call.
501 static const int kFastApiCallArguments = 3; 401 static const int kFastApiCallArguments = 3;
502 402
403
503 // Reserves space for the extra arguments to API function in the 404 // Reserves space for the extra arguments to API function in the
504 // caller's frame. 405 // caller's frame.
505 // 406 //
506 // These arguments are set by CheckPrototypes and GenerateFastApiCall. 407 // These arguments are set by CheckPrototypes and GenerateFastApiCall.
507 static void ReserveSpaceForFastApiCall(MacroAssembler* masm, Register scratch) { 408 static void ReserveSpaceForFastApiCall(MacroAssembler* masm, Register scratch) {
508 // ----------- S t a t e ------------- 409 // ----------- S t a t e -------------
509 // -- rsp[0] : return address 410 // -- rsp[0] : return address
510 // -- rsp[8] : last argument in the internal frame of the caller 411 // -- rsp[8] : last argument in the internal frame of the caller
511 // ----------------------------------- 412 // -----------------------------------
512 __ movq(scratch, Operand(rsp, 0)); 413 __ movq(scratch, Operand(rsp, 0));
(...skipping 33 matching lines...)
546 // (last fast api call extra argument, 447 // (last fast api call extra argument,
547 // set by CheckPrototypes) 448 // set by CheckPrototypes)
548 // -- rsp[16] : api function 449 // -- rsp[16] : api function
549 // (first fast api call extra argument) 450 // (first fast api call extra argument)
550 // -- rsp[24] : api call data 451 // -- rsp[24] : api call data
551 // -- rsp[32] : last argument 452 // -- rsp[32] : last argument
552 // -- ... 453 // -- ...
553 // -- rsp[(argc + 3) * 8] : first argument 454 // -- rsp[(argc + 3) * 8] : first argument
554 // -- rsp[(argc + 4) * 8] : receiver 455 // -- rsp[(argc + 4) * 8] : receiver
555 // ----------------------------------- 456 // -----------------------------------
556
557 // Get the function and setup the context. 457 // Get the function and setup the context.
558 JSFunction* function = optimization.constant_function(); 458 JSFunction* function = optimization.constant_function();
559 __ Move(rdi, Handle<JSFunction>(function)); 459 __ Move(rdi, Handle<JSFunction>(function));
560 __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset)); 460 __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
561 461
562 // Pass the additional arguments. 462 // Pass the additional arguments.
563 __ movq(Operand(rsp, 2 * kPointerSize), rdi); 463 __ movq(Operand(rsp, 2 * kPointerSize), rdi);
564 Object* call_data = optimization.api_call_info()->data(); 464 Object* call_data = optimization.api_call_info()->data();
565 Handle<CallHandlerInfo> api_call_info_handle(optimization.api_call_info()); 465 Handle<CallHandlerInfo> api_call_info_handle(optimization.api_call_info());
566 if (Heap::InNewSpace(call_data)) { 466 if (Heap::InNewSpace(call_data)) {
(...skipping 259 matching lines...)
826 __ CompareRoot(rax, Heap::kNoInterceptorResultSentinelRootIndex); 726 __ CompareRoot(rax, Heap::kNoInterceptorResultSentinelRootIndex);
827 __ j(not_equal, interceptor_succeeded); 727 __ j(not_equal, interceptor_succeeded);
828 } 728 }
829 729
830 StubCompiler* stub_compiler_; 730 StubCompiler* stub_compiler_;
831 const ParameterCount& arguments_; 731 const ParameterCount& arguments_;
832 Register name_; 732 Register name_;
833 }; 733 };
834 734
835 735
736 void StubCompiler::GenerateLoadMiss(MacroAssembler* masm, Code::Kind kind) {
737 ASSERT(kind == Code::LOAD_IC || kind == Code::KEYED_LOAD_IC);
738 Code* code = NULL;
739 if (kind == Code::LOAD_IC) {
740 code = Builtins::builtin(Builtins::LoadIC_Miss);
741 } else {
742 code = Builtins::builtin(Builtins::KeyedLoadIC_Miss);
743 }
744
745 Handle<Code> ic(code);
746 __ Jump(ic, RelocInfo::CODE_TARGET);
747 }
748
749
750 // Both name_reg and receiver_reg are preserved on jumps to miss_label,
751 // but may be destroyed if store is successful.
752 void StubCompiler::GenerateStoreField(MacroAssembler* masm,
753 JSObject* object,
754 int index,
755 Map* transition,
756 Register receiver_reg,
757 Register name_reg,
758 Register scratch,
759 Label* miss_label) {
760 // Check that the object isn't a smi.
761 __ JumpIfSmi(receiver_reg, miss_label);
762
763 // Check that the map of the object hasn't changed.
764 __ Cmp(FieldOperand(receiver_reg, HeapObject::kMapOffset),
765 Handle<Map>(object->map()));
766 __ j(not_equal, miss_label);
767
768 // Perform global security token check if needed.
769 if (object->IsJSGlobalProxy()) {
770 __ CheckAccessGlobalProxy(receiver_reg, scratch, miss_label);
771 }
772
773 // Stub never generated for non-global objects that require access
774 // checks.
775 ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
776
777 // Perform map transition for the receiver if necessary.
778 if ((transition != NULL) && (object->map()->unused_property_fields() == 0)) {
779 // The properties must be extended before we can store the value.
780 // We jump to a runtime call that extends the properties array.
781 __ pop(scratch); // Return address.
782 __ push(receiver_reg);
783 __ Push(Handle<Map>(transition));
784 __ push(rax);
785 __ push(scratch);
786 __ TailCallExternalReference(
787 ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage)), 3, 1);
788 return;
789 }
790
791 if (transition != NULL) {
792 // Update the map of the object; no write barrier updating is
793 // needed because the map is never in new space.
794 __ Move(FieldOperand(receiver_reg, HeapObject::kMapOffset),
795 Handle<Map>(transition));
796 }
797
798 // Adjust for the number of properties stored in the object. Even in the
799 // face of a transition we can use the old map here because the size of the
800 // object and the number of in-object properties is not going to change.
801 index -= object->map()->inobject_properties();
802
803 if (index < 0) {
804 // Set the property straight into the object.
805 int offset = object->map()->instance_size() + (index * kPointerSize);
806 __ movq(FieldOperand(receiver_reg, offset), rax);
807
808 // Update the write barrier for the array address.
809 // Pass the value being stored in the now unused name_reg.
810 __ movq(name_reg, rax);
811 __ RecordWrite(receiver_reg, offset, name_reg, scratch);
812 } else {
813 // Write to the properties array.
814 int offset = index * kPointerSize + FixedArray::kHeaderSize;
815 // Get the properties array (optimistically).
816 __ movq(scratch, FieldOperand(receiver_reg, JSObject::kPropertiesOffset));
817 __ movq(FieldOperand(scratch, offset), rax);
818
819 // Update the write barrier for the array address.
820 // Pass the value being stored in the now unused name_reg.
821 __ movq(name_reg, rax);
822 __ RecordWrite(scratch, offset, name_reg, receiver_reg);
823 }
824
825 // Return the value (register rax).
826 __ ret(0);
827 }
828
829
836 // Generate code to check that a global property cell is empty. Create 830 // Generate code to check that a global property cell is empty. Create
837 // the property cell at compilation time if no cell exists for the 831 // the property cell at compilation time if no cell exists for the
838 // property. 832 // property.
839 MUST_USE_RESULT static MaybeObject* GenerateCheckPropertyCell( 833 MUST_USE_RESULT static MaybeObject* GenerateCheckPropertyCell(
840 MacroAssembler* masm, 834 MacroAssembler* masm,
841 GlobalObject* global, 835 GlobalObject* global,
842 String* name, 836 String* name,
843 Register scratch, 837 Register scratch,
844 Label* miss) { 838 Label* miss) {
845 Object* probe; 839 Object* probe;
846 { MaybeObject* maybe_probe = global->EnsurePropertyCell(name); 840 { MaybeObject* maybe_probe = global->EnsurePropertyCell(name);
847 if (!maybe_probe->ToObject(&probe)) return maybe_probe; 841 if (!maybe_probe->ToObject(&probe)) return maybe_probe;
848 } 842 }
849 JSGlobalPropertyCell* cell = JSGlobalPropertyCell::cast(probe); 843 JSGlobalPropertyCell* cell = JSGlobalPropertyCell::cast(probe);
850 ASSERT(cell->value()->IsTheHole()); 844 ASSERT(cell->value()->IsTheHole());
851 __ Move(scratch, Handle<Object>(cell)); 845 __ Move(scratch, Handle<Object>(cell));
852 __ Cmp(FieldOperand(scratch, JSGlobalPropertyCell::kValueOffset), 846 __ Cmp(FieldOperand(scratch, JSGlobalPropertyCell::kValueOffset),
853 Factory::the_hole_value()); 847 Factory::the_hole_value());
854 __ j(not_equal, miss); 848 __ j(not_equal, miss);
855 return cell; 849 return cell;
856 } 850 }
857 851
858 852
859 #undef __ 853 #undef __
860
861 #define __ ACCESS_MASM((masm())) 854 #define __ ACCESS_MASM((masm()))
862 855
863 856
857 Register StubCompiler::CheckPrototypes(JSObject* object,
858 Register object_reg,
859 JSObject* holder,
860 Register holder_reg,
861 Register scratch1,
862 Register scratch2,
863 String* name,
864 int save_at_depth,
865 Label* miss) {
866 // Make sure there's no overlap between holder and object registers.
867 ASSERT(!scratch1.is(object_reg) && !scratch1.is(holder_reg));
868 ASSERT(!scratch2.is(object_reg) && !scratch2.is(holder_reg)
869 && !scratch2.is(scratch1));
870
871 // Keep track of the current object in register reg. On the first
872 // iteration, reg is an alias for object_reg, on later iterations,
873 // it is an alias for holder_reg.
874 Register reg = object_reg;
875 int depth = 0;
876
877 if (save_at_depth == depth) {
878 __ movq(Operand(rsp, kPointerSize), object_reg);
879 }
880
881 // Check the maps in the prototype chain.
882 // Traverse the prototype chain from the object and do map checks.
883 JSObject* current = object;
884 while (current != holder) {
885 depth++;
886
887 // Only global objects and objects that do not require access
888 // checks are allowed in stubs.
889 ASSERT(current->IsJSGlobalProxy() || !current->IsAccessCheckNeeded());
890
891 JSObject* prototype = JSObject::cast(current->GetPrototype());
892 if (!current->HasFastProperties() &&
893 !current->IsJSGlobalObject() &&
894 !current->IsJSGlobalProxy()) {
895 if (!name->IsSymbol()) {
896 MaybeObject* lookup_result = Heap::LookupSymbol(name);
897 if (lookup_result->IsFailure()) {
898 set_failure(Failure::cast(lookup_result));
899 return reg;
900 } else {
901 name = String::cast(lookup_result->ToObjectUnchecked());
902 }
903 }
904 ASSERT(current->property_dictionary()->FindEntry(name) ==
905 StringDictionary::kNotFound);
906
907 GenerateDictionaryNegativeLookup(masm(),
908 miss,
909 reg,
910 name,
911 scratch1,
912 scratch2);
913 __ movq(scratch1, FieldOperand(reg, HeapObject::kMapOffset));
914 reg = holder_reg; // from now the object is in holder_reg
915 __ movq(reg, FieldOperand(scratch1, Map::kPrototypeOffset));
916 } else if (Heap::InNewSpace(prototype)) {
917 // Get the map of the current object.
918 __ movq(scratch1, FieldOperand(reg, HeapObject::kMapOffset));
919 __ Cmp(scratch1, Handle<Map>(current->map()));
920 // Branch on the result of the map check.
921 __ j(not_equal, miss);
922 // Check access rights to the global object. This has to happen
923 // after the map check so that we know that the object is
924 // actually a global object.
925 if (current->IsJSGlobalProxy()) {
926 __ CheckAccessGlobalProxy(reg, scratch1, miss);
927
928 // Restore scratch register to be the map of the object.
929 // We load the prototype from the map in the scratch register.
930 __ movq(scratch1, FieldOperand(reg, HeapObject::kMapOffset));
931 }
932 // The prototype is in new space; we cannot store a reference
933 // to it in the code. Load it from the map.
934 reg = holder_reg; // from now the object is in holder_reg
935 __ movq(reg, FieldOperand(scratch1, Map::kPrototypeOffset));
936
937 } else {
938 // Check the map of the current object.
939 __ Cmp(FieldOperand(reg, HeapObject::kMapOffset),
940 Handle<Map>(current->map()));
941 // Branch on the result of the map check.
942 __ j(not_equal, miss);
943 // Check access rights to the global object. This has to happen
944 // after the map check so that we know that the object is
945 // actually a global object.
946 if (current->IsJSGlobalProxy()) {
947 __ CheckAccessGlobalProxy(reg, scratch1, miss);
948 }
949 // The prototype is in old space; load it directly.
950 reg = holder_reg; // from now the object is in holder_reg
951 __ Move(reg, Handle<JSObject>(prototype));
952 }
953
954 if (save_at_depth == depth) {
955 __ movq(Operand(rsp, kPointerSize), reg);
956 }
957
958 // Go to the next object in the prototype chain.
959 current = prototype;
960 }
961
962 // Check the holder map.
963 __ Cmp(FieldOperand(reg, HeapObject::kMapOffset), Handle<Map>(holder->map()));
964 __ j(not_equal, miss);
965
966 // Log the check depth.
967 LOG(IntEvent("check-maps-depth", depth + 1));
968
969 // Perform security check for access to the global object and return
970 // the holder register.
971 ASSERT(current == holder);
972 ASSERT(current->IsJSGlobalProxy() || !current->IsAccessCheckNeeded());
973 if (current->IsJSGlobalProxy()) {
974 __ CheckAccessGlobalProxy(reg, scratch1, miss);
975 }
976
977 // If we've skipped any global objects, it's not enough to verify
978 // that their maps haven't changed. We also need to check that the
979 // property cell for the property is still empty.
980 current = object;
981 while (current != holder) {
982 if (current->IsGlobalObject()) {
983 MaybeObject* cell = GenerateCheckPropertyCell(masm(),
984 GlobalObject::cast(current),
985 name,
986 scratch1,
987 miss);
988 if (cell->IsFailure()) {
989 set_failure(Failure::cast(cell));
990 return reg;
991 }
992 }
993 current = JSObject::cast(current->GetPrototype());
994 }
995
996 // Return the register containing the holder.
997 return reg;
998 }
999
1000
1001 void StubCompiler::GenerateLoadField(JSObject* object,
1002 JSObject* holder,
1003 Register receiver,
1004 Register scratch1,
1005 Register scratch2,
1006 Register scratch3,
1007 int index,
1008 String* name,
1009 Label* miss) {
1010 // Check that the receiver isn't a smi.
1011 __ JumpIfSmi(receiver, miss);
1012
1013 // Check the prototype chain.
1014 Register reg =
1015 CheckPrototypes(object, receiver, holder,
1016 scratch1, scratch2, scratch3, name, miss);
1017
1018 // Get the value from the properties.
1019 GenerateFastPropertyLoad(masm(), rax, reg, holder, index);
1020 __ ret(0);
1021 }
1022
1023
1024 bool StubCompiler::GenerateLoadCallback(JSObject* object,
1025 JSObject* holder,
1026 Register receiver,
1027 Register name_reg,
1028 Register scratch1,
1029 Register scratch2,
1030 Register scratch3,
1031 AccessorInfo* callback,
1032 String* name,
1033 Label* miss,
1034 Failure** failure) {
1035 // Check that the receiver isn't a smi.
1036 __ JumpIfSmi(receiver, miss);
1037
1038 // Check that the maps haven't changed.
1039 Register reg =
1040 CheckPrototypes(object, receiver, holder, scratch1,
1041 scratch2, scratch3, name, miss);
1042
1043 Handle<AccessorInfo> callback_handle(callback);
1044
1045 // Insert additional parameters into the stack frame above return address.
1046 ASSERT(!scratch2.is(reg));
1047 __ pop(scratch2); // Get return address to place it below.
1048
1049 __ push(receiver); // receiver
1050 __ push(reg); // holder
1051 if (Heap::InNewSpace(callback_handle->data())) {
1052 __ Move(scratch1, callback_handle);
1053 __ push(FieldOperand(scratch1, AccessorInfo::kDataOffset)); // data
1054 } else {
1055 __ Push(Handle<Object>(callback_handle->data()));
1056 }
1057 __ push(name_reg); // name
1058 // Save a pointer to where we pushed the arguments pointer.
1059 // This will be passed as the const AccessorInfo& to the C++ callback.
1060
1061 #ifdef _WIN64
1062 // Win64 uses first register--rcx--for returned value.
1063 Register accessor_info_arg = r8;
1064 Register name_arg = rdx;
1065 #else
1066 Register accessor_info_arg = rsi;
1067 Register name_arg = rdi;
1068 #endif
1069
1070 ASSERT(!name_arg.is(scratch2));
1071 __ movq(name_arg, rsp);
1072 __ push(scratch2); // Restore return address.
1073
1074 // Do call through the api.
1075 Address getter_address = v8::ToCData<Address>(callback->getter());
1076 ApiFunction fun(getter_address);
1077
1078 // 3-element array for v8::Arguments::values_ and handler for name.
1079 const int kStackSpace = 4;
1080
1081 // Allocate v8::AccessorInfo in non-GCed stack space.
1082 const int kArgStackSpace = 1;
1083
1084 __ PrepareCallApiFunction(kArgStackSpace);
1085 __ lea(rax, Operand(name_arg, 3 * kPointerSize));
1086
1087 // v8::AccessorInfo::args_.
1088 __ movq(StackSpaceOperand(0), rax);
1089
1090 // The context register (rsi) has been saved in PrepareCallApiFunction and
1091 // could be used to pass arguments.
1092 __ lea(accessor_info_arg, StackSpaceOperand(0));
1093
1094 // Emitting a stub call may try to allocate (if the code is not
1095 // already generated). Do not allow the assembler to perform a
1096 // garbage collection but instead return the allocation failure
1097 // object.
1098 MaybeObject* result = masm()->TryCallApiFunctionAndReturn(&fun, kStackSpace);
1099 if (result->IsFailure()) {
1100 *failure = Failure::cast(result);
1101 return false;
1102 }
1103 return true;
1104 }
1105
1106
1107 void StubCompiler::GenerateLoadConstant(JSObject* object,
1108 JSObject* holder,
1109 Register receiver,
1110 Register scratch1,
1111 Register scratch2,
1112 Register scratch3,
1113 Object* value,
1114 String* name,
1115 Label* miss) {
1116 // Check that the receiver isn't a smi.
1117 __ JumpIfSmi(receiver, miss);
1118
1119 // Check that the maps haven't changed.
1120 Register reg =
1121 CheckPrototypes(object, receiver, holder,
1122 scratch1, scratch2, scratch3, name, miss);
1123
1124 // Return the constant value.
1125 __ Move(rax, Handle<Object>(value));
1126 __ ret(0);
1127 }
1128
1129
1130 void StubCompiler::GenerateLoadInterceptor(JSObject* object,
1131 JSObject* interceptor_holder,
1132 LookupResult* lookup,
1133 Register receiver,
1134 Register name_reg,
1135 Register scratch1,
1136 Register scratch2,
1137 Register scratch3,
1138 String* name,
1139 Label* miss) {
1140 ASSERT(interceptor_holder->HasNamedInterceptor());
1141 ASSERT(!interceptor_holder->GetNamedInterceptor()->getter()->IsUndefined());
1142
1143 // Check that the receiver isn't a smi.
1144 __ JumpIfSmi(receiver, miss);
1145
1146 // So far the most popular follow ups for interceptor loads are FIELD
1147 // and CALLBACKS, so inline only them, other cases may be added
1148 // later.
1149 bool compile_followup_inline = false;
1150 if (lookup->IsProperty() && lookup->IsCacheable()) {
1151 if (lookup->type() == FIELD) {
1152 compile_followup_inline = true;
1153 } else if (lookup->type() == CALLBACKS &&
1154 lookup->GetCallbackObject()->IsAccessorInfo() &&
1155 AccessorInfo::cast(lookup->GetCallbackObject())->getter() != NULL) {
1156 compile_followup_inline = true;
1157 }
1158 }
1159
1160 if (compile_followup_inline) {
1161 // Compile the interceptor call, followed by inline code to load the
1162 // property from further up the prototype chain if the call fails.
1163 // Check that the maps haven't changed.
1164 Register holder_reg = CheckPrototypes(object, receiver, interceptor_holder,
1165 scratch1, scratch2, scratch3,
1166 name, miss);
1167 ASSERT(holder_reg.is(receiver) || holder_reg.is(scratch1));
1168
1169 // Save necessary data before invoking an interceptor.
1170 // Requires a frame to make GC aware of pushed pointers.
1171 __ EnterInternalFrame();
1172
1173 if (lookup->type() == CALLBACKS && !receiver.is(holder_reg)) {
1174 // CALLBACKS case needs a receiver to be passed into C++ callback.
1175 __ push(receiver);
1176 }
1177 __ push(holder_reg);
1178 __ push(name_reg);
1179
1180 // Invoke an interceptor. Note: map checks from receiver to
1181 // interceptor's holder have been compiled before (see a caller
1182 // of this method).
1183 CompileCallLoadPropertyWithInterceptor(masm(),
1184 receiver,
1185 holder_reg,
1186 name_reg,
1187 interceptor_holder);
1188
1189 // Check if interceptor provided a value for property. If it's
1190 // the case, return immediately.
1191 Label interceptor_failed;
1192 __ CompareRoot(rax, Heap::kNoInterceptorResultSentinelRootIndex);
1193 __ j(equal, &interceptor_failed);
1194 __ LeaveInternalFrame();
1195 __ ret(0);
1196
1197 __ bind(&interceptor_failed);
1198 __ pop(name_reg);
1199 __ pop(holder_reg);
1200 if (lookup->type() == CALLBACKS && !receiver.is(holder_reg)) {
1201 __ pop(receiver);
1202 }
1203
1204 __ LeaveInternalFrame();
1205
1206 // Check that the maps from interceptor's holder to lookup's holder
1207 // haven't changed. And load lookup's holder into |holder| register.
1208 if (interceptor_holder != lookup->holder()) {
1209 holder_reg = CheckPrototypes(interceptor_holder,
1210 holder_reg,
1211 lookup->holder(),
1212 scratch1,
1213 scratch2,
1214 scratch3,
1215 name,
1216 miss);
1217 }
1218
1219 if (lookup->type() == FIELD) {
1220 // We found FIELD property in prototype chain of interceptor's holder.
1221 // Retrieve a field from field's holder.
1222 GenerateFastPropertyLoad(masm(), rax, holder_reg,
1223 lookup->holder(), lookup->GetFieldIndex());
1224 __ ret(0);
1225 } else {
1226 // We found CALLBACKS property in prototype chain of interceptor's
1227 // holder.
1228 ASSERT(lookup->type() == CALLBACKS);
1229 ASSERT(lookup->GetCallbackObject()->IsAccessorInfo());
1230 AccessorInfo* callback = AccessorInfo::cast(lookup->GetCallbackObject());
1231 ASSERT(callback != NULL);
1232 ASSERT(callback->getter() != NULL);
1233
1234 // Tail call to runtime.
1235 // Important invariant in CALLBACKS case: the code above must be
1236 // structured to never clobber |receiver| register.
1237 __ pop(scratch2); // return address
1238 __ push(receiver);
1239 __ push(holder_reg);
1240 __ Move(holder_reg, Handle<AccessorInfo>(callback));
1241 __ push(FieldOperand(holder_reg, AccessorInfo::kDataOffset));
1242 __ push(holder_reg);
1243 __ push(name_reg);
1244 __ push(scratch2); // restore return address
1245
1246 ExternalReference ref =
1247 ExternalReference(IC_Utility(IC::kLoadCallbackProperty));
1248 __ TailCallExternalReference(ref, 5, 1);
1249 }
1250 } else { // !compile_followup_inline
1251 // Call the runtime system to load the interceptor.
1252 // Check that the maps haven't changed.
1253 Register holder_reg = CheckPrototypes(object, receiver, interceptor_holder,
1254 scratch1, scratch2, scratch3,
1255 name, miss);
1256 __ pop(scratch2); // save old return address
1257 PushInterceptorArguments(masm(), receiver, holder_reg,
1258 name_reg, interceptor_holder);
1259 __ push(scratch2); // restore old return address
1260
1261 ExternalReference ref = ExternalReference(
1262 IC_Utility(IC::kLoadPropertyWithInterceptorForLoad));
1263 __ TailCallExternalReference(ref, 5, 1);
1264 }
1265 }
1266
1267
864 void CallStubCompiler::GenerateNameCheck(String* name, Label* miss) { 1268 void CallStubCompiler::GenerateNameCheck(String* name, Label* miss) {
865 if (kind_ == Code::KEYED_CALL_IC) { 1269 if (kind_ == Code::KEYED_CALL_IC) {
866 __ Cmp(rcx, Handle<String>(name)); 1270 __ Cmp(rcx, Handle<String>(name));
867 __ j(not_equal, miss); 1271 __ j(not_equal, miss);
868 } 1272 }
869 } 1273 }
870 1274
871 1275
872 void CallStubCompiler::GenerateGlobalReceiverCheck(JSObject* object, 1276 void CallStubCompiler::GenerateGlobalReceiverCheck(JSObject* object,
873 JSObject* holder, 1277 JSObject* holder,
(...skipping 51 matching lines...)
925 MaybeObject* CallStubCompiler::GenerateMissBranch() { 1329 MaybeObject* CallStubCompiler::GenerateMissBranch() {
926 MaybeObject* maybe_obj = 1330 MaybeObject* maybe_obj =
927 StubCache::ComputeCallMiss(arguments().immediate(), kind_); 1331 StubCache::ComputeCallMiss(arguments().immediate(), kind_);
928 Object* obj; 1332 Object* obj;
929 if (!maybe_obj->ToObject(&obj)) return maybe_obj; 1333 if (!maybe_obj->ToObject(&obj)) return maybe_obj;
930 __ Jump(Handle<Code>(Code::cast(obj)), RelocInfo::CODE_TARGET); 1334 __ Jump(Handle<Code>(Code::cast(obj)), RelocInfo::CODE_TARGET);
931 return obj; 1335 return obj;
932 } 1336 }
933 1337
934 1338
935 MaybeObject* CallStubCompiler::CompileCallConstant(Object* object,
936 JSObject* holder,
937 JSFunction* function,
938 String* name,
939 CheckType check) {
940 // ----------- S t a t e -------------
941 // rcx : function name
942 // rsp[0] : return address
943 // rsp[8] : argument argc
944 // rsp[16] : argument argc - 1
945 // ...
946 // rsp[argc * 8] : argument 1
947 // rsp[(argc + 1) * 8] : argument 0 = receiver
948 // -----------------------------------
949
950 SharedFunctionInfo* function_info = function->shared();
951 if (function_info->HasBuiltinFunctionId()) {
952 BuiltinFunctionId id = function_info->builtin_function_id();
953 MaybeObject* maybe_result = CompileCustomCall(
954 id, object, holder, NULL, function, name);
955 Object* result;
956 if (!maybe_result->ToObject(&result)) return maybe_result;
957 // undefined means bail out to regular compiler.
958 if (!result->IsUndefined()) return result;
959 }
960
961 Label miss_in_smi_check;
962
963 GenerateNameCheck(name, &miss_in_smi_check);
964
965 // Get the receiver from the stack.
966 const int argc = arguments().immediate();
967 __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
968
969 // Check that the receiver isn't a smi.
970 if (check != NUMBER_CHECK) {
971 __ JumpIfSmi(rdx, &miss_in_smi_check);
972 }
973
974 // Make sure that it's okay not to patch the on stack receiver
975 // unless we're doing a receiver map check.
976 ASSERT(!object->IsGlobalObject() || check == RECEIVER_MAP_CHECK);
977
978 CallOptimization optimization(function);
979 int depth = kInvalidProtoDepth;
980 Label miss;
981
982 switch (check) {
983 case RECEIVER_MAP_CHECK:
984 __ IncrementCounter(&Counters::call_const, 1);
985
986 if (optimization.is_simple_api_call() && !object->IsGlobalObject()) {
987 depth = optimization.GetPrototypeDepthOfExpectedType(
988 JSObject::cast(object), holder);
989 }
990
991 if (depth != kInvalidProtoDepth) {
992 __ IncrementCounter(&Counters::call_const_fast_api, 1);
993 // Allocate space for v8::Arguments implicit values. Must be initialized
994 // before calling any runtime function.
995 __ subq(rsp, Immediate(kFastApiCallArguments * kPointerSize));
996 }
997
998 // Check that the maps haven't changed.
999 CheckPrototypes(JSObject::cast(object), rdx, holder,
1000 rbx, rax, rdi, name, depth, &miss);
1001
1002 // Patch the receiver on the stack with the global proxy if
1003 // necessary.
1004 if (object->IsGlobalObject()) {
1005 ASSERT(depth == kInvalidProtoDepth);
1006 __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalReceiverOffset));
1007 __ movq(Operand(rsp, (argc + 1) * kPointerSize), rdx);
1008 }
1009 break;
1010
1011 case STRING_CHECK:
1012 if (!function->IsBuiltin()) {
1013 // Calling non-builtins with a value as receiver requires boxing.
1014 __ jmp(&miss);
1015 } else {
1016 // Check that the object is a two-byte string or a symbol.
1017 __ CmpObjectType(rdx, FIRST_NONSTRING_TYPE, rax);
1018 __ j(above_equal, &miss);
1019 // Check that the maps starting from the prototype haven't changed.
1020 GenerateDirectLoadGlobalFunctionPrototype(
1021 masm(), Context::STRING_FUNCTION_INDEX, rax, &miss);
1022 CheckPrototypes(JSObject::cast(object->GetPrototype()), rax, holder,
1023 rbx, rdx, rdi, name, &miss);
1024 }
1025 break;
1026
1027 case NUMBER_CHECK: {
1028 if (!function->IsBuiltin()) {
1029 // Calling non-builtins with a value as receiver requires boxing.
1030 __ jmp(&miss);
1031 } else {
1032 Label fast;
1033 // Check that the object is a smi or a heap number.
1034 __ JumpIfSmi(rdx, &fast);
1035 __ CmpObjectType(rdx, HEAP_NUMBER_TYPE, rax);
1036 __ j(not_equal, &miss);
1037 __ bind(&fast);
1038 // Check that the maps starting from the prototype haven't changed.
1039 GenerateDirectLoadGlobalFunctionPrototype(
1040 masm(), Context::NUMBER_FUNCTION_INDEX, rax, &miss);
1041 CheckPrototypes(JSObject::cast(object->GetPrototype()), rax, holder,
1042 rbx, rdx, rdi, name, &miss);
1043 }
1044 break;
1045 }
1046
1047 case BOOLEAN_CHECK: {
1048 if (!function->IsBuiltin()) {
1049 // Calling non-builtins with a value as receiver requires boxing.
1050 __ jmp(&miss);
1051 } else {
1052 Label fast;
1053 // Check that the object is a boolean.
1054 __ CompareRoot(rdx, Heap::kTrueValueRootIndex);
1055 __ j(equal, &fast);
1056 __ CompareRoot(rdx, Heap::kFalseValueRootIndex);
1057 __ j(not_equal, &miss);
1058 __ bind(&fast);
1059 // Check that the maps starting from the prototype haven't changed.
1060 GenerateDirectLoadGlobalFunctionPrototype(
1061 masm(), Context::BOOLEAN_FUNCTION_INDEX, rax, &miss);
1062 CheckPrototypes(JSObject::cast(object->GetPrototype()), rax, holder,
1063 rbx, rdx, rdi, name, &miss);
1064 }
1065 break;
1066 }
1067
1068 default:
1069 UNREACHABLE();
1070 }
1071
1072 if (depth != kInvalidProtoDepth) {
1073 Failure* failure;
1074 // Move the return address on top of the stack.
1075 __ movq(rax, Operand(rsp, 3 * kPointerSize));
1076 __ movq(Operand(rsp, 0 * kPointerSize), rax);
1077
1078 // rsp[2 * kPointerSize] is uninitialized, rsp[3 * kPointerSize] contains
1079 // duplicate of return address and will be overwritten.
1080 bool success = GenerateFastApiCall(masm(), optimization, argc, &failure);
1081 if (!success) {
1082 return failure;
1083 }
1084 } else {
1085 __ InvokeFunction(function, arguments(), JUMP_FUNCTION);
1086 }
1087
1088 // Handle call cache miss.
1089 __ bind(&miss);
1090 if (depth != kInvalidProtoDepth) {
1091 __ addq(rsp, Immediate(kFastApiCallArguments * kPointerSize));
1092 }
1093
1094 // Handle call cache miss.
1095 __ bind(&miss_in_smi_check);
1096 Object* obj;
1097 { MaybeObject* maybe_obj = GenerateMissBranch();
1098 if (!maybe_obj->ToObject(&obj)) return maybe_obj;
1099 }
1100
1101 // Return the generated code.
1102 return GetCode(function);
1103 }
1104
1105
1106 MaybeObject* CallStubCompiler::CompileCallField(JSObject* object, 1339 MaybeObject* CallStubCompiler::CompileCallField(JSObject* object,
1107 JSObject* holder, 1340 JSObject* holder,
1108 int index, 1341 int index,
1109 String* name) { 1342 String* name) {
1110 // ----------- S t a t e ------------- 1343 // ----------- S t a t e -------------
1111 // rcx : function name 1344 // rcx : function name
1112 // rsp[0] : return address 1345 // rsp[0] : return address
1113 // rsp[8] : argument argc 1346 // rsp[8] : argument argc
1114 // rsp[16] : argument argc - 1 1347 // rsp[16] : argument argc - 1
1115 // ... 1348 // ...
(...skipping 284 matching lines...)
1400 Object* obj; 1633 Object* obj;
1401 { MaybeObject* maybe_obj = GenerateMissBranch(); 1634 { MaybeObject* maybe_obj = GenerateMissBranch();
1402 if (!maybe_obj->ToObject(&obj)) return maybe_obj; 1635 if (!maybe_obj->ToObject(&obj)) return maybe_obj;
1403 } 1636 }
1404 1637
1405 // Return the generated code. 1638 // Return the generated code.
1406 return GetCode(function); 1639 return GetCode(function);
1407 } 1640 }
1408 1641
1409 1642
1410 MaybeObject* CallStubCompiler::CompileStringCharAtCall( 1643 MaybeObject* CallStubCompiler::CompileStringCharCodeAtCall(
1411 Object* object, 1644 Object* object,
1412 JSObject* holder, 1645 JSObject* holder,
1413 JSGlobalPropertyCell* cell, 1646 JSGlobalPropertyCell* cell,
1414 JSFunction* function, 1647 JSFunction* function,
1415 String* name) { 1648 String* name) {
1416 // ----------- S t a t e ------------- 1649 // ----------- S t a t e -------------
1417 // -- rcx : function name 1650 // -- rcx : function name
1418 // -- rsp[0] : return address 1651 // -- rsp[0] : return address
1419 // -- rsp[(argc - n) * 8] : arg[n] (zero-based) 1652 // -- rsp[(argc - n) * 8] : arg[n] (zero-based)
1420 // -- ... 1653 // -- ...
(...skipping 12 matching lines...)
1433 1666
1434 // Check that the maps starting from the prototype haven't changed. 1667 // Check that the maps starting from the prototype haven't changed.
1435 GenerateDirectLoadGlobalFunctionPrototype(masm(), 1668 GenerateDirectLoadGlobalFunctionPrototype(masm(),
1436 Context::STRING_FUNCTION_INDEX, 1669 Context::STRING_FUNCTION_INDEX,
1437 rax, 1670 rax,
1438 &miss); 1671 &miss);
1439 ASSERT(object != holder); 1672 ASSERT(object != holder);
1440 CheckPrototypes(JSObject::cast(object->GetPrototype()), rax, holder, 1673 CheckPrototypes(JSObject::cast(object->GetPrototype()), rax, holder,
1441 rbx, rdx, rdi, name, &miss); 1674 rbx, rdx, rdi, name, &miss);
1442 1675
1443 Register receiver = rax; 1676 Register receiver = rbx;
1444 Register index = rdi; 1677 Register index = rdi;
1445 Register scratch1 = rbx; 1678 Register scratch = rdx;
1446 Register scratch2 = rdx;
1447 Register result = rax; 1679 Register result = rax;
1448 __ movq(receiver, Operand(rsp, (argc + 1) * kPointerSize)); 1680 __ movq(receiver, Operand(rsp, (argc + 1) * kPointerSize));
1449 if (argc > 0) { 1681 if (argc > 0) {
1450 __ movq(index, Operand(rsp, (argc - 0) * kPointerSize)); 1682 __ movq(index, Operand(rsp, (argc - 0) * kPointerSize));
1451 } else { 1683 } else {
1452 __ LoadRoot(index, Heap::kUndefinedValueRootIndex); 1684 __ LoadRoot(index, Heap::kUndefinedValueRootIndex);
1453 } 1685 }
1454 1686
1455 StringCharAtGenerator char_at_generator(receiver, 1687 StringCharCodeAtGenerator char_code_at_generator(receiver,
1456 index, 1688 index,
1457 scratch1, 1689 scratch,
1458 scratch2, 1690 result,
1459 result, 1691 &miss, // When not a string.
1460 &miss, // When not a string. 1692 &miss, // When not a number.
1461 &miss, // When not a number. 1693 &index_out_of_range,
1462 &index_out_of_range, 1694 STRING_INDEX_IS_NUMBER);
1463 STRING_INDEX_IS_NUMBER); 1695 char_code_at_generator.GenerateFast(masm());
1464 char_at_generator.GenerateFast(masm());
1465 __ ret((argc + 1) * kPointerSize); 1696 __ ret((argc + 1) * kPointerSize);
1466 1697
1467 StubRuntimeCallHelper call_helper; 1698 StubRuntimeCallHelper call_helper;
1468 char_at_generator.GenerateSlow(masm(), call_helper); 1699 char_code_at_generator.GenerateSlow(masm(), call_helper);
1469 1700
1470 __ bind(&index_out_of_range); 1701 __ bind(&index_out_of_range);
1471 __ LoadRoot(rax, Heap::kEmptyStringRootIndex); 1702 __ LoadRoot(rax, Heap::kNanValueRootIndex);
1472 __ ret((argc + 1) * kPointerSize); 1703 __ ret((argc + 1) * kPointerSize);
1473 1704
1474 __ bind(&miss); 1705 __ bind(&miss);
1475 Object* obj; 1706 Object* obj;
1476 { MaybeObject* maybe_obj = GenerateMissBranch(); 1707 { MaybeObject* maybe_obj = GenerateMissBranch();
1477 if (!maybe_obj->ToObject(&obj)) return maybe_obj; 1708 if (!maybe_obj->ToObject(&obj)) return maybe_obj;
1478 } 1709 }
1479 1710
1480 // Return the generated code. 1711 // Return the generated code.
1481 return GetCode(function); 1712 return GetCode(function);
1482 } 1713 }
1483 1714
1484 1715
1485 MaybeObject* CallStubCompiler::CompileStringCharCodeAtCall( 1716 MaybeObject* CallStubCompiler::CompileStringCharAtCall(
1486 Object* object, 1717 Object* object,
1487 JSObject* holder, 1718 JSObject* holder,
1488 JSGlobalPropertyCell* cell, 1719 JSGlobalPropertyCell* cell,
1489 JSFunction* function, 1720 JSFunction* function,
1490 String* name) { 1721 String* name) {
1491 // ----------- S t a t e ------------- 1722 // ----------- S t a t e -------------
1492 // -- rcx : function name 1723 // -- rcx : function name
1493 // -- rsp[0] : return address 1724 // -- rsp[0] : return address
1494 // -- rsp[(argc - n) * 8] : arg[n] (zero-based) 1725 // -- rsp[(argc - n) * 8] : arg[n] (zero-based)
1495 // -- ... 1726 // -- ...
1496 // -- rsp[(argc + 1) * 8] : receiver 1727 // -- rsp[(argc + 1) * 8] : receiver
1497 // ----------------------------------- 1728 // -----------------------------------
1498 1729
1499 // If object is not a string, bail out to regular call. 1730 // If object is not a string, bail out to regular call.
1500 if (!object->IsString() || cell != NULL) return Heap::undefined_value(); 1731 if (!object->IsString() || cell != NULL) return Heap::undefined_value();
1501 1732
1502 const int argc = arguments().immediate(); 1733 const int argc = arguments().immediate();
1503 1734
1504 Label miss; 1735 Label miss;
1505 Label index_out_of_range; 1736 Label index_out_of_range;
1737
1506 GenerateNameCheck(name, &miss); 1738 GenerateNameCheck(name, &miss);
1507 1739
1508 // Check that the maps starting from the prototype haven't changed. 1740 // Check that the maps starting from the prototype haven't changed.
1509 GenerateDirectLoadGlobalFunctionPrototype(masm(), 1741 GenerateDirectLoadGlobalFunctionPrototype(masm(),
1510 Context::STRING_FUNCTION_INDEX, 1742 Context::STRING_FUNCTION_INDEX,
1511 rax, 1743 rax,
1512 &miss); 1744 &miss);
1513 ASSERT(object != holder); 1745 ASSERT(object != holder);
1514 CheckPrototypes(JSObject::cast(object->GetPrototype()), rax, holder, 1746 CheckPrototypes(JSObject::cast(object->GetPrototype()), rax, holder,
1515 rbx, rdx, rdi, name, &miss); 1747 rbx, rdx, rdi, name, &miss);
1516 1748
1517 Register receiver = rbx; 1749 Register receiver = rax;
1518 Register index = rdi; 1750 Register index = rdi;
1519 Register scratch = rdx; 1751 Register scratch1 = rbx;
1752 Register scratch2 = rdx;
1520 Register result = rax; 1753 Register result = rax;
1521 __ movq(receiver, Operand(rsp, (argc + 1) * kPointerSize)); 1754 __ movq(receiver, Operand(rsp, (argc + 1) * kPointerSize));
1522 if (argc > 0) { 1755 if (argc > 0) {
1523 __ movq(index, Operand(rsp, (argc - 0) * kPointerSize)); 1756 __ movq(index, Operand(rsp, (argc - 0) * kPointerSize));
1524 } else { 1757 } else {
1525 __ LoadRoot(index, Heap::kUndefinedValueRootIndex); 1758 __ LoadRoot(index, Heap::kUndefinedValueRootIndex);
1526 } 1759 }
1527 1760
1528 StringCharCodeAtGenerator char_code_at_generator(receiver, 1761 StringCharAtGenerator char_at_generator(receiver,
1529 index, 1762 index,
1530 scratch, 1763 scratch1,
1531 result, 1764 scratch2,
1532 &miss, // When not a string. 1765 result,
1533 &miss, // When not a number. 1766 &miss, // When not a string.
1534 &index_out_of_range, 1767 &miss, // When not a number.
1535 STRING_INDEX_IS_NUMBER); 1768 &index_out_of_range,
1536 char_code_at_generator.GenerateFast(masm()); 1769 STRING_INDEX_IS_NUMBER);
1770 char_at_generator.GenerateFast(masm());
1537 __ ret((argc + 1) * kPointerSize); 1771 __ ret((argc + 1) * kPointerSize);
1538 1772
1539 StubRuntimeCallHelper call_helper; 1773 StubRuntimeCallHelper call_helper;
1540 char_code_at_generator.GenerateSlow(masm(), call_helper); 1774 char_at_generator.GenerateSlow(masm(), call_helper);
1541 1775
1542 __ bind(&index_out_of_range); 1776 __ bind(&index_out_of_range);
1543 __ LoadRoot(rax, Heap::kNanValueRootIndex); 1777 __ LoadRoot(rax, Heap::kEmptyStringRootIndex);
1544 __ ret((argc + 1) * kPointerSize); 1778 __ ret((argc + 1) * kPointerSize);
1545 1779
1546 __ bind(&miss); 1780 __ bind(&miss);
1547 Object* obj; 1781 Object* obj;
1548 { MaybeObject* maybe_obj = GenerateMissBranch(); 1782 { MaybeObject* maybe_obj = GenerateMissBranch();
1549 if (!maybe_obj->ToObject(&obj)) return maybe_obj; 1783 if (!maybe_obj->ToObject(&obj)) return maybe_obj;
1550 } 1784 }
1551 1785
1552 // Return the generated code. 1786 // Return the generated code.
1553 return GetCode(function); 1787 return GetCode(function);
(...skipping 179 matching lines...)
1733 Object* obj; 1967 Object* obj;
1734 { MaybeObject* maybe_obj = GenerateMissBranch(); 1968 { MaybeObject* maybe_obj = GenerateMissBranch();
1735 if (!maybe_obj->ToObject(&obj)) return maybe_obj; 1969 if (!maybe_obj->ToObject(&obj)) return maybe_obj;
1736 } 1970 }
1737 1971
1738 // Return the generated code. 1972 // Return the generated code.
1739 return (cell == NULL) ? GetCode(function) : GetCode(NORMAL, name); 1973 return (cell == NULL) ? GetCode(function) : GetCode(NORMAL, name);
1740 } 1974 }
1741 1975
1742 1976
1977 MaybeObject* CallStubCompiler::CompileCallConstant(Object* object,
1978 JSObject* holder,
1979 JSFunction* function,
1980 String* name,
1981 CheckType check) {
1982 // ----------- S t a t e -------------
1983 // rcx : function name
1984 // rsp[0] : return address
1985 // rsp[8] : argument argc
1986 // rsp[16] : argument argc - 1
1987 // ...
1988 // rsp[argc * 8] : argument 1
1989 // rsp[(argc + 1) * 8] : argument 0 = receiver
1990 // -----------------------------------
1991
1992 SharedFunctionInfo* function_info = function->shared();
1993 if (function_info->HasBuiltinFunctionId()) {
1994 BuiltinFunctionId id = function_info->builtin_function_id();
1995 MaybeObject* maybe_result = CompileCustomCall(
1996 id, object, holder, NULL, function, name);
1997 Object* result;
1998 if (!maybe_result->ToObject(&result)) return maybe_result;
1999 // undefined means bail out to regular compiler.
2000 if (!result->IsUndefined()) return result;
2001 }
2002
2003 Label miss_in_smi_check;
2004
2005 GenerateNameCheck(name, &miss_in_smi_check);
2006
2007 // Get the receiver from the stack.
2008 const int argc = arguments().immediate();
2009 __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
2010
2011 // Check that the receiver isn't a smi.
2012 if (check != NUMBER_CHECK) {
2013 __ JumpIfSmi(rdx, &miss_in_smi_check);
2014 }
2015
2016 // Make sure that it's okay not to patch the on stack receiver
2017 // unless we're doing a receiver map check.
2018 ASSERT(!object->IsGlobalObject() || check == RECEIVER_MAP_CHECK);
2019
2020 CallOptimization optimization(function);
2021 int depth = kInvalidProtoDepth;
2022 Label miss;
2023
2024 switch (check) {
2025 case RECEIVER_MAP_CHECK:
2026 __ IncrementCounter(&Counters::call_const, 1);
2027
2028 if (optimization.is_simple_api_call() && !object->IsGlobalObject()) {
2029 depth = optimization.GetPrototypeDepthOfExpectedType(
2030 JSObject::cast(object), holder);
2031 }
2032
2033 if (depth != kInvalidProtoDepth) {
2034 __ IncrementCounter(&Counters::call_const_fast_api, 1);
2035
2036 // Allocate space for v8::Arguments implicit values. Must be initialized
2037 // before calling any runtime function.
2038 __ subq(rsp, Immediate(kFastApiCallArguments * kPointerSize));
2039 }
2040
2041 // Check that the maps haven't changed.
2042 CheckPrototypes(JSObject::cast(object), rdx, holder,
2043 rbx, rax, rdi, name, depth, &miss);
2044
2045 // Patch the receiver on the stack with the global proxy if
2046 // necessary.
2047 if (object->IsGlobalObject()) {
2048 ASSERT(depth == kInvalidProtoDepth);
2049 __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalReceiverOffset));
2050 __ movq(Operand(rsp, (argc + 1) * kPointerSize), rdx);
2051 }
2052 break;
2053
2054 case STRING_CHECK:
2055 if (!function->IsBuiltin()) {
2056 // Calling non-builtins with a value as receiver requires boxing.
2057 __ jmp(&miss);
2058 } else {
2059 // Check that the object is a two-byte string or a symbol.
2060 __ CmpObjectType(rdx, FIRST_NONSTRING_TYPE, rax);
2061 __ j(above_equal, &miss);
2062 // Check that the maps starting from the prototype haven't changed.
2063 GenerateDirectLoadGlobalFunctionPrototype(
2064 masm(), Context::STRING_FUNCTION_INDEX, rax, &miss);
2065 CheckPrototypes(JSObject::cast(object->GetPrototype()), rax, holder,
2066 rbx, rdx, rdi, name, &miss);
2067 }
2068 break;
2069
2070 case NUMBER_CHECK: {
2071 if (!function->IsBuiltin()) {
2072 // Calling non-builtins with a value as receiver requires boxing.
2073 __ jmp(&miss);
2074 } else {
2075 Label fast;
2076 // Check that the object is a smi or a heap number.
2077 __ JumpIfSmi(rdx, &fast);
2078 __ CmpObjectType(rdx, HEAP_NUMBER_TYPE, rax);
2079 __ j(not_equal, &miss);
2080 __ bind(&fast);
2081 // Check that the maps starting from the prototype haven't changed.
2082 GenerateDirectLoadGlobalFunctionPrototype(
2083 masm(), Context::NUMBER_FUNCTION_INDEX, rax, &miss);
2084 CheckPrototypes(JSObject::cast(object->GetPrototype()), rax, holder,
2085 rbx, rdx, rdi, name, &miss);
2086 }
2087 break;
2088 }
2089
2090 case BOOLEAN_CHECK: {
2091 if (!function->IsBuiltin()) {
2092 // Calling non-builtins with a value as receiver requires boxing.
2093 __ jmp(&miss);
2094 } else {
2095 Label fast;
2096 // Check that the object is a boolean.
2097 __ CompareRoot(rdx, Heap::kTrueValueRootIndex);
2098 __ j(equal, &fast);
2099 __ CompareRoot(rdx, Heap::kFalseValueRootIndex);
2100 __ j(not_equal, &miss);
2101 __ bind(&fast);
2102 // Check that the maps starting from the prototype haven't changed.
2103 GenerateDirectLoadGlobalFunctionPrototype(
2104 masm(), Context::BOOLEAN_FUNCTION_INDEX, rax, &miss);
2105 CheckPrototypes(JSObject::cast(object->GetPrototype()), rax, holder,
2106 rbx, rdx, rdi, name, &miss);
2107 }
2108 break;
2109 }
2110
2111 default:
2112 UNREACHABLE();
2113 }
2114
2115 if (depth != kInvalidProtoDepth) {
2116 Failure* failure;
2117 // Move the return address on top of the stack.
2118 __ movq(rax, Operand(rsp, 3 * kPointerSize));
2119 __ movq(Operand(rsp, 0 * kPointerSize), rax);
2120
2121 // rsp[2 * kPointerSize] is uninitialized, rsp[3 * kPointerSize] contains
2122 // a duplicate of the return address and will be overwritten.
2123 bool success = GenerateFastApiCall(masm(), optimization, argc, &failure);
2124 if (!success) {
2125 return failure;
2126 }
2127 } else {
2128 __ InvokeFunction(function, arguments(), JUMP_FUNCTION);
2129 }
2130
2131 // Handle call cache miss.
2132 __ bind(&miss);
2133 if (depth != kInvalidProtoDepth) {
2134 __ addq(rsp, Immediate(kFastApiCallArguments * kPointerSize));
2135 }
2136
2137 // Handle call cache miss.
2138 __ bind(&miss_in_smi_check);
2139 Object* obj;
2140 { MaybeObject* maybe_obj = GenerateMissBranch();
2141 if (!maybe_obj->ToObject(&obj)) return maybe_obj;
2142 }
2143
2144 // Return the generated code.
2145 return GetCode(function);
2146 }
2147
2148
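As a reading aid for the CheckType switch in CompileCallConstant above: the switch picks which receiver check to emit before the stub tail-calls a known builtin. The standalone C++ sketch below (C++11) mirrors only that dispatch; CheckType reuses the names from the switch, while ToyKind, ToyValue and ReceiverPassesCheck are invented for the example and are not V8 code.

// Reading aid only; not part of this CL or of V8. ToyKind, ToyValue and
// ReceiverPassesCheck are invented names; only the branch structure
// mirrors the CheckType switch in CompileCallConstant.
#include <cstdio>

enum CheckType { RECEIVER_MAP_CHECK, STRING_CHECK, NUMBER_CHECK, BOOLEAN_CHECK };
enum ToyKind { kSmi, kHeapNumber, kString, kBoolean, kJSObject };

struct ToyValue {
  ToyKind kind;
};

// Returns true when the receiver would fall through to the fast call path,
// false where the generated stub would jump to the miss label instead.
static bool ReceiverPassesCheck(const ToyValue& receiver, CheckType check) {
  switch (check) {
    case RECEIVER_MAP_CHECK:
      // The real stub compares the receiver map; here any JS object passes.
      return receiver.kind == kJSObject;
    case STRING_CHECK:
      // Mirrors CmpObjectType(rdx, FIRST_NONSTRING_TYPE) + j(above_equal).
      return receiver.kind == kString;
    case NUMBER_CHECK:
      // Mirrors JumpIfSmi plus CmpObjectType(rdx, HEAP_NUMBER_TYPE).
      return receiver.kind == kSmi || receiver.kind == kHeapNumber;
    case BOOLEAN_CHECK:
      // Mirrors CompareRoot against the true and false value roots.
      return receiver.kind == kBoolean;
  }
  return false;
}

int main() {
  ToyValue smi_receiver = { kSmi };
  std::printf("%d\n", ReceiverPassesCheck(smi_receiver, NUMBER_CHECK));  // 1
  return 0;
}

For non-builtin targets the primitive cases simply jump to miss, which is what the "requires boxing" comments above refer to.
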
1743 MaybeObject* CallStubCompiler::CompileCallInterceptor(JSObject* object, 2149 MaybeObject* CallStubCompiler::CompileCallInterceptor(JSObject* object,
1744 JSObject* holder, 2150 JSObject* holder,
1745 String* name) { 2151 String* name) {
1746 // ----------- S t a t e ------------- 2152 // ----------- S t a t e -------------
1747 // rcx : function name 2153 // rcx : function name
1748 // rsp[0] : return address 2154 // rsp[0] : return address
1749 // rsp[8] : argument argc 2155 // rsp[8] : argument argc
1750 // rsp[16] : argument argc - 1 2156 // rsp[16] : argument argc - 1
1751 // ... 2157 // ...
1752 // rsp[argc * 8] : argument 1 2158 // rsp[argc * 8] : argument 1
(...skipping 120 matching lines...)
1873 Object* obj; 2279 Object* obj;
1874 { MaybeObject* maybe_obj = GenerateMissBranch(); 2280 { MaybeObject* maybe_obj = GenerateMissBranch();
1875 if (!maybe_obj->ToObject(&obj)) return maybe_obj; 2281 if (!maybe_obj->ToObject(&obj)) return maybe_obj;
1876 } 2282 }
1877 2283
1878 // Return the generated code. 2284 // Return the generated code.
1879 return GetCode(NORMAL, name); 2285 return GetCode(NORMAL, name);
1880 } 2286 }
1881 2287
1882 2288
1883 MaybeObject* LoadStubCompiler::CompileLoadCallback(String* name, 2289 MaybeObject* StoreStubCompiler::CompileStoreField(JSObject* object,
1884 JSObject* object, 2290 int index,
1885 JSObject* holder, 2291 Map* transition,
1886 AccessorInfo* callback) { 2292 String* name) {
1887 // ----------- S t a t e ------------- 2293 // ----------- S t a t e -------------
1888 // -- rax : receiver 2294 // -- rax : value
1889 // -- rcx : name 2295 // -- rcx : name
1890 // -- rsp[0] : return address 2296 // -- rdx : receiver
1891 // ----------------------------------- 2297 // -- rsp[0] : return address
1892 Label miss; 2298 // -----------------------------------
1893 2299 Label miss;
1894 Failure* failure = Failure::InternalError(); 2300
1895 bool success = GenerateLoadCallback(object, holder, rax, rcx, rdx, rbx, rdi, 2301 // Generate store field code. Preserves receiver and name on jump to miss.
1896 callback, name, &miss, &failure); 2302 GenerateStoreField(masm(),
1897 if (!success) { 2303 object,
1898 miss.Unuse(); 2304 index,
1899 return failure; 2305 transition,
2306 rdx, rcx, rbx,
2307 &miss);
2308
2309 // Handle store cache miss.
2310 __ bind(&miss);
2311 Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
2312 __ Jump(ic, RelocInfo::CODE_TARGET);
2313
2314 // Return the generated code.
2315 return GetCode(transition == NULL ? FIELD : MAP_TRANSITION, name);
2316 }
2317
2318
2319 MaybeObject* StoreStubCompiler::CompileStoreCallback(JSObject* object,
2320 AccessorInfo* callback,
2321 String* name) {
2322 // ----------- S t a t e -------------
2323 // -- rax : value
2324 // -- rcx : name
2325 // -- rdx : receiver
2326 // -- rsp[0] : return address
2327 // -----------------------------------
2328 Label miss;
2329
2330 // Check that the object isn't a smi.
2331 __ JumpIfSmi(rdx, &miss);
2332
2333 // Check that the map of the object hasn't changed.
2334 __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset),
2335 Handle<Map>(object->map()));
2336 __ j(not_equal, &miss);
2337
2338 // Perform global security token check if needed.
2339 if (object->IsJSGlobalProxy()) {
2340 __ CheckAccessGlobalProxy(rdx, rbx, &miss);
1900 } 2341 }
1901 2342
1902 __ bind(&miss); 2343 // Stub never generated for non-global objects that require access
1903 GenerateLoadMiss(masm(), Code::LOAD_IC); 2344 // checks.
2345 ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
2346
2347 __ pop(rbx); // remove the return address
2348 __ push(rdx); // receiver
2349 __ Push(Handle<AccessorInfo>(callback)); // callback info
2350 __ push(rcx); // name
2351 __ push(rax); // value
2352 __ push(rbx); // restore return address
2353
2354 // Do tail-call to the runtime system.
2355 ExternalReference store_callback_property =
2356 ExternalReference(IC_Utility(IC::kStoreCallbackProperty));
2357 __ TailCallExternalReference(store_callback_property, 4, 1);
2358
2359 // Handle store cache miss.
2360 __ bind(&miss);
2361 Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
2362 __ Jump(ic, RelocInfo::CODE_TARGET);
1904 2363
1905 // Return the generated code. 2364 // Return the generated code.
1906 return GetCode(CALLBACKS, name); 2365 return GetCode(CALLBACKS, name);
1907 } 2366 }
1908 2367
1909 2368
1910 MaybeObject* LoadStubCompiler::CompileLoadConstant(JSObject* object, 2369 MaybeObject* StoreStubCompiler::CompileStoreInterceptor(JSObject* receiver,
1911 JSObject* holder, 2370 String* name) {
1912 Object* value, 2371 // ----------- S t a t e -------------
2372 // -- rax : value
2373 // -- rcx : name
2374 // -- rdx : receiver
2375 // -- rsp[0] : return address
2376 // -----------------------------------
2377 Label miss;
2378
2379 // Check that the object isn't a smi.
2380 __ JumpIfSmi(rdx, &miss);
2381
2382 // Check that the map of the object hasn't changed.
2383 __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset),
2384 Handle<Map>(receiver->map()));
2385 __ j(not_equal, &miss);
2386
2387 // Perform global security token check if needed.
2388 if (receiver->IsJSGlobalProxy()) {
2389 __ CheckAccessGlobalProxy(rdx, rbx, &miss);
2390 }
2391
2392 // Stub never generated for non-global objects that require access
2393 // checks.
2394 ASSERT(receiver->IsJSGlobalProxy() || !receiver->IsAccessCheckNeeded());
2395
2396 __ pop(rbx); // remove the return address
2397 __ push(rdx); // receiver
2398 __ push(rcx); // name
2399 __ push(rax); // value
2400 __ push(rbx); // restore return address
2401
2402 // Do tail-call to the runtime system.
2403 ExternalReference store_ic_property =
2404 ExternalReference(IC_Utility(IC::kStoreInterceptorProperty));
2405 __ TailCallExternalReference(store_ic_property, 3, 1);
2406
2407 // Handle store cache miss.
2408 __ bind(&miss);
2409 Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
2410 __ Jump(ic, RelocInfo::CODE_TARGET);
2411
2412 // Return the generated code.
2413 return GetCode(INTERCEPTOR, name);
2414 }
2415
2416
2417 MaybeObject* StoreStubCompiler::CompileStoreGlobal(GlobalObject* object,
2418 JSGlobalPropertyCell* cell,
1913 String* name) { 2419 String* name) {
1914 // ----------- S t a t e ------------- 2420 // ----------- S t a t e -------------
1915 // -- rax : receiver 2421 // -- rax : value
1916 // -- rcx : name 2422 // -- rcx : name
1917 // -- rsp[0] : return address 2423 // -- rdx : receiver
1918 // ----------------------------------- 2424 // -- rsp[0] : return address
1919 Label miss; 2425 // -----------------------------------
1920 2426 Label miss;
1921 GenerateLoadConstant(object, holder, rax, rbx, rdx, rdi, value, name, &miss); 2427
1922 __ bind(&miss); 2428 // Check that the map of the global has not changed.
1923 GenerateLoadMiss(masm(), Code::LOAD_IC); 2429 __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset),
1924 2430 Handle<Map>(object->map()));
1925 // Return the generated code. 2431 __ j(not_equal, &miss);
1926 return GetCode(CONSTANT_FUNCTION, name); 2432
2433 // Store the value in the cell.
2434 __ Move(rcx, Handle<JSGlobalPropertyCell>(cell));
2435 __ movq(FieldOperand(rcx, JSGlobalPropertyCell::kValueOffset), rax);
2436
2437 // Return the value (register rax).
2438 __ IncrementCounter(&Counters::named_store_global_inline, 1);
2439 __ ret(0);
2440
2441 // Handle store cache miss.
2442 __ bind(&miss);
2443 __ IncrementCounter(&Counters::named_store_global_inline_miss, 1);
2444 Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
2445 __ Jump(ic, RelocInfo::CODE_TARGET);
2446
2447 // Return the generated code.
2448 return GetCode(NORMAL, name);
2449 }
2450
2451
2452 MaybeObject* KeyedStoreStubCompiler::CompileStoreField(JSObject* object,
2453 int index,
2454 Map* transition,
2455 String* name) {
2456 // ----------- S t a t e -------------
2457 // -- rax : value
2458 // -- rcx : key
2459 // -- rdx : receiver
2460 // -- rsp[0] : return address
2461 // -----------------------------------
2462 Label miss;
2463
2464 __ IncrementCounter(&Counters::keyed_store_field, 1);
2465
2466 // Check that the name has not changed.
2467 __ Cmp(rcx, Handle<String>(name));
2468 __ j(not_equal, &miss);
2469
2470 // Generate store field code. Preserves receiver and name on jump to miss.
2471 GenerateStoreField(masm(),
2472 object,
2473 index,
2474 transition,
2475 rdx, rcx, rbx,
2476 &miss);
2477
2478 // Handle store cache miss.
2479 __ bind(&miss);
2480 __ DecrementCounter(&Counters::keyed_store_field, 1);
2481 Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Miss));
2482 __ Jump(ic, RelocInfo::CODE_TARGET);
2483
2484 // Return the generated code.
2485 return GetCode(transition == NULL ? FIELD : MAP_TRANSITION, name);
2486 }
2487
2488
2489 MaybeObject* KeyedStoreStubCompiler::CompileStoreSpecialized(
2490 JSObject* receiver) {
2491 // ----------- S t a t e -------------
2492 // -- rax : value
2493 // -- rcx : key
2494 // -- rdx : receiver
2495 // -- rsp[0] : return address
2496 // -----------------------------------
2497 Label miss;
2498
2499 // Check that the receiver isn't a smi.
2500 __ JumpIfSmi(rdx, &miss);
2501
2502 // Check that the map matches.
2503 __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset),
2504 Handle<Map>(receiver->map()));
2505 __ j(not_equal, &miss);
2506
2507 // Check that the key is a smi.
2508 __ JumpIfNotSmi(rcx, &miss);
2509
2510 // Get the elements array and make sure it is a fast element array, not 'cow'.
2511 __ movq(rdi, FieldOperand(rdx, JSObject::kElementsOffset));
2512 __ Cmp(FieldOperand(rdi, HeapObject::kMapOffset),
2513 Factory::fixed_array_map());
2514 __ j(not_equal, &miss);
2515
2516 // Check that the key is within bounds.
2517 if (receiver->IsJSArray()) {
2518 __ SmiCompare(rcx, FieldOperand(rdx, JSArray::kLengthOffset));
2519 __ j(above_equal, &miss);
2520 } else {
2521 __ SmiCompare(rcx, FieldOperand(rdi, FixedArray::kLengthOffset));
2522 __ j(above_equal, &miss);
2523 }
2524
2525 // Do the store and update the write barrier. Make sure to preserve
2526 // the value in register rax.
2527 __ movq(rdx, rax);
2528 __ SmiToInteger32(rcx, rcx);
2529 __ movq(FieldOperand(rdi, rcx, times_pointer_size, FixedArray::kHeaderSize),
2530 rax);
2531 __ RecordWrite(rdi, 0, rdx, rcx);
2532
2533 // Done.
2534 __ ret(0);
2535
2536 // Handle store cache miss.
2537 __ bind(&miss);
2538 Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Miss));
2539 __ jmp(ic, RelocInfo::CODE_TARGET);
2540
2541 // Return the generated code.
2542 return GetCode(NORMAL, NULL);
1927 } 2543 }
1928 2544
1929 2545
1930 MaybeObject* LoadStubCompiler::CompileLoadNonexistent(String* name, 2546 MaybeObject* LoadStubCompiler::CompileLoadNonexistent(String* name,
1931 JSObject* object, 2547 JSObject* object,
1932 JSObject* last) { 2548 JSObject* last) {
1933 // ----------- S t a t e ------------- 2549 // ----------- S t a t e -------------
1934 // -- rax : receiver 2550 // -- rax : receiver
1935 // -- rcx : name 2551 // -- rcx : name
1936 // -- rsp[0] : return address 2552 // -- rsp[0] : return address
(...skipping 48 matching lines...)
1985 2601
1986 GenerateLoadField(object, holder, rax, rbx, rdx, rdi, index, name, &miss); 2602 GenerateLoadField(object, holder, rax, rbx, rdx, rdi, index, name, &miss);
1987 __ bind(&miss); 2603 __ bind(&miss);
1988 GenerateLoadMiss(masm(), Code::LOAD_IC); 2604 GenerateLoadMiss(masm(), Code::LOAD_IC);
1989 2605
1990 // Return the generated code. 2606 // Return the generated code.
1991 return GetCode(FIELD, name); 2607 return GetCode(FIELD, name);
1992 } 2608 }
1993 2609
1994 2610
2611 MaybeObject* LoadStubCompiler::CompileLoadCallback(String* name,
2612 JSObject* object,
2613 JSObject* holder,
2614 AccessorInfo* callback) {
2615 // ----------- S t a t e -------------
2616 // -- rax : receiver
2617 // -- rcx : name
2618 // -- rsp[0] : return address
2619 // -----------------------------------
2620 Label miss;
2621
2622 Failure* failure = Failure::InternalError();
2623 bool success = GenerateLoadCallback(object, holder, rax, rcx, rdx, rbx, rdi,
2624 callback, name, &miss, &failure);
2625 if (!success) {
2626 miss.Unuse();
2627 return failure;
2628 }
2629
2630 __ bind(&miss);
2631 GenerateLoadMiss(masm(), Code::LOAD_IC);
2632
2633 // Return the generated code.
2634 return GetCode(CALLBACKS, name);
2635 }
2636
2637
2638 MaybeObject* LoadStubCompiler::CompileLoadConstant(JSObject* object,
2639 JSObject* holder,
2640 Object* value,
2641 String* name) {
2642 // ----------- S t a t e -------------
2643 // -- rax : receiver
2644 // -- rcx : name
2645 // -- rsp[0] : return address
2646 // -----------------------------------
2647 Label miss;
2648
2649 GenerateLoadConstant(object, holder, rax, rbx, rdx, rdi, value, name, &miss);
2650 __ bind(&miss);
2651 GenerateLoadMiss(masm(), Code::LOAD_IC);
2652
2653 // Return the generated code.
2654 return GetCode(CONSTANT_FUNCTION, name);
2655 }
2656
2657
1995 MaybeObject* LoadStubCompiler::CompileLoadInterceptor(JSObject* receiver, 2658 MaybeObject* LoadStubCompiler::CompileLoadInterceptor(JSObject* receiver,
1996 JSObject* holder, 2659 JSObject* holder,
1997 String* name) { 2660 String* name) {
1998 // ----------- S t a t e ------------- 2661 // ----------- S t a t e -------------
1999 // -- rax : receiver 2662 // -- rax : receiver
2000 // -- rcx : name 2663 // -- rcx : name
2001 // -- rsp[0] : return address 2664 // -- rsp[0] : return address
2002 // ----------------------------------- 2665 // -----------------------------------
2003 Label miss; 2666 Label miss;
2004 2667
(...skipping 62 matching lines...)
2067 2730
2068 __ bind(&miss); 2731 __ bind(&miss);
2069 __ IncrementCounter(&Counters::named_load_global_stub_miss, 1); 2732 __ IncrementCounter(&Counters::named_load_global_stub_miss, 1);
2070 GenerateLoadMiss(masm(), Code::LOAD_IC); 2733 GenerateLoadMiss(masm(), Code::LOAD_IC);
2071 2734
2072 // Return the generated code. 2735 // Return the generated code.
2073 return GetCode(NORMAL, name); 2736 return GetCode(NORMAL, name);
2074 } 2737 }
2075 2738
2076 2739
2077 MaybeObject* KeyedLoadStubCompiler::CompileLoadCallback( 2740 MaybeObject* KeyedLoadStubCompiler::CompileLoadField(String* name,
2078 String* name, 2741 JSObject* receiver,
2079 JSObject* receiver, 2742 JSObject* holder,
2080 JSObject* holder, 2743 int index) {
2081 AccessorInfo* callback) {
2082 // ----------- S t a t e ------------- 2744 // ----------- S t a t e -------------
2083 // -- rax : key 2745 // -- rax : key
2084 // -- rdx : receiver 2746 // -- rdx : receiver
2085 // -- rsp[0] : return address 2747 // -- rsp[0] : return address
2086 // ----------------------------------- 2748 // -----------------------------------
2087 Label miss; 2749 Label miss;
2088 2750
2751 __ IncrementCounter(&Counters::keyed_load_field, 1);
2752
2753 // Check that the name has not changed.
2754 __ Cmp(rax, Handle<String>(name));
2755 __ j(not_equal, &miss);
2756
2757 GenerateLoadField(receiver, holder, rdx, rbx, rcx, rdi, index, name, &miss);
2758
2759 __ bind(&miss);
2760 __ DecrementCounter(&Counters::keyed_load_field, 1);
2761 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
2762
2763 // Return the generated code.
2764 return GetCode(FIELD, name);
2765 }
2766
2767
2768 MaybeObject* KeyedLoadStubCompiler::CompileLoadCallback(
2769 String* name,
2770 JSObject* receiver,
2771 JSObject* holder,
2772 AccessorInfo* callback) {
2773 // ----------- S t a t e -------------
2774 // -- rax : key
2775 // -- rdx : receiver
2776 // -- rsp[0] : return address
2777 // -----------------------------------
2778 Label miss;
2779
2089 __ IncrementCounter(&Counters::keyed_load_callback, 1); 2780 __ IncrementCounter(&Counters::keyed_load_callback, 1);
2090 2781
2091 // Check that the name has not changed. 2782 // Check that the name has not changed.
2092 __ Cmp(rax, Handle<String>(name)); 2783 __ Cmp(rax, Handle<String>(name));
2093 __ j(not_equal, &miss); 2784 __ j(not_equal, &miss);
2094 2785
2095 Failure* failure = Failure::InternalError(); 2786 Failure* failure = Failure::InternalError();
2096 bool success = GenerateLoadCallback(receiver, holder, rdx, rax, rbx, rcx, rdi, 2787 bool success = GenerateLoadCallback(receiver, holder, rdx, rax, rbx, rcx, rdi,
2097 callback, name, &miss, &failure); 2788 callback, name, &miss, &failure);
2098 if (!success) { 2789 if (!success) {
2099 miss.Unuse(); 2790 miss.Unuse();
2100 return failure; 2791 return failure;
2101 } 2792 }
2102 2793
2103 __ bind(&miss); 2794 __ bind(&miss);
2795
2104 __ DecrementCounter(&Counters::keyed_load_callback, 1); 2796 __ DecrementCounter(&Counters::keyed_load_callback, 1);
2105 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC); 2797 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
2106 2798
2107 // Return the generated code. 2799 // Return the generated code.
2108 return GetCode(CALLBACKS, name);
2109 }
2110
2111
2112 MaybeObject* KeyedLoadStubCompiler::CompileLoadArrayLength(String* name) {
2113 // ----------- S t a t e -------------
2114 // -- rax : key
2115 // -- rdx : receiver
2116 // -- rsp[0] : return address
2117 // -----------------------------------
2118 Label miss;
2119
2120 __ IncrementCounter(&Counters::keyed_load_array_length, 1);
2121
2122 // Check that the name has not changed.
2123 __ Cmp(rax, Handle<String>(name));
2124 __ j(not_equal, &miss);
2125
2126 GenerateLoadArrayLength(masm(), rdx, rcx, &miss);
2127 __ bind(&miss);
2128 __ DecrementCounter(&Counters::keyed_load_array_length, 1);
2129 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
2130
2131 // Return the generated code.
2132 return GetCode(CALLBACKS, name); 2800 return GetCode(CALLBACKS, name);
2133 } 2801 }
2134 2802
2135 2803
2136 MaybeObject* KeyedLoadStubCompiler::CompileLoadConstant(String* name, 2804 MaybeObject* KeyedLoadStubCompiler::CompileLoadConstant(String* name,
2137 JSObject* receiver, 2805 JSObject* receiver,
2138 JSObject* holder, 2806 JSObject* holder,
2139 Object* value) { 2807 Object* value) {
2140 // ----------- S t a t e ------------- 2808 // ----------- S t a t e -------------
2141 // -- rax : key 2809 // -- rax : key
(...skipping 12 matching lines...)
2154 value, name, &miss); 2822 value, name, &miss);
2155 __ bind(&miss); 2823 __ bind(&miss);
2156 __ DecrementCounter(&Counters::keyed_load_constant_function, 1); 2824 __ DecrementCounter(&Counters::keyed_load_constant_function, 1);
2157 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC); 2825 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
2158 2826
2159 // Return the generated code. 2827 // Return the generated code.
2160 return GetCode(CONSTANT_FUNCTION, name); 2828 return GetCode(CONSTANT_FUNCTION, name);
2161 } 2829 }
2162 2830
2163 2831
2164 MaybeObject* KeyedLoadStubCompiler::CompileLoadFunctionPrototype(String* name) {
2165 // ----------- S t a t e -------------
2166 // -- rax : key
2167 // -- rdx : receiver
2168 // -- rsp[0] : return address
2169 // -----------------------------------
2170 Label miss;
2171
2172 __ IncrementCounter(&Counters::keyed_load_function_prototype, 1);
2173
2174 // Check that the name has not changed.
2175 __ Cmp(rax, Handle<String>(name));
2176 __ j(not_equal, &miss);
2177
2178 GenerateLoadFunctionPrototype(masm(), rdx, rcx, rbx, &miss);
2179 __ bind(&miss);
2180 __ DecrementCounter(&Counters::keyed_load_function_prototype, 1);
2181 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
2182
2183 // Return the generated code.
2184 return GetCode(CALLBACKS, name);
2185 }
2186
2187
2188 MaybeObject* KeyedLoadStubCompiler::CompileLoadInterceptor(JSObject* receiver, 2832 MaybeObject* KeyedLoadStubCompiler::CompileLoadInterceptor(JSObject* receiver,
2189 JSObject* holder, 2833 JSObject* holder,
2190 String* name) { 2834 String* name) {
2191 // ----------- S t a t e ------------- 2835 // ----------- S t a t e -------------
2192 // -- rax : key 2836 // -- rax : key
2193 // -- rdx : receiver 2837 // -- rdx : receiver
2194 // -- rsp[0] : return address 2838 // -- rsp[0] : return address
2195 // ----------------------------------- 2839 // -----------------------------------
2196 Label miss; 2840 Label miss;
2197 2841
(...skipping 17 matching lines...)
2215 &miss); 2859 &miss);
2216 __ bind(&miss); 2860 __ bind(&miss);
2217 __ DecrementCounter(&Counters::keyed_load_interceptor, 1); 2861 __ DecrementCounter(&Counters::keyed_load_interceptor, 1);
2218 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC); 2862 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
2219 2863
2220 // Return the generated code. 2864 // Return the generated code.
2221 return GetCode(INTERCEPTOR, name); 2865 return GetCode(INTERCEPTOR, name);
2222 } 2866 }
2223 2867
2224 2868
2225 MaybeObject* KeyedLoadStubCompiler::CompileLoadStringLength(String* name) { 2869 MaybeObject* KeyedLoadStubCompiler::CompileLoadArrayLength(String* name) {
2226 // ----------- S t a t e ------------- 2870 // ----------- S t a t e -------------
2227 // -- rax : key 2871 // -- rax : key
2228 // -- rdx : receiver 2872 // -- rdx : receiver
2229 // -- rsp[0] : return address 2873 // -- rsp[0] : return address
2230 // ----------------------------------- 2874 // -----------------------------------
2231 Label miss; 2875 Label miss;
2232 2876
2233 __ IncrementCounter(&Counters::keyed_load_string_length, 1); 2877 __ IncrementCounter(&Counters::keyed_load_array_length, 1);
2234 2878
2235 // Check that the name has not changed. 2879 // Check that the name has not changed.
2236 __ Cmp(rax, Handle<String>(name)); 2880 __ Cmp(rax, Handle<String>(name));
2237 __ j(not_equal, &miss); 2881 __ j(not_equal, &miss);
2238 2882
2239 GenerateLoadStringLength(masm(), rdx, rcx, rbx, &miss); 2883 GenerateLoadArrayLength(masm(), rdx, rcx, &miss);
2240 __ bind(&miss); 2884 __ bind(&miss);
2241 __ DecrementCounter(&Counters::keyed_load_string_length, 1); 2885 __ DecrementCounter(&Counters::keyed_load_array_length, 1);
2242 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC); 2886 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
2243 2887
2244 // Return the generated code. 2888 // Return the generated code.
2245 return GetCode(CALLBACKS, name); 2889 return GetCode(CALLBACKS, name);
2246 } 2890 }
2247 2891
2248 2892
2893 MaybeObject* KeyedLoadStubCompiler::CompileLoadStringLength(String* name) {
2894 // ----------- S t a t e -------------
2895 // -- rax : key
2896 // -- rdx : receiver
2897 // -- rsp[0] : return address
2898 // -----------------------------------
2899 Label miss;
2900
2901 __ IncrementCounter(&Counters::keyed_load_string_length, 1);
2902
2903 // Check that the name has not changed.
2904 __ Cmp(rax, Handle<String>(name));
2905 __ j(not_equal, &miss);
2906
2907 GenerateLoadStringLength(masm(), rdx, rcx, rbx, &miss);
2908 __ bind(&miss);
2909 __ DecrementCounter(&Counters::keyed_load_string_length, 1);
2910 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
2911
2912 // Return the generated code.
2913 return GetCode(CALLBACKS, name);
2914 }
2915
2916
2917 MaybeObject* KeyedLoadStubCompiler::CompileLoadFunctionPrototype(String* name) {
2918 // ----------- S t a t e -------------
2919 // -- rax : key
2920 // -- rdx : receiver
2921 // -- rsp[0] : return address
2922 // -----------------------------------
2923 Label miss;
2924
2925 __ IncrementCounter(&Counters::keyed_load_function_prototype, 1);
2926
2927 // Check that the name has not changed.
2928 __ Cmp(rax, Handle<String>(name));
2929 __ j(not_equal, &miss);
2930
2931 GenerateLoadFunctionPrototype(masm(), rdx, rcx, rbx, &miss);
2932 __ bind(&miss);
2933 __ DecrementCounter(&Counters::keyed_load_function_prototype, 1);
2934 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
2935
2936 // Return the generated code.
2937 return GetCode(CALLBACKS, name);
2938 }
2939
2940
2249 MaybeObject* KeyedLoadStubCompiler::CompileLoadSpecialized(JSObject* receiver) { 2941 MaybeObject* KeyedLoadStubCompiler::CompileLoadSpecialized(JSObject* receiver) {
2250 // ----------- S t a t e ------------- 2942 // ----------- S t a t e -------------
2251 // -- rax : key 2943 // -- rax : key
2252 // -- rdx : receiver 2944 // -- rdx : receiver
2253 // -- esp[0] : return address 2945 // -- esp[0] : return address
2254 // ----------------------------------- 2946 // -----------------------------------
2255 Label miss; 2947 Label miss;
2256 2948
2257 // Check that the receiver isn't a smi.
2258 __ JumpIfSmi(rdx, &miss);
2259
2260 // Check that the map matches.
2261 __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset),
2262 Handle<Map>(receiver->map()));
2263 __ j(not_equal, &miss);
2264
2265 // Check that the key is a smi.
2266 __ JumpIfNotSmi(rax, &miss);
2267
2268 // Get the elements array.
2269 __ movq(rcx, FieldOperand(rdx, JSObject::kElementsOffset));
2270 __ AssertFastElements(rcx);
2271
2272 // Check that the key is within bounds.
2273 __ SmiCompare(rax, FieldOperand(rcx, FixedArray::kLengthOffset));
2274 __ j(above_equal, &miss);
2275
2276 // Load the result and make sure it's not the hole.
2277 SmiIndex index = masm()->SmiToIndex(rbx, rax, kPointerSizeLog2);
2278 __ movq(rbx, FieldOperand(rcx,
2279 index.reg,
2280 index.scale,
2281 FixedArray::kHeaderSize));
2282 __ CompareRoot(rbx, Heap::kTheHoleValueRootIndex);
2283 __ j(equal, &miss);
2284 __ movq(rax, rbx);
2285 __ ret(0);
2286
2287 __ bind(&miss);
2288 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
2289
2290 // Return the generated code.
2291 return GetCode(NORMAL, NULL);
2292 }
2293
2294
2295 MaybeObject* StoreStubCompiler::CompileStoreCallback(JSObject* object,
2296 AccessorInfo* callback,
2297 String* name) {
2298 // ----------- S t a t e -------------
2299 // -- rax : value
2300 // -- rcx : name
2301 // -- rdx : receiver
2302 // -- rsp[0] : return address
2303 // -----------------------------------
2304 Label miss;
2305
2306 // Check that the object isn't a smi.
2307 __ JumpIfSmi(rdx, &miss);
2308
2309 // Check that the map of the object hasn't changed.
2310 __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset),
2311 Handle<Map>(object->map()));
2312 __ j(not_equal, &miss);
2313
2314 // Perform global security token check if needed.
2315 if (object->IsJSGlobalProxy()) {
2316 __ CheckAccessGlobalProxy(rdx, rbx, &miss);
2317 }
2318
2319 // Stub never generated for non-global objects that require access
2320 // checks.
2321 ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
2322
2323 __ pop(rbx); // remove the return address
2324 __ push(rdx); // receiver
2325 __ Push(Handle<AccessorInfo>(callback)); // callback info
2326 __ push(rcx); // name
2327 __ push(rax); // value
2328 __ push(rbx); // restore return address
2329
2330 // Do tail-call to the runtime system.
2331 ExternalReference store_callback_property =
2332 ExternalReference(IC_Utility(IC::kStoreCallbackProperty));
2333 __ TailCallExternalReference(store_callback_property, 4, 1);
2334
2335 // Handle store cache miss.
2336 __ bind(&miss);
2337 Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
2338 __ Jump(ic, RelocInfo::CODE_TARGET);
2339
2340 // Return the generated code.
2341 return GetCode(CALLBACKS, name);
2342 }
2343
2344
2345 MaybeObject* StoreStubCompiler::CompileStoreField(JSObject* object,
2346 int index,
2347 Map* transition,
2348 String* name) {
2349 // ----------- S t a t e -------------
2350 // -- rax : value
2351 // -- rcx : name
2352 // -- rdx : receiver
2353 // -- rsp[0] : return address
2354 // -----------------------------------
2355 Label miss;
2356
2357 // Generate store field code. Preserves receiver and name on jump to miss.
2358 GenerateStoreField(masm(),
2359 object,
2360 index,
2361 transition,
2362 rdx, rcx, rbx,
2363 &miss);
2364
2365 // Handle store cache miss.
2366 __ bind(&miss);
2367 Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
2368 __ Jump(ic, RelocInfo::CODE_TARGET);
2369
2370 // Return the generated code.
2371 return GetCode(transition == NULL ? FIELD : MAP_TRANSITION, name);
2372 }
2373
2374
2375 MaybeObject* StoreStubCompiler::CompileStoreInterceptor(JSObject* receiver,
2376 String* name) {
2377 // ----------- S t a t e -------------
2378 // -- rax : value
2379 // -- rcx : name
2380 // -- rdx : receiver
2381 // -- rsp[0] : return address
2382 // -----------------------------------
2383 Label miss;
2384
2385 // Check that the object isn't a smi.
2386 __ JumpIfSmi(rdx, &miss);
2387
2388 // Check that the map of the object hasn't changed.
2389 __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset),
2390 Handle<Map>(receiver->map()));
2391 __ j(not_equal, &miss);
2392
2393 // Perform global security token check if needed.
2394 if (receiver->IsJSGlobalProxy()) {
2395 __ CheckAccessGlobalProxy(rdx, rbx, &miss);
2396 }
2397
2398 // Stub never generated for non-global objects that require access
2399 // checks.
2400 ASSERT(receiver->IsJSGlobalProxy() || !receiver->IsAccessCheckNeeded());
2401
2402 __ pop(rbx); // remove the return address
2403 __ push(rdx); // receiver
2404 __ push(rcx); // name
2405 __ push(rax); // value
2406 __ push(rbx); // restore return address
2407
2408 // Do tail-call to the runtime system.
2409 ExternalReference store_ic_property =
2410 ExternalReference(IC_Utility(IC::kStoreInterceptorProperty));
2411 __ TailCallExternalReference(store_ic_property, 3, 1);
2412
2413 // Handle store cache miss.
2414 __ bind(&miss);
2415 Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
2416 __ Jump(ic, RelocInfo::CODE_TARGET);
2417
2418 // Return the generated code.
2419 return GetCode(INTERCEPTOR, name);
2420 }
2421
2422
2423 MaybeObject* StoreStubCompiler::CompileStoreGlobal(GlobalObject* object,
2424 JSGlobalPropertyCell* cell,
2425 String* name) {
2426 // ----------- S t a t e -------------
2427 // -- rax : value
2428 // -- rcx : name
2429 // -- rdx : receiver
2430 // -- rsp[0] : return address
2431 // -----------------------------------
2432 Label miss;
2433
2434 // Check that the map of the global has not changed.
2435 __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset),
2436 Handle<Map>(object->map()));
2437 __ j(not_equal, &miss);
2438
2439 // Store the value in the cell.
2440 __ Move(rcx, Handle<JSGlobalPropertyCell>(cell));
2441 __ movq(FieldOperand(rcx, JSGlobalPropertyCell::kValueOffset), rax);
2442
2443 // Return the value (register rax).
2444 __ IncrementCounter(&Counters::named_store_global_inline, 1);
2445 __ ret(0);
2446
2447 // Handle store cache miss.
2448 __ bind(&miss);
2449 __ IncrementCounter(&Counters::named_store_global_inline_miss, 1);
2450 Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
2451 __ Jump(ic, RelocInfo::CODE_TARGET);
2452
2453 // Return the generated code.
2454 return GetCode(NORMAL, name);
2455 }
2456
2457
2458 MaybeObject* KeyedLoadStubCompiler::CompileLoadField(String* name,
2459 JSObject* receiver,
2460 JSObject* holder,
2461 int index) {
2462 // ----------- S t a t e -------------
2463 // -- rax : key
2464 // -- rdx : receiver
2465 // -- rsp[0] : return address
2466 // -----------------------------------
2467 Label miss;
2468
2469 __ IncrementCounter(&Counters::keyed_load_field, 1);
2470
2471 // Check that the name has not changed.
2472 __ Cmp(rax, Handle<String>(name));
2473 __ j(not_equal, &miss);
2474
2475 GenerateLoadField(receiver, holder, rdx, rbx, rcx, rdi, index, name, &miss);
2476
2477 __ bind(&miss);
2478 __ DecrementCounter(&Counters::keyed_load_field, 1);
2479 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
2480
2481 // Return the generated code.
2482 return GetCode(FIELD, name);
2483 }
2484
2485
2486 MaybeObject* KeyedStoreStubCompiler::CompileStoreField(JSObject* object,
2487 int index,
2488 Map* transition,
2489 String* name) {
2490 // ----------- S t a t e -------------
2491 // -- rax : value
2492 // -- rcx : key
2493 // -- rdx : receiver
2494 // -- rsp[0] : return address
2495 // -----------------------------------
2496 Label miss;
2497
2498 __ IncrementCounter(&Counters::keyed_store_field, 1);
2499
2500 // Check that the name has not changed.
2501 __ Cmp(rcx, Handle<String>(name));
2502 __ j(not_equal, &miss);
2503
2504 // Generate store field code. Preserves receiver and name on jump to miss.
2505 GenerateStoreField(masm(),
2506 object,
2507 index,
2508 transition,
2509 rdx, rcx, rbx,
2510 &miss);
2511
2512 // Handle store cache miss.
2513 __ bind(&miss);
2514 __ DecrementCounter(&Counters::keyed_store_field, 1);
2515 Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Miss));
2516 __ Jump(ic, RelocInfo::CODE_TARGET);
2517
2518 // Return the generated code.
2519 return GetCode(transition == NULL ? FIELD : MAP_TRANSITION, name);
2520 }
2521
2522
2523 MaybeObject* KeyedStoreStubCompiler::CompileStoreSpecialized(
2524 JSObject* receiver) {
2525 // ----------- S t a t e -------------
2526 // -- rax : value
2527 // -- rcx : key
2528 // -- rdx : receiver
2529 // -- rsp[0] : return address
2530 // -----------------------------------
2531 Label miss;
2532
2533 // Check that the receiver isn't a smi. 2949 // Check that the receiver isn't a smi.
2534 __ JumpIfSmi(rdx, &miss); 2950 __ JumpIfSmi(rdx, &miss);
2535 2951
2536 // Check that the map matches. 2952 // Check that the map matches.
2537 __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset), 2953 __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset),
2538 Handle<Map>(receiver->map())); 2954 Handle<Map>(receiver->map()));
2539 __ j(not_equal, &miss); 2955 __ j(not_equal, &miss);
2540 2956
2541 // Check that the key is a smi. 2957 // Check that the key is a smi.
2542 __ JumpIfNotSmi(rcx, &miss); 2958 __ JumpIfNotSmi(rax, &miss);
2543 2959
2544 // Get the elements array and make sure it is a fast element array, not 'cow'. 2960 // Get the elements array.
2545 __ movq(rdi, FieldOperand(rdx, JSObject::kElementsOffset)); 2961 __ movq(rcx, FieldOperand(rdx, JSObject::kElementsOffset));
2546 __ Cmp(FieldOperand(rdi, HeapObject::kMapOffset), 2962 __ AssertFastElements(rcx);
2547 Factory::fixed_array_map());
2548 __ j(not_equal, &miss);
2549 2963
2550 // Check that the key is within bounds. 2964 // Check that the key is within bounds.
2551 if (receiver->IsJSArray()) { 2965 __ SmiCompare(rax, FieldOperand(rcx, FixedArray::kLengthOffset));
2552 __ SmiCompare(rcx, FieldOperand(rdx, JSArray::kLengthOffset)); 2966 __ j(above_equal, &miss);
2553 __ j(above_equal, &miss);
2554 } else {
2555 __ SmiCompare(rcx, FieldOperand(rdi, FixedArray::kLengthOffset));
2556 __ j(above_equal, &miss);
2557 }
2558 2967
2559 // Do the store and update the write barrier. Make sure to preserve 2968 // Load the result and make sure it's not the hole.
2560 // the value in register rax. 2969 SmiIndex index = masm()->SmiToIndex(rbx, rax, kPointerSizeLog2);
2561 __ movq(rdx, rax); 2970 __ movq(rbx, FieldOperand(rcx,
2562 __ SmiToInteger32(rcx, rcx); 2971 index.reg,
2563 __ movq(FieldOperand(rdi, rcx, times_pointer_size, FixedArray::kHeaderSize), 2972 index.scale,
2564 rax); 2973 FixedArray::kHeaderSize));
2565 __ RecordWrite(rdi, 0, rdx, rcx); 2974 __ CompareRoot(rbx, Heap::kTheHoleValueRootIndex);
2566 2975 __ j(equal, &miss);
2567 // Done. 2976 __ movq(rax, rbx);
2568 __ ret(0); 2977 __ ret(0);
2569 2978
2570 // Handle store cache miss.
2571 __ bind(&miss); 2979 __ bind(&miss);
2572 Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Miss)); 2980 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
2573 __ jmp(ic, RelocInfo::CODE_TARGET);
2574 2981
2575 // Return the generated code. 2982 // Return the generated code.
2576 return GetCode(NORMAL, NULL); 2983 return GetCode(NORMAL, NULL);
2577 } 2984 }
2578 2985
2579 2986
2580 void StubCompiler::GenerateLoadInterceptor(JSObject* object,
2581 JSObject* interceptor_holder,
2582 LookupResult* lookup,
2583 Register receiver,
2584 Register name_reg,
2585 Register scratch1,
2586 Register scratch2,
2587 Register scratch3,
2588 String* name,
2589 Label* miss) {
2590 ASSERT(interceptor_holder->HasNamedInterceptor());
2591 ASSERT(!interceptor_holder->GetNamedInterceptor()->getter()->IsUndefined());
2592
2593 // Check that the receiver isn't a smi.
2594 __ JumpIfSmi(receiver, miss);
2595
2596 // So far the most popular follow-ups for interceptor loads are FIELD
2597 // and CALLBACKS, so inline only those; other cases may be added
2598 // later.
2599 bool compile_followup_inline = false;
2600 if (lookup->IsProperty() && lookup->IsCacheable()) {
2601 if (lookup->type() == FIELD) {
2602 compile_followup_inline = true;
2603 } else if (lookup->type() == CALLBACKS &&
2604 lookup->GetCallbackObject()->IsAccessorInfo() &&
2605 AccessorInfo::cast(lookup->GetCallbackObject())->getter() != NULL) {
2606 compile_followup_inline = true;
2607 }
2608 }
2609
2610 if (compile_followup_inline) {
2611 // Compile the interceptor call, followed by inline code to load the
2612 // property from further up the prototype chain if the call fails.
2613 // Check that the maps haven't changed.
2614 Register holder_reg = CheckPrototypes(object, receiver, interceptor_holder,
2615 scratch1, scratch2, scratch3,
2616 name, miss);
2617 ASSERT(holder_reg.is(receiver) || holder_reg.is(scratch1));
2618
2619 // Save necessary data before invoking an interceptor.
2620 // Requires a frame to make GC aware of pushed pointers.
2621 __ EnterInternalFrame();
2622
2623 if (lookup->type() == CALLBACKS && !receiver.is(holder_reg)) {
2624 // CALLBACKS case needs a receiver to be passed into C++ callback.
2625 __ push(receiver);
2626 }
2627 __ push(holder_reg);
2628 __ push(name_reg);
2629
2630 // Invoke an interceptor. Note: map checks from receiver to
2631 // interceptor's holder have been compiled before (see the caller
2632 // of this method).
2633 CompileCallLoadPropertyWithInterceptor(masm(),
2634 receiver,
2635 holder_reg,
2636 name_reg,
2637 interceptor_holder);
2638
2639 // Check if interceptor provided a value for property. If it's
2640 // the case, return immediately.
2641 Label interceptor_failed;
2642 __ CompareRoot(rax, Heap::kNoInterceptorResultSentinelRootIndex);
2643 __ j(equal, &interceptor_failed);
2644 __ LeaveInternalFrame();
2645 __ ret(0);
2646
2647 __ bind(&interceptor_failed);
2648 __ pop(name_reg);
2649 __ pop(holder_reg);
2650 if (lookup->type() == CALLBACKS && !receiver.is(holder_reg)) {
2651 __ pop(receiver);
2652 }
2653
2654 __ LeaveInternalFrame();
2655
2656 // Check that the maps from interceptor's holder to lookup's holder
2657 // haven't changed. And load lookup's holder into |holder| register.
2658 if (interceptor_holder != lookup->holder()) {
2659 holder_reg = CheckPrototypes(interceptor_holder,
2660 holder_reg,
2661 lookup->holder(),
2662 scratch1,
2663 scratch2,
2664 scratch3,
2665 name,
2666 miss);
2667 }
2668
2669 if (lookup->type() == FIELD) {
2670 // We found FIELD property in prototype chain of interceptor's holder.
2671 // Retrieve a field from field's holder.
2672 GenerateFastPropertyLoad(masm(), rax, holder_reg,
2673 lookup->holder(), lookup->GetFieldIndex());
2674 __ ret(0);
2675 } else {
2676 // We found CALLBACKS property in prototype chain of interceptor's
2677 // holder.
2678 ASSERT(lookup->type() == CALLBACKS);
2679 ASSERT(lookup->GetCallbackObject()->IsAccessorInfo());
2680 AccessorInfo* callback = AccessorInfo::cast(lookup->GetCallbackObject());
2681 ASSERT(callback != NULL);
2682 ASSERT(callback->getter() != NULL);
2683
2684 // Tail call to runtime.
2685 // Important invariant in CALLBACKS case: the code above must be
2686 // structured to never clobber |receiver| register.
2687 __ pop(scratch2); // return address
2688 __ push(receiver);
2689 __ push(holder_reg);
2690 __ Move(holder_reg, Handle<AccessorInfo>(callback));
2691 __ push(FieldOperand(holder_reg, AccessorInfo::kDataOffset));
2692 __ push(holder_reg);
2693 __ push(name_reg);
2694 __ push(scratch2); // restore return address
2695
2696 ExternalReference ref =
2697 ExternalReference(IC_Utility(IC::kLoadCallbackProperty));
2698 __ TailCallExternalReference(ref, 5, 1);
2699 }
2700 } else { // !compile_followup_inline
2701 // Call the runtime system to load the interceptor.
2702 // Check that the maps haven't changed.
2703 Register holder_reg = CheckPrototypes(object, receiver, interceptor_holder,
2704 scratch1, scratch2, scratch3,
2705 name, miss);
2706 __ pop(scratch2); // save old return address
2707 PushInterceptorArguments(masm(), receiver, holder_reg,
2708 name_reg, interceptor_holder);
2709 __ push(scratch2); // restore old return address
2710
2711 ExternalReference ref = ExternalReference(
2712 IC_Utility(IC::kLoadPropertyWithInterceptorForLoad));
2713 __ TailCallExternalReference(ref, 5, 1);
2714 }
2715 }
2716
2717
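A note on the follow-up decision at the top of GenerateLoadInterceptor above: only FIELD lookups and CALLBACKS lookups that carry a real getter are inlined after the interceptor call; everything else takes the runtime path in the else branch. The sketch below (C++11) shows just that predicate; Lookup and ShouldInlineFollowup are invented stand-ins for the example, not V8's LookupResult API.

// Reading aid only; not part of this CL or of V8. Lookup is an invented
// stand-in for LookupResult; ShouldInlineFollowup mirrors only the
// compile_followup_inline decision in GenerateLoadInterceptor.
#include <cstdio>

enum FollowupType { FIELD_FOLLOWUP, CALLBACKS_FOLLOWUP, OTHER_FOLLOWUP };

struct Lookup {
  bool is_property;
  bool is_cacheable;
  FollowupType type;
  bool callback_has_getter;  // Only meaningful for CALLBACKS_FOLLOWUP.
};

// FIELD follow-ups and CALLBACKS follow-ups with a getter are inlined;
// anything else falls back to the runtime call path.
static bool ShouldInlineFollowup(const Lookup& lookup) {
  if (!lookup.is_property || !lookup.is_cacheable) return false;
  if (lookup.type == FIELD_FOLLOWUP) return true;
  return lookup.type == CALLBACKS_FOLLOWUP && lookup.callback_has_getter;
}

int main() {
  Lookup field_lookup = { true, true, FIELD_FOLLOWUP, false };
  std::printf("%d\n", ShouldInlineFollowup(field_lookup));  // prints 1
  return 0;
}
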
2718 bool StubCompiler::GenerateLoadCallback(JSObject* object,
2719 JSObject* holder,
2720 Register receiver,
2721 Register name_reg,
2722 Register scratch1,
2723 Register scratch2,
2724 Register scratch3,
2725 AccessorInfo* callback,
2726 String* name,
2727 Label* miss,
2728 Failure** failure) {
2729 // Check that the receiver isn't a smi.
2730 __ JumpIfSmi(receiver, miss);
2731
2732 // Check that the maps haven't changed.
2733 Register reg =
2734 CheckPrototypes(object, receiver, holder, scratch1,
2735 scratch2, scratch3, name, miss);
2736
2737 Handle<AccessorInfo> callback_handle(callback);
2738
2739 // Insert additional parameters into the stack frame above return address.
2740 ASSERT(!scratch2.is(reg));
2741 __ pop(scratch2); // Get return address to place it below.
2742
2743 __ push(receiver); // receiver
2744 __ push(reg); // holder
2745 if (Heap::InNewSpace(callback_handle->data())) {
2746 __ Move(scratch1, callback_handle);
2747 __ push(FieldOperand(scratch1, AccessorInfo::kDataOffset)); // data
2748 } else {
2749 __ Push(Handle<Object>(callback_handle->data()));
2750 }
2751 __ push(name_reg); // name
2752 // Save a pointer to where we pushed the arguments pointer.
2753 // This will be passed as the const AccessorInfo& to the C++ callback.
2754
2755 #ifdef _WIN64
2756 // Win64 uses the first register (rcx) for the returned value.
2757 Register accessor_info_arg = r8;
2758 Register name_arg = rdx;
2759 #else
2760 Register accessor_info_arg = rsi;
2761 Register name_arg = rdi;
2762 #endif
2763
2764 ASSERT(!name_arg.is(scratch2));
2765 __ movq(name_arg, rsp);
2766 __ push(scratch2); // Restore return address.
2767
2768 // Do call through the api.
2769 Address getter_address = v8::ToCData<Address>(callback->getter());
2770 ApiFunction fun(getter_address);
2771
2772 // 3-element array for v8::Arguments::values_ and a handle for the name.
2773 const int kStackSpace = 4;
2774
2775 // Allocate v8::AccessorInfo in non-GCed stack space.
2776 const int kArgStackSpace = 1;
2777
2778 __ PrepareCallApiFunction(kArgStackSpace);
2779 __ lea(rax, Operand(name_arg, 3 * kPointerSize));
2780
2781 // v8::AccessorInfo::args_.
2782 __ movq(StackSpaceOperand(0), rax);
2783
2784 // The context register (rsi) has been saved in PrepareCallApiFunction and
2785 // could be used to pass arguments.
2786 __ lea(accessor_info_arg, StackSpaceOperand(0));
2787
2788 // Emitting a stub call may try to allocate (if the code is not
2789 // already generated). Do not allow the assembler to perform a
2790 // garbage collection but instead return the allocation failure
2791 // object.
2792 MaybeObject* result = masm()->TryCallApiFunctionAndReturn(&fun, kStackSpace);
2793 if (result->IsFailure()) {
2794 *failure = Failure::cast(result);
2795 return false;
2796 }
2797 return true;
2798 }
2799
2800
2801 Register StubCompiler::CheckPrototypes(JSObject* object,
2802 Register object_reg,
2803 JSObject* holder,
2804 Register holder_reg,
2805 Register scratch1,
2806 Register scratch2,
2807 String* name,
2808 int save_at_depth,
2809 Label* miss) {
2810 // Make sure there's no overlap between holder and object registers.
2811 ASSERT(!scratch1.is(object_reg) && !scratch1.is(holder_reg));
2812 ASSERT(!scratch2.is(object_reg) && !scratch2.is(holder_reg)
2813 && !scratch2.is(scratch1));
2814
2815 // Keep track of the current object in register reg. On the first
2816 // iteration, reg is an alias for object_reg; on later iterations,
2817 // it is an alias for holder_reg.
2818 Register reg = object_reg;
2819 int depth = 0;
2820
2821 if (save_at_depth == depth) {
2822 __ movq(Operand(rsp, kPointerSize), object_reg);
2823 }
2824
2825 // Check the maps in the prototype chain.
2826 // Traverse the prototype chain from the object and do map checks.
2827 JSObject* current = object;
2828 while (current != holder) {
2829 depth++;
2830
2831 // Only global objects and objects that do not require access
2832 // checks are allowed in stubs.
2833 ASSERT(current->IsJSGlobalProxy() || !current->IsAccessCheckNeeded());
2834
2835 JSObject* prototype = JSObject::cast(current->GetPrototype());
2836 if (!current->HasFastProperties() &&
2837 !current->IsJSGlobalObject() &&
2838 !current->IsJSGlobalProxy()) {
2839 if (!name->IsSymbol()) {
2840 MaybeObject* lookup_result = Heap::LookupSymbol(name);
2841 if (lookup_result->IsFailure()) {
2842 set_failure(Failure::cast(lookup_result));
2843 return reg;
2844 } else {
2845 name = String::cast(lookup_result->ToObjectUnchecked());
2846 }
2847 }
2848 ASSERT(current->property_dictionary()->FindEntry(name) ==
2849 StringDictionary::kNotFound);
2850
2851 GenerateDictionaryNegativeLookup(masm(),
2852 miss,
2853 reg,
2854 name,
2855 scratch1,
2856 scratch2);
2857 __ movq(scratch1, FieldOperand(reg, HeapObject::kMapOffset));
2858 reg = holder_reg; // from now the object is in holder_reg
2859 __ movq(reg, FieldOperand(scratch1, Map::kPrototypeOffset));
2860 } else if (Heap::InNewSpace(prototype)) {
2861 // Get the map of the current object.
2862 __ movq(scratch1, FieldOperand(reg, HeapObject::kMapOffset));
2863 __ Cmp(scratch1, Handle<Map>(current->map()));
2864 // Branch on the result of the map check.
2865 __ j(not_equal, miss);
2866 // Check access rights to the global object. This has to happen
2867 // after the map check so that we know that the object is
2868 // actually a global object.
2869 if (current->IsJSGlobalProxy()) {
2870 __ CheckAccessGlobalProxy(reg, scratch1, miss);
2871
2872 // Restore scratch register to be the map of the object.
2873 // We load the prototype from the map in the scratch register.
2874 __ movq(scratch1, FieldOperand(reg, HeapObject::kMapOffset));
2875 }
2876 // The prototype is in new space; we cannot store a reference
2877 // to it in the code. Load it from the map.
2878 reg = holder_reg; // from now the object is in holder_reg
2879 __ movq(reg, FieldOperand(scratch1, Map::kPrototypeOffset));
2880
2881 } else {
2882 // Check the map of the current object.
2883 __ Cmp(FieldOperand(reg, HeapObject::kMapOffset),
2884 Handle<Map>(current->map()));
2885 // Branch on the result of the map check.
2886 __ j(not_equal, miss);
2887 // Check access rights to the global object. This has to happen
2888 // after the map check so that we know that the object is
2889 // actually a global object.
2890 if (current->IsJSGlobalProxy()) {
2891 __ CheckAccessGlobalProxy(reg, scratch1, miss);
2892 }
2893 // The prototype is in old space; load it directly.
2894 reg = holder_reg; // from now the object is in holder_reg
2895 __ Move(reg, Handle<JSObject>(prototype));
2896 }
2897
2898 if (save_at_depth == depth) {
2899 __ movq(Operand(rsp, kPointerSize), reg);
2900 }
2901
2902 // Go to the next object in the prototype chain.
2903 current = prototype;
2904 }
2905
2906 // Check the holder map.
2907 __ Cmp(FieldOperand(reg, HeapObject::kMapOffset), Handle<Map>(holder->map()));
2908 __ j(not_equal, miss);
2909
2910 // Log the check depth.
2911 LOG(IntEvent("check-maps-depth", depth + 1));
2912
2913 // Perform security check for access to the global object and return
2914 // the holder register.
2915 ASSERT(current == holder);
2916 ASSERT(current->IsJSGlobalProxy() || !current->IsAccessCheckNeeded());
2917 if (current->IsJSGlobalProxy()) {
2918 __ CheckAccessGlobalProxy(reg, scratch1, miss);
2919 }
2920
2921 // If we've skipped any global objects, it's not enough to verify
2922 // that their maps haven't changed. We also need to check that the
2923 // property cell for the property is still empty.
2924 current = object;
2925 while (current != holder) {
2926 if (current->IsGlobalObject()) {
2927 MaybeObject* cell = GenerateCheckPropertyCell(masm(),
2928 GlobalObject::cast(current),
2929 name,
2930 scratch1,
2931 miss);
2932 if (cell->IsFailure()) {
2933 set_failure(Failure::cast(cell));
2934 return reg;
2935 }
2936 }
2937 current = JSObject::cast(current->GetPrototype());
2938 }
2939
2940 // Return the register containing the holder.
2941 return reg;
2942 }
2943
2944
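For readers new to CheckPrototypes above: it makes two passes over the same prototype chain. The first walk goes from the receiver to the holder, emitting a map check (plus an access check for global proxies, or a dictionary negative lookup for slow-mode objects) at each step; the second walk verifies that any global object that was skipped still has no property cell for the name. The sketch below (C++11) shows only that two-pass shape; ToyObject and CheckChain are invented for the example and leave out the register handling and the new-space versus old-space prototype loads.

// Reading aid only; not part of this CL or of V8. ToyObject and CheckChain
// are invented names; only the two-pass structure mirrors CheckPrototypes.
#include <cstdio>
#include <set>
#include <string>

struct ToyObject {
  int map_id;                       // Stand-in for the object's map.
  bool is_global;                   // Stand-in for IsGlobalObject().
  std::set<std::string> own_props;  // Stand-in for global property cells.
  ToyObject* prototype;             // Next object on the prototype chain.
};

// Returns true when every map on the chain from object to holder still
// matches its expected value and no skipped global object has acquired
// the named property; false corresponds to jumping to the miss label.
static bool CheckChain(ToyObject* object, ToyObject* holder,
                       const int* expected_maps, const std::string& name) {
  int depth = 0;
  // Pass 1: map checks from the receiver up to and including the holder.
  for (ToyObject* current = object; ; current = current->prototype) {
    if (current->map_id != expected_maps[depth++]) return false;
    if (current == holder) break;
  }
  // Pass 2: skipped global objects must still lack a cell for the name.
  for (ToyObject* current = object; current != holder;
       current = current->prototype) {
    if (current->is_global && current->own_props.count(name) != 0) return false;
  }
  return true;
}

int main() {
  ToyObject holder = { 2, false, {}, nullptr };
  ToyObject object = { 1, false, {}, &holder };
  const int expected_maps[] = { 1, 2 };
  std::printf("%d\n", CheckChain(&object, &holder, expected_maps, "x"));  // 1
  return 0;
}
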
2945 void StubCompiler::GenerateLoadField(JSObject* object,
2946 JSObject* holder,
2947 Register receiver,
2948 Register scratch1,
2949 Register scratch2,
2950 Register scratch3,
2951 int index,
2952 String* name,
2953 Label* miss) {
2954 // Check that the receiver isn't a smi.
2955 __ JumpIfSmi(receiver, miss);
2956
2957 // Check the prototype chain.
2958 Register reg =
2959 CheckPrototypes(object, receiver, holder,
2960 scratch1, scratch2, scratch3, name, miss);
2961
2962 // Get the value from the properties.
2963 GenerateFastPropertyLoad(masm(), rax, reg, holder, index);
2964 __ ret(0);
2965 }
2966
2967
2968 void StubCompiler::GenerateLoadConstant(JSObject* object,
2969 JSObject* holder,
2970 Register receiver,
2971 Register scratch1,
2972 Register scratch2,
2973 Register scratch3,
2974 Object* value,
2975 String* name,
2976 Label* miss) {
2977 // Check that the receiver isn't a smi.
2978 __ JumpIfSmi(receiver, miss);
2979
2980 // Check that the maps haven't changed.
2981 Register reg =
2982 CheckPrototypes(object, receiver, holder,
2983 scratch1, scratch2, scratch3, name, miss);
2984
2985 // Return the constant value.
2986 __ Move(rax, Handle<Object>(value));
2987 __ ret(0);
2988 }
2989
2990
2991 // Specialized stub for constructing objects from functions which have only 2987 // Specialized stub for constructing objects from functions which have only
2992 // simple assignments of the form this.x = ...; in their body. 2988 // simple assignments of the form this.x = ...; in their body.
2993 MaybeObject* ConstructStubCompiler::CompileConstructStub(JSFunction* function) { 2989 MaybeObject* ConstructStubCompiler::CompileConstructStub(JSFunction* function) {
2994 // ----------- S t a t e ------------- 2990 // ----------- S t a t e -------------
2995 // -- rax : argc 2991 // -- rax : argc
2996 // -- rdi : constructor 2992 // -- rdi : constructor
2997 // -- rsp[0] : return address 2993 // -- rsp[0] : return address
2998 // -- rsp[8] : last argument 2994 // -- rsp[8] : last argument
2999 // ----------------------------------- 2995 // -----------------------------------
3000 Label generic_stub_call; 2996 Label generic_stub_call;
(...skipping 116 matching lines...)
3117 // Return the generated code. 3113 // Return the generated code.
3118 return GetCode(); 3114 return GetCode();
3119 } 3115 }
3120 3116
3121 3117
3122 #undef __ 3118 #undef __
3123 3119
3124 } } // namespace v8::internal 3120 } } // namespace v8::internal
3125 3121
3126 #endif // V8_TARGET_ARCH_X64 3122 #endif // V8_TARGET_ARCH_X64