Chromium Code Reviews

Side by Side Diff: src/ia32/stub-cache-ia32.cc

Issue 8139027: Version 3.6.5 (Closed) Base URL: http://v8.googlecode.com/svn/trunk/
Patch Set: '' Created 9 years, 2 months ago
OLD | NEW
1 // Copyright 2011 the V8 project authors. All rights reserved. 1 // Copyright 2011 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 48 matching lines...)
59 __ cmp(name, Operand::StaticArray(offset, times_2, key_offset)); 59 __ cmp(name, Operand::StaticArray(offset, times_2, key_offset));
60 __ j(not_equal, &miss); 60 __ j(not_equal, &miss);
61 61
62 // Check that the flags match what we're looking for. 62 // Check that the flags match what we're looking for.
63 __ mov(offset, FieldOperand(extra, Code::kFlagsOffset)); 63 __ mov(offset, FieldOperand(extra, Code::kFlagsOffset));
64 __ and_(offset, ~Code::kFlagsNotUsedInLookup); 64 __ and_(offset, ~Code::kFlagsNotUsedInLookup);
65 __ cmp(offset, flags); 65 __ cmp(offset, flags);
66 __ j(not_equal, &miss); 66 __ j(not_equal, &miss);
67 67
68 // Jump to the first instruction in the code stub. 68 // Jump to the first instruction in the code stub.
69 __ add(Operand(extra), Immediate(Code::kHeaderSize - kHeapObjectTag)); 69 __ add(extra, Immediate(Code::kHeaderSize - kHeapObjectTag));
70 __ jmp(Operand(extra)); 70 __ jmp(extra);
71 71
72 __ bind(&miss); 72 __ bind(&miss);
73 } else { 73 } else {
74 // Save the offset on the stack. 74 // Save the offset on the stack.
75 __ push(offset); 75 __ push(offset);
76 76
77 // Check that the key in the entry matches the name. 77 // Check that the key in the entry matches the name.
78 __ cmp(name, Operand::StaticArray(offset, times_2, key_offset)); 78 __ cmp(name, Operand::StaticArray(offset, times_2, key_offset));
79 __ j(not_equal, &miss); 79 __ j(not_equal, &miss);
80 80
81 // Get the code entry from the cache. 81 // Get the code entry from the cache.
82 __ mov(offset, Operand::StaticArray(offset, times_2, value_offset)); 82 __ mov(offset, Operand::StaticArray(offset, times_2, value_offset));
83 83
84 // Check that the flags match what we're looking for. 84 // Check that the flags match what we're looking for.
85 __ mov(offset, FieldOperand(offset, Code::kFlagsOffset)); 85 __ mov(offset, FieldOperand(offset, Code::kFlagsOffset));
86 __ and_(offset, ~Code::kFlagsNotUsedInLookup); 86 __ and_(offset, ~Code::kFlagsNotUsedInLookup);
87 __ cmp(offset, flags); 87 __ cmp(offset, flags);
88 __ j(not_equal, &miss); 88 __ j(not_equal, &miss);
89 89
90 // Restore offset and re-load code entry from cache. 90 // Restore offset and re-load code entry from cache.
91 __ pop(offset); 91 __ pop(offset);
92 __ mov(offset, Operand::StaticArray(offset, times_2, value_offset)); 92 __ mov(offset, Operand::StaticArray(offset, times_2, value_offset));
93 93
94 // Jump to the first instruction in the code stub. 94 // Jump to the first instruction in the code stub.
95 __ add(Operand(offset), Immediate(Code::kHeaderSize - kHeapObjectTag)); 95 __ add(offset, Immediate(Code::kHeaderSize - kHeapObjectTag));
96 __ jmp(Operand(offset)); 96 __ jmp(offset);
97 97
98 // Pop at miss. 98 // Pop at miss.
99 __ bind(&miss); 99 __ bind(&miss);
100 __ pop(offset); 100 __ pop(offset);
101 } 101 }
102 } 102 }
103 103
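Note on the probe above: each stub-cache entry pairs an interned name with a code object, and a hit requires both the key and the masked code flags to match before control jumps past the code object's header. A minimal C++ sketch of that check, with simplified stand-in types rather than V8's real layout:

#include <cstdint>

// Simplified stand-ins for the stub-cache entry layout (illustration only).
struct Code { uint32_t flags; uint8_t body[1]; };
struct Entry { const void* key; Code* value; };

constexpr uint32_t kFlagsNotUsedInLookup = 0xFFu;  // assumed mask value

// Mirrors one ProbeTable hit check: the key must match, the masked code flags
// must match, and only then does control jump past the Code object's header.
inline const uint8_t* ProbeEntry(const Entry& e, const void* name,
                                 uint32_t wanted_flags) {
  if (e.key != name) return nullptr;                                // miss
  if ((e.value->flags & ~kFlagsNotUsedInLookup) != wanted_flags)    // miss
    return nullptr;
  return e.value->body;  // the stub adds Code::kHeaderSize - kHeapObjectTag
}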
104 104
105 // Helper function used to check that the dictionary doesn't contain 105 // Helper function used to check that the dictionary doesn't contain
106 // the property. This function may return false negatives, so miss_label 106 // the property. This function may return false negatives, so miss_label
(...skipping 90 matching lines...)
197 __ and_(scratch, (kPrimaryTableSize - 1) << kHeapObjectTagSize); 197 __ and_(scratch, (kPrimaryTableSize - 1) << kHeapObjectTagSize);
198 198
199 // Probe the primary table. 199 // Probe the primary table.
200 ProbeTable(isolate, masm, flags, kPrimary, name, scratch, extra); 200 ProbeTable(isolate, masm, flags, kPrimary, name, scratch, extra);
201 201
202 // Primary miss: Compute hash for secondary probe. 202 // Primary miss: Compute hash for secondary probe.
203 __ mov(scratch, FieldOperand(name, String::kHashFieldOffset)); 203 __ mov(scratch, FieldOperand(name, String::kHashFieldOffset));
204 __ add(scratch, FieldOperand(receiver, HeapObject::kMapOffset)); 204 __ add(scratch, FieldOperand(receiver, HeapObject::kMapOffset));
205 __ xor_(scratch, flags); 205 __ xor_(scratch, flags);
206 __ and_(scratch, (kPrimaryTableSize - 1) << kHeapObjectTagSize); 206 __ and_(scratch, (kPrimaryTableSize - 1) << kHeapObjectTagSize);
207 __ sub(scratch, Operand(name)); 207 __ sub(scratch, name);
208 __ add(Operand(scratch), Immediate(flags)); 208 __ add(scratch, Immediate(flags));
209 __ and_(scratch, (kSecondaryTableSize - 1) << kHeapObjectTagSize); 209 __ and_(scratch, (kSecondaryTableSize - 1) << kHeapObjectTagSize);
210 210
211 // Probe the secondary table. 211 // Probe the secondary table.
212 ProbeTable(isolate, masm, flags, kSecondary, name, scratch, extra); 212 ProbeTable(isolate, masm, flags, kSecondary, name, scratch, extra);
213 213
214 // Cache miss: Fall-through and let caller handle the miss by 214 // Cache miss: Fall-through and let caller handle the miss by
215 // entering the runtime system. 215 // entering the runtime system.
216 __ bind(&miss); 216 __ bind(&miss);
217 } 217 }
218 218
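For reference, the primary and secondary probe offsets computed above reduce to the following arithmetic; the constants are illustrative stand-ins for the values defined in stub-cache.h:

#include <cstdint>

// Illustrative constants; the real values are defined in stub-cache.h.
constexpr uint32_t kPrimaryTableSize = 2048;
constexpr uint32_t kSecondaryTableSize = 512;
constexpr uint32_t kHeapObjectTagSize = 2;

// Primary probe: combine the name's hash field with the receiver's map and the
// code flags, then mask down to a table offset (scaled by the tag size).
inline uint32_t PrimaryOffset(uint32_t name_hash, uint32_t map, uint32_t flags) {
  return ((name_hash + map) ^ flags) &
         ((kPrimaryTableSize - 1) << kHeapObjectTagSize);
}

// Secondary probe: derived from the masked primary offset, the name and the
// flags, exactly as the sub/add/and sequence above computes it.
inline uint32_t SecondaryOffset(uint32_t primary_offset, uint32_t name,
                                uint32_t flags) {
  return (primary_offset - name + flags) &
         ((kSecondaryTableSize - 1) << kHeapObjectTagSize);
}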
(...skipping 92 matching lines...)
311 } 311 }
312 } 312 }
313 313
314 314
315 void StubCompiler::GenerateLoadFunctionPrototype(MacroAssembler* masm, 315 void StubCompiler::GenerateLoadFunctionPrototype(MacroAssembler* masm,
316 Register receiver, 316 Register receiver,
317 Register scratch1, 317 Register scratch1,
318 Register scratch2, 318 Register scratch2,
319 Label* miss_label) { 319 Label* miss_label) {
320 __ TryGetFunctionPrototype(receiver, scratch1, scratch2, miss_label); 320 __ TryGetFunctionPrototype(receiver, scratch1, scratch2, miss_label);
321 __ mov(eax, Operand(scratch1)); 321 __ mov(eax, scratch1);
322 __ ret(0); 322 __ ret(0);
323 } 323 }
324 324
325 325
326 // Load a fast property out of a holder object (src). In-object properties 326 // Load a fast property out of a holder object (src). In-object properties
327 // are loaded directly otherwise the property is loaded from the properties 327 // are loaded directly otherwise the property is loaded from the properties
328 // fixed array. 328 // fixed array.
329 void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm, 329 void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm,
330 Register dst, Register src, 330 Register dst, Register src,
331 JSObject* holder, int index) { 331 JSObject* holder, int index) {
(...skipping 67 matching lines...)
399 static void FreeSpaceForFastApiCall(MacroAssembler* masm, Register scratch) { 399 static void FreeSpaceForFastApiCall(MacroAssembler* masm, Register scratch) {
400 // ----------- S t a t e ------------- 400 // ----------- S t a t e -------------
401 // -- esp[0] : return address. 401 // -- esp[0] : return address.
402 // -- esp[4] : last fast api call extra argument. 402 // -- esp[4] : last fast api call extra argument.
403 // -- ... 403 // -- ...
404 // -- esp[kFastApiCallArguments * 4] : first fast api call extra argument. 404 // -- esp[kFastApiCallArguments * 4] : first fast api call extra argument.
405 // -- esp[kFastApiCallArguments * 4 + 4] : last argument in the internal 405 // -- esp[kFastApiCallArguments * 4 + 4] : last argument in the internal
406 // frame. 406 // frame.
407 // ----------------------------------- 407 // -----------------------------------
408 __ pop(scratch); 408 __ pop(scratch);
409 __ add(Operand(esp), Immediate(kPointerSize * kFastApiCallArguments)); 409 __ add(esp, Immediate(kPointerSize * kFastApiCallArguments));
410 __ push(scratch); 410 __ push(scratch);
411 } 411 }
412 412
413 413
414 // Generates call to API function. 414 // Generates call to API function.
415 static MaybeObject* GenerateFastApiCall(MacroAssembler* masm, 415 static MaybeObject* GenerateFastApiCall(MacroAssembler* masm,
416 const CallOptimization& optimization, 416 const CallOptimization& optimization,
417 int argc) { 417 int argc) {
418 // ----------- S t a t e ------------- 418 // ----------- S t a t e -------------
419 // -- esp[0] : return address 419 // -- esp[0] : return address
(...skipping 35 matching lines...)
455 455
456 const int kApiArgc = 1; // API function gets reference to the v8::Arguments. 456 const int kApiArgc = 1; // API function gets reference to the v8::Arguments.
457 457
458 // Allocate the v8::Arguments structure in the arguments' space since 458 // Allocate the v8::Arguments structure in the arguments' space since
459 // it's not controlled by GC. 459 // it's not controlled by GC.
460 const int kApiStackSpace = 4; 460 const int kApiStackSpace = 4;
461 461
462 __ PrepareCallApiFunction(kApiArgc + kApiStackSpace); 462 __ PrepareCallApiFunction(kApiArgc + kApiStackSpace);
463 463
464 __ mov(ApiParameterOperand(1), eax); // v8::Arguments::implicit_args_. 464 __ mov(ApiParameterOperand(1), eax); // v8::Arguments::implicit_args_.
465 __ add(Operand(eax), Immediate(argc * kPointerSize)); 465 __ add(eax, Immediate(argc * kPointerSize));
466 __ mov(ApiParameterOperand(2), eax); // v8::Arguments::values_. 466 __ mov(ApiParameterOperand(2), eax); // v8::Arguments::values_.
467 __ Set(ApiParameterOperand(3), Immediate(argc)); // v8::Arguments::length_. 467 __ Set(ApiParameterOperand(3), Immediate(argc)); // v8::Arguments::length_.
468 // v8::Arguments::is_construct_call_. 468 // v8::Arguments::is_construct_call_.
469 __ Set(ApiParameterOperand(4), Immediate(0)); 469 __ Set(ApiParameterOperand(4), Immediate(0));
470 470
471 // v8::InvocationCallback's argument. 471 // v8::InvocationCallback's argument.
472 __ lea(eax, ApiParameterOperand(1)); 472 __ lea(eax, ApiParameterOperand(1));
473 __ mov(ApiParameterOperand(0), eax); 473 __ mov(ApiParameterOperand(0), eax);
474 474
475 // Emitting a stub call may try to allocate (if the code is not 475 // Emitting a stub call may try to allocate (if the code is not
(...skipping 168 matching lines...)
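The ApiParameterOperand stores above lay out a four-word v8::Arguments-style block on the stack; a rough model of that block, with field names taken from the comments rather than the actual v8.h declaration:

#include <cstdint>

// Rough model of the four-word block built on the stack for the API call.
// Field names follow the comments above; this is not the actual v8.h type.
struct ArgumentsBlock {
  void**  implicit_args_;      // ApiParameterOperand(1): eax before the add
  void**  values_;             // ApiParameterOperand(2): implicit_args_ + argc slots
  int32_t length_;             // ApiParameterOperand(3): argc
  int32_t is_construct_call_;  // ApiParameterOperand(4): 0 for a normal call
};
// ApiParameterOperand(0) then receives the address of this block, which is the
// single argument handed to the v8::InvocationCallback.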
644 Register scratch2, 644 Register scratch2,
645 Register scratch3, 645 Register scratch3,
646 String* name, 646 String* name,
647 JSObject* interceptor_holder, 647 JSObject* interceptor_holder,
648 Label* miss_label) { 648 Label* miss_label) {
649 Register holder = 649 Register holder =
650 stub_compiler_->CheckPrototypes(object, receiver, interceptor_holder, 650 stub_compiler_->CheckPrototypes(object, receiver, interceptor_holder,
651 scratch1, scratch2, scratch3, name, 651 scratch1, scratch2, scratch3, name,
652 miss_label); 652 miss_label);
653 653
654 __ EnterInternalFrame(); 654 FrameScope scope(masm, StackFrame::INTERNAL);
655 // Save the name_ register across the call. 655 // Save the name_ register across the call.
656 __ push(name_); 656 __ push(name_);
657 657
658 PushInterceptorArguments(masm, 658 PushInterceptorArguments(masm,
659 receiver, 659 receiver,
660 holder, 660 holder,
661 name_, 661 name_,
662 interceptor_holder); 662 interceptor_holder);
663 663
664 __ CallExternalReference( 664 __ CallExternalReference(
665 ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorForCall), 665 ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorForCall),
666 masm->isolate()), 666 masm->isolate()),
667 5); 667 5);
668 668
669 // Restore the name_ register. 669 // Restore the name_ register.
670 __ pop(name_); 670 __ pop(name_);
671 __ LeaveInternalFrame(); 671
672 // Leave the internal frame.
672 } 673 }
673 674
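The change above (and in LoadWithInterceptor below) swaps the paired EnterInternalFrame/LeaveInternalFrame calls for a FrameScope object, which emits the leave-frame sequence when it goes out of scope during code generation. A generic RAII sketch of the idea, using stand-in types rather than V8's actual FrameScope:

#include <cstdio>

// Minimal stand-in so the sketch compiles on its own; the real FrameScope
// drives a MacroAssembler during code generation instead.
struct Assembler {
  void EnterInternalFrame() { std::puts("enter internal frame"); }
  void LeaveInternalFrame() { std::puts("leave internal frame"); }
};

// RAII sketch: the frame is entered in the constructor and left in the
// destructor, so every exit path out of the scope leaves it exactly once.
class FrameScopeSketch {
 public:
  explicit FrameScopeSketch(Assembler* masm) : masm_(masm) {
    masm_->EnterInternalFrame();
  }
  ~FrameScopeSketch() { masm_->LeaveInternalFrame(); }
  FrameScopeSketch(const FrameScopeSketch&) = delete;
  FrameScopeSketch& operator=(const FrameScopeSketch&) = delete;

 private:
  Assembler* masm_;
};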
674 void LoadWithInterceptor(MacroAssembler* masm, 675 void LoadWithInterceptor(MacroAssembler* masm,
675 Register receiver, 676 Register receiver,
676 Register holder, 677 Register holder,
677 JSObject* holder_obj, 678 JSObject* holder_obj,
678 Label* interceptor_succeeded) { 679 Label* interceptor_succeeded) {
679 __ EnterInternalFrame(); 680 {
680 __ push(holder); // Save the holder. 681 FrameScope scope(masm, StackFrame::INTERNAL);
681 __ push(name_); // Save the name. 682 __ push(holder); // Save the holder.
683 __ push(name_); // Save the name.
682 684
683 CompileCallLoadPropertyWithInterceptor(masm, 685 CompileCallLoadPropertyWithInterceptor(masm,
684 receiver, 686 receiver,
685 holder, 687 holder,
686 name_, 688 name_,
687 holder_obj); 689 holder_obj);
688 690
689 __ pop(name_); // Restore the name. 691 __ pop(name_); // Restore the name.
690 __ pop(receiver); // Restore the holder. 692 __ pop(receiver); // Restore the holder.
691 __ LeaveInternalFrame(); 693 // Leave the internal frame.
694 }
692 695
693 __ cmp(eax, masm->isolate()->factory()->no_interceptor_result_sentinel()); 696 __ cmp(eax, masm->isolate()->factory()->no_interceptor_result_sentinel());
694 __ j(not_equal, interceptor_succeeded); 697 __ j(not_equal, interceptor_succeeded);
695 } 698 }
696 699
697 StubCompiler* stub_compiler_; 700 StubCompiler* stub_compiler_;
698 const ParameterCount& arguments_; 701 const ParameterCount& arguments_;
699 Register name_; 702 Register name_;
700 Code::ExtraICState extra_ic_state_; 703 Code::ExtraICState extra_ic_state_;
701 }; 704 };
(...skipping 77 matching lines...)
779 // object and the number of in-object properties is not going to change. 782 // object and the number of in-object properties is not going to change.
780 index -= object->map()->inobject_properties(); 783 index -= object->map()->inobject_properties();
781 784
782 if (index < 0) { 785 if (index < 0) {
783 // Set the property straight into the object. 786 // Set the property straight into the object.
784 int offset = object->map()->instance_size() + (index * kPointerSize); 787 int offset = object->map()->instance_size() + (index * kPointerSize);
785 __ mov(FieldOperand(receiver_reg, offset), eax); 788 __ mov(FieldOperand(receiver_reg, offset), eax);
786 789
787 // Update the write barrier for the array address. 790 // Update the write barrier for the array address.
788 // Pass the value being stored in the now unused name_reg. 791 // Pass the value being stored in the now unused name_reg.
789 __ mov(name_reg, Operand(eax)); 792 __ mov(name_reg, eax);
790 __ RecordWrite(receiver_reg, offset, name_reg, scratch); 793 __ RecordWriteField(receiver_reg,
794 offset,
795 name_reg,
796 scratch,
797 kDontSaveFPRegs);
791 } else { 798 } else {
792 // Write to the properties array. 799 // Write to the properties array.
793 int offset = index * kPointerSize + FixedArray::kHeaderSize; 800 int offset = index * kPointerSize + FixedArray::kHeaderSize;
794 // Get the properties array (optimistically). 801 // Get the properties array (optimistically).
795 __ mov(scratch, FieldOperand(receiver_reg, JSObject::kPropertiesOffset)); 802 __ mov(scratch, FieldOperand(receiver_reg, JSObject::kPropertiesOffset));
796 __ mov(FieldOperand(scratch, offset), eax); 803 __ mov(FieldOperand(scratch, offset), eax);
797 804
798 // Update the write barrier for the array address. 805 // Update the write barrier for the array address.
799 // Pass the value being stored in the now unused name_reg. 806 // Pass the value being stored in the now unused name_reg.
800 __ mov(name_reg, Operand(eax)); 807 __ mov(name_reg, eax);
801 __ RecordWrite(scratch, offset, name_reg, receiver_reg); 808 __ RecordWriteField(scratch,
809 offset,
810 name_reg,
811 receiver_reg,
812 kDontSaveFPRegs);
802 } 813 }
803 814
804 // Return the value (register eax). 815 // Return the value (register eax).
805 __ ret(0); 816 __ ret(0);
806 } 817 }
807 818
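The switch from RecordWrite to RecordWriteField above keeps the same basic contract: store the value, then run the write barrier unless the stored value is a smi. A simplified model of that sequence, with an illustrative tagging scheme rather than V8's real heap API:

#include <cstdint>

// Illustrative tagging: on ia32 a value with the low bit clear is a smi.
using Tagged = uintptr_t;
inline bool IsSmi(Tagged value) { return (value & 1) == 0; }

// Stand-in for RecordWriteField: tells the GC about the pointer now stored
// in an object it may already have scanned or marked.
inline void RecordWriteBarrier(Tagged* /*slot*/) {}

inline void StoreFieldWithBarrier(Tagged* slot, Tagged value) {
  *slot = value;               // the mov into FieldOperand(receiver_reg, offset)
  if (!IsSmi(value)) {         // smi stores never introduce a pointer to track
    RecordWriteBarrier(slot);
  }
}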
808 819
809 // Generate code to check that a global property cell is empty. Create 820 // Generate code to check that a global property cell is empty. Create
810 // the property cell at compilation time if no cell exists for the 821 // the property cell at compilation time if no cell exists for the
811 // property. 822 // property.
(...skipping 113 matching lines...)
925 set_failure(Failure::cast(negative_lookup)); 936 set_failure(Failure::cast(negative_lookup));
926 return reg; 937 return reg;
927 } 938 }
928 939
929 __ mov(scratch1, FieldOperand(reg, HeapObject::kMapOffset)); 940 __ mov(scratch1, FieldOperand(reg, HeapObject::kMapOffset));
930 reg = holder_reg; // from now the object is in holder_reg 941 reg = holder_reg; // from now the object is in holder_reg
931 __ mov(reg, FieldOperand(scratch1, Map::kPrototypeOffset)); 942 __ mov(reg, FieldOperand(scratch1, Map::kPrototypeOffset));
932 } else if (heap()->InNewSpace(prototype)) { 943 } else if (heap()->InNewSpace(prototype)) {
933 // Get the map of the current object. 944 // Get the map of the current object.
934 __ mov(scratch1, FieldOperand(reg, HeapObject::kMapOffset)); 945 __ mov(scratch1, FieldOperand(reg, HeapObject::kMapOffset));
935 __ cmp(Operand(scratch1), Immediate(Handle<Map>(current->map()))); 946 __ cmp(scratch1, Immediate(Handle<Map>(current->map())));
936 // Branch on the result of the map check. 947 // Branch on the result of the map check.
937 __ j(not_equal, miss); 948 __ j(not_equal, miss);
938 // Check access rights to the global object. This has to happen 949 // Check access rights to the global object. This has to happen
939 // after the map check so that we know that the object is 950 // after the map check so that we know that the object is
940 // actually a global object. 951 // actually a global object.
941 if (current->IsJSGlobalProxy()) { 952 if (current->IsJSGlobalProxy()) {
942 __ CheckAccessGlobalProxy(reg, scratch1, miss); 953 __ CheckAccessGlobalProxy(reg, scratch1, miss);
943 954
944 // Restore scratch register to be the map of the object. 955 // Restore scratch register to be the map of the object.
945 // We load the prototype from the map in the scratch register. 956 // We load the prototype from the map in the scratch register.
(...skipping 100 matching lines...)
1046 CheckPrototypes(object, receiver, holder, scratch1, 1057 CheckPrototypes(object, receiver, holder, scratch1,
1047 scratch2, scratch3, name, miss); 1058 scratch2, scratch3, name, miss);
1048 1059
1049 Handle<AccessorInfo> callback_handle(callback); 1060 Handle<AccessorInfo> callback_handle(callback);
1050 1061
1051 // Insert additional parameters into the stack frame above return address. 1062 // Insert additional parameters into the stack frame above return address.
1052 ASSERT(!scratch3.is(reg)); 1063 ASSERT(!scratch3.is(reg));
1053 __ pop(scratch3); // Get return address to place it below. 1064 __ pop(scratch3); // Get return address to place it below.
1054 1065
1055 __ push(receiver); // receiver 1066 __ push(receiver); // receiver
1056 __ mov(scratch2, Operand(esp)); 1067 __ mov(scratch2, esp);
1057 ASSERT(!scratch2.is(reg)); 1068 ASSERT(!scratch2.is(reg));
1058 __ push(reg); // holder 1069 __ push(reg); // holder
1059 // Push data from AccessorInfo. 1070 // Push data from AccessorInfo.
1060 if (isolate()->heap()->InNewSpace(callback_handle->data())) { 1071 if (isolate()->heap()->InNewSpace(callback_handle->data())) {
1061 __ mov(scratch1, Immediate(callback_handle)); 1072 __ mov(scratch1, Immediate(callback_handle));
1062 __ push(FieldOperand(scratch1, AccessorInfo::kDataOffset)); 1073 __ push(FieldOperand(scratch1, AccessorInfo::kDataOffset));
1063 } else { 1074 } else {
1064 __ push(Immediate(Handle<Object>(callback_handle->data()))); 1075 __ push(Immediate(Handle<Object>(callback_handle->data())));
1065 } 1076 }
1066 1077
(...skipping 10 matching lines...)
1077 Address getter_address = v8::ToCData<Address>(callback->getter()); 1088 Address getter_address = v8::ToCData<Address>(callback->getter());
1078 ApiFunction fun(getter_address); 1089 ApiFunction fun(getter_address);
1079 1090
1080 // 3 elements array for v8::Agruments::values_, handler for name and pointer 1091 // 3 elements array for v8::Agruments::values_, handler for name and pointer
1081 // to the values (it considered as smi in GC). 1092 // to the values (it considered as smi in GC).
1082 const int kStackSpace = 5; 1093 const int kStackSpace = 5;
1083 const int kApiArgc = 2; 1094 const int kApiArgc = 2;
1084 1095
1085 __ PrepareCallApiFunction(kApiArgc); 1096 __ PrepareCallApiFunction(kApiArgc);
1086 __ mov(ApiParameterOperand(0), ebx); // name. 1097 __ mov(ApiParameterOperand(0), ebx); // name.
1087 __ add(Operand(ebx), Immediate(kPointerSize)); 1098 __ add(ebx, Immediate(kPointerSize));
1088 __ mov(ApiParameterOperand(1), ebx); // arguments pointer. 1099 __ mov(ApiParameterOperand(1), ebx); // arguments pointer.
1089 1100
1090 // Emitting a stub call may try to allocate (if the code is not 1101 // Emitting a stub call may try to allocate (if the code is not
1091 // already generated). Do not allow the assembler to perform a 1102 // already generated). Do not allow the assembler to perform a
1092 // garbage collection but instead return the allocation failure 1103 // garbage collection but instead return the allocation failure
1093 // object. 1104 // object.
1094 return masm()->TryCallApiFunctionAndReturn(&fun, kStackSpace); 1105 return masm()->TryCallApiFunctionAndReturn(&fun, kStackSpace);
1095 } 1106 }
1096 1107
1097 1108
(...skipping 53 matching lines...)
1151 // Compile the interceptor call, followed by inline code to load the 1162 // Compile the interceptor call, followed by inline code to load the
1152 // property from further up the prototype chain if the call fails. 1163 // property from further up the prototype chain if the call fails.
1153 // Check that the maps haven't changed. 1164 // Check that the maps haven't changed.
1154 Register holder_reg = CheckPrototypes(object, receiver, interceptor_holder, 1165 Register holder_reg = CheckPrototypes(object, receiver, interceptor_holder,
1155 scratch1, scratch2, scratch3, 1166 scratch1, scratch2, scratch3,
1156 name, miss); 1167 name, miss);
1157 ASSERT(holder_reg.is(receiver) || holder_reg.is(scratch1)); 1168 ASSERT(holder_reg.is(receiver) || holder_reg.is(scratch1));
1158 1169
1159 // Save necessary data before invoking an interceptor. 1170 // Save necessary data before invoking an interceptor.
1160 // Requires a frame to make GC aware of pushed pointers. 1171 // Requires a frame to make GC aware of pushed pointers.
1161 __ EnterInternalFrame(); 1172 {
1173 FrameScope frame_scope(masm(), StackFrame::INTERNAL);
1162 1174
1163 if (lookup->type() == CALLBACKS && !receiver.is(holder_reg)) { 1175 if (lookup->type() == CALLBACKS && !receiver.is(holder_reg)) {
1164 // CALLBACKS case needs a receiver to be passed into C++ callback. 1176 // CALLBACKS case needs a receiver to be passed into C++ callback.
1165 __ push(receiver); 1177 __ push(receiver);
1178 }
1179 __ push(holder_reg);
1180 __ push(name_reg);
1181
1182 // Invoke an interceptor. Note: map checks from receiver to
1183 // interceptor's holder has been compiled before (see a caller
1184 // of this method.)
1185 CompileCallLoadPropertyWithInterceptor(masm(),
1186 receiver,
1187 holder_reg,
1188 name_reg,
1189 interceptor_holder);
1190
1191 // Check if interceptor provided a value for property. If it's
1192 // the case, return immediately.
1193 Label interceptor_failed;
1194 __ cmp(eax, factory()->no_interceptor_result_sentinel());
1195 __ j(equal, &interceptor_failed);
1196 frame_scope.GenerateLeaveFrame();
1197 __ ret(0);
1198
1199 __ bind(&interceptor_failed);
1200 __ pop(name_reg);
1201 __ pop(holder_reg);
1202 if (lookup->type() == CALLBACKS && !receiver.is(holder_reg)) {
1203 __ pop(receiver);
1204 }
1205
1206 // Leave the internal frame.
1166 } 1207 }
1167 __ push(holder_reg);
1168 __ push(name_reg);
1169
1170 // Invoke an interceptor. Note: map checks from receiver to
1171 // interceptor's holder has been compiled before (see a caller
1172 // of this method.)
1173 CompileCallLoadPropertyWithInterceptor(masm(),
1174 receiver,
1175 holder_reg,
1176 name_reg,
1177 interceptor_holder);
1178
1179 // Check if interceptor provided a value for property. If it's
1180 // the case, return immediately.
1181 Label interceptor_failed;
1182 __ cmp(eax, factory()->no_interceptor_result_sentinel());
1183 __ j(equal, &interceptor_failed);
1184 __ LeaveInternalFrame();
1185 __ ret(0);
1186
1187 __ bind(&interceptor_failed);
1188 __ pop(name_reg);
1189 __ pop(holder_reg);
1190 if (lookup->type() == CALLBACKS && !receiver.is(holder_reg)) {
1191 __ pop(receiver);
1192 }
1193
1194 __ LeaveInternalFrame();
1195 1208
1196 // Check that the maps from interceptor's holder to lookup's holder 1209 // Check that the maps from interceptor's holder to lookup's holder
1197 // haven't changed. And load lookup's holder into holder_reg. 1210 // haven't changed. And load lookup's holder into holder_reg.
1198 if (interceptor_holder != lookup->holder()) { 1211 if (interceptor_holder != lookup->holder()) {
1199 holder_reg = CheckPrototypes(interceptor_holder, 1212 holder_reg = CheckPrototypes(interceptor_holder,
1200 holder_reg, 1213 holder_reg,
1201 lookup->holder(), 1214 lookup->holder(),
1202 scratch1, 1215 scratch1,
1203 scratch2, 1216 scratch2,
1204 scratch3, 1217 scratch3,
(...skipping 47 matching lines...)
1252 ExternalReference ref = 1265 ExternalReference ref =
1253 ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorForLoad), 1266 ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorForLoad),
1254 isolate()); 1267 isolate());
1255 __ TailCallExternalReference(ref, 5, 1); 1268 __ TailCallExternalReference(ref, 5, 1);
1256 } 1269 }
1257 } 1270 }
1258 1271
1259 1272
1260 void CallStubCompiler::GenerateNameCheck(String* name, Label* miss) { 1273 void CallStubCompiler::GenerateNameCheck(String* name, Label* miss) {
1261 if (kind_ == Code::KEYED_CALL_IC) { 1274 if (kind_ == Code::KEYED_CALL_IC) {
1262 __ cmp(Operand(ecx), Immediate(Handle<String>(name))); 1275 __ cmp(ecx, Immediate(Handle<String>(name)));
1263 __ j(not_equal, miss); 1276 __ j(not_equal, miss);
1264 } 1277 }
1265 } 1278 }
1266 1279
1267 1280
1268 void CallStubCompiler::GenerateGlobalReceiverCheck(JSObject* object, 1281 void CallStubCompiler::GenerateGlobalReceiverCheck(JSObject* object,
1269 JSObject* holder, 1282 JSObject* holder,
1270 String* name, 1283 String* name,
1271 Label* miss) { 1284 Label* miss) {
1272 ASSERT(holder->IsGlobalObject()); 1285 ASSERT(holder->IsGlobalObject());
(...skipping 36 matching lines...) Expand 10 before | Expand all | Expand 10 after
1309 // function, we have to verify that it still is a function. 1322 // function, we have to verify that it still is a function.
1310 __ JumpIfSmi(edi, miss); 1323 __ JumpIfSmi(edi, miss);
1311 __ CmpObjectType(edi, JS_FUNCTION_TYPE, ebx); 1324 __ CmpObjectType(edi, JS_FUNCTION_TYPE, ebx);
1312 __ j(not_equal, miss); 1325 __ j(not_equal, miss);
1313 1326
1314 // Check the shared function info. Make sure it hasn't changed. 1327 // Check the shared function info. Make sure it hasn't changed.
1315 __ cmp(FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset), 1328 __ cmp(FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset),
1316 Immediate(Handle<SharedFunctionInfo>(function->shared()))); 1329 Immediate(Handle<SharedFunctionInfo>(function->shared())));
1317 __ j(not_equal, miss); 1330 __ j(not_equal, miss);
1318 } else { 1331 } else {
1319 __ cmp(Operand(edi), Immediate(Handle<JSFunction>(function))); 1332 __ cmp(edi, Immediate(Handle<JSFunction>(function)));
1320 __ j(not_equal, miss); 1333 __ j(not_equal, miss);
1321 } 1334 }
1322 } 1335 }
1323 1336
1324 1337
1325 MaybeObject* CallStubCompiler::GenerateMissBranch() { 1338 MaybeObject* CallStubCompiler::GenerateMissBranch() {
1326 MaybeObject* maybe_obj = 1339 MaybeObject* maybe_obj =
1327 isolate()->stub_cache()->ComputeCallMiss(arguments().immediate(), 1340 isolate()->stub_cache()->ComputeCallMiss(arguments().immediate(),
1328 kind_, 1341 kind_,
1329 extra_ic_state_); 1342 extra_ic_state_);
(...skipping 104 matching lines...)
1434 1447
1435 // Get the elements array of the object. 1448 // Get the elements array of the object.
1436 __ mov(ebx, FieldOperand(edx, JSArray::kElementsOffset)); 1449 __ mov(ebx, FieldOperand(edx, JSArray::kElementsOffset));
1437 1450
1438 // Check that the elements are in fast mode and writable. 1451 // Check that the elements are in fast mode and writable.
1439 __ cmp(FieldOperand(ebx, HeapObject::kMapOffset), 1452 __ cmp(FieldOperand(ebx, HeapObject::kMapOffset),
1440 Immediate(factory()->fixed_array_map())); 1453 Immediate(factory()->fixed_array_map()));
1441 __ j(not_equal, &call_builtin); 1454 __ j(not_equal, &call_builtin);
1442 1455
1443 if (argc == 1) { // Otherwise fall through to call builtin. 1456 if (argc == 1) { // Otherwise fall through to call builtin.
1444 Label exit, with_write_barrier, attempt_to_grow_elements; 1457 Label attempt_to_grow_elements, with_write_barrier;
1445 1458
1446 // Get the array's length into eax and calculate new length. 1459 // Get the array's length into eax and calculate new length.
1447 __ mov(eax, FieldOperand(edx, JSArray::kLengthOffset)); 1460 __ mov(eax, FieldOperand(edx, JSArray::kLengthOffset));
1448 STATIC_ASSERT(kSmiTagSize == 1); 1461 STATIC_ASSERT(kSmiTagSize == 1);
1449 STATIC_ASSERT(kSmiTag == 0); 1462 STATIC_ASSERT(kSmiTag == 0);
1450 __ add(Operand(eax), Immediate(Smi::FromInt(argc))); 1463 __ add(eax, Immediate(Smi::FromInt(argc)));
1451 1464
1452 // Get the element's length into ecx. 1465 // Get the element's length into ecx.
1453 __ mov(ecx, FieldOperand(ebx, FixedArray::kLengthOffset)); 1466 __ mov(ecx, FieldOperand(ebx, FixedArray::kLengthOffset));
1454 1467
1455 // Check if we could survive without allocation. 1468 // Check if we could survive without allocation.
1456 __ cmp(eax, Operand(ecx)); 1469 __ cmp(eax, ecx);
1457 __ j(greater, &attempt_to_grow_elements); 1470 __ j(greater, &attempt_to_grow_elements);
1458 1471
1472 // Check if value is a smi.
1473 __ mov(ecx, Operand(esp, argc * kPointerSize));
1474 __ JumpIfNotSmi(ecx, &with_write_barrier);
1475
1459 // Save new length. 1476 // Save new length.
1460 __ mov(FieldOperand(edx, JSArray::kLengthOffset), eax); 1477 __ mov(FieldOperand(edx, JSArray::kLengthOffset), eax);
1461 1478
1462 // Push the element. 1479 // Push the element.
1463 __ lea(edx, FieldOperand(ebx, 1480 __ lea(edx, FieldOperand(ebx,
1464 eax, times_half_pointer_size, 1481 eax, times_half_pointer_size,
1465 FixedArray::kHeaderSize - argc * kPointerSize)); 1482 FixedArray::kHeaderSize - argc * kPointerSize));
1466 __ mov(ecx, Operand(esp, argc * kPointerSize));
1467 __ mov(Operand(edx, 0), ecx); 1483 __ mov(Operand(edx, 0), ecx);
1468 1484
1469 // Check if value is a smi.
1470 __ JumpIfNotSmi(ecx, &with_write_barrier);
1471
1472 __ bind(&exit);
1473 __ ret((argc + 1) * kPointerSize); 1485 __ ret((argc + 1) * kPointerSize);
1474 1486
1475 __ bind(&with_write_barrier); 1487 __ bind(&with_write_barrier);
1476 1488
1477 __ InNewSpace(ebx, ecx, equal, &exit); 1489 if (FLAG_smi_only_arrays) {
1490 __ mov(edi, FieldOperand(edx, HeapObject::kMapOffset));
1491 __ CheckFastObjectElements(edi, &call_builtin);
1492 }
1478 1493
1479 __ RecordWriteHelper(ebx, edx, ecx); 1494 // Save new length.
1495 __ mov(FieldOperand(edx, JSArray::kLengthOffset), eax);
1496
1497 // Push the element.
1498 __ lea(edx, FieldOperand(ebx,
1499 eax, times_half_pointer_size,
1500 FixedArray::kHeaderSize - argc * kPointerSize));
1501 __ mov(Operand(edx, 0), ecx);
1502
1503 __ RecordWrite(
1504 ebx, edx, ecx, kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
1505
1480 __ ret((argc + 1) * kPointerSize); 1506 __ ret((argc + 1) * kPointerSize);
1481 1507
1482 __ bind(&attempt_to_grow_elements); 1508 __ bind(&attempt_to_grow_elements);
1483 if (!FLAG_inline_new) { 1509 if (!FLAG_inline_new) {
1484 __ jmp(&call_builtin); 1510 __ jmp(&call_builtin);
1485 } 1511 }
1486 1512
1513 __ mov(edi, Operand(esp, argc * kPointerSize));
1514 if (FLAG_smi_only_arrays) {
1515 // Growing elements that are SMI-only requires special handling in case
1516 // the new element is non-Smi. For now, delegate to the builtin.
1517 Label no_fast_elements_check;
1518 __ JumpIfSmi(edi, &no_fast_elements_check);
1519 __ mov(esi, FieldOperand(edx, HeapObject::kMapOffset));
1520 __ CheckFastObjectElements(esi, &call_builtin, Label::kFar);
1521 __ bind(&no_fast_elements_check);
1522 }
1523
1524 // We could be lucky and the elements array could be at the top of
1525 // new-space. In this case we can just grow it in place by moving the
1526 // allocation pointer up.
1527
1487 ExternalReference new_space_allocation_top = 1528 ExternalReference new_space_allocation_top =
1488 ExternalReference::new_space_allocation_top_address(isolate()); 1529 ExternalReference::new_space_allocation_top_address(isolate());
1489 ExternalReference new_space_allocation_limit = 1530 ExternalReference new_space_allocation_limit =
1490 ExternalReference::new_space_allocation_limit_address(isolate()); 1531 ExternalReference::new_space_allocation_limit_address(isolate());
1491 1532
1492 const int kAllocationDelta = 4; 1533 const int kAllocationDelta = 4;
1493 // Load top. 1534 // Load top.
1494 __ mov(ecx, Operand::StaticVariable(new_space_allocation_top)); 1535 __ mov(ecx, Operand::StaticVariable(new_space_allocation_top));
1495 1536
1496 // Check if it's the end of elements. 1537 // Check if it's the end of elements.
1497 __ lea(edx, FieldOperand(ebx, 1538 __ lea(edx, FieldOperand(ebx,
1498 eax, times_half_pointer_size, 1539 eax, times_half_pointer_size,
1499 FixedArray::kHeaderSize - argc * kPointerSize)); 1540 FixedArray::kHeaderSize - argc * kPointerSize));
1500 __ cmp(edx, Operand(ecx)); 1541 __ cmp(edx, ecx);
1501 __ j(not_equal, &call_builtin); 1542 __ j(not_equal, &call_builtin);
1502 __ add(Operand(ecx), Immediate(kAllocationDelta * kPointerSize)); 1543 __ add(ecx, Immediate(kAllocationDelta * kPointerSize));
1503 __ cmp(ecx, Operand::StaticVariable(new_space_allocation_limit)); 1544 __ cmp(ecx, Operand::StaticVariable(new_space_allocation_limit));
1504 __ j(above, &call_builtin); 1545 __ j(above, &call_builtin);
1505 1546
1506 // We fit and could grow elements. 1547 // We fit and could grow elements.
1507 __ mov(Operand::StaticVariable(new_space_allocation_top), ecx); 1548 __ mov(Operand::StaticVariable(new_space_allocation_top), ecx);
1508 __ mov(ecx, Operand(esp, argc * kPointerSize));
1509 1549
1510 // Push the argument... 1550 // Push the argument...
1511 __ mov(Operand(edx, 0), ecx); 1551 __ mov(Operand(edx, 0), edi);
1512 // ... and fill the rest with holes. 1552 // ... and fill the rest with holes.
1513 for (int i = 1; i < kAllocationDelta; i++) { 1553 for (int i = 1; i < kAllocationDelta; i++) {
1514 __ mov(Operand(edx, i * kPointerSize), 1554 __ mov(Operand(edx, i * kPointerSize),
1515 Immediate(factory()->the_hole_value())); 1555 Immediate(factory()->the_hole_value()));
1516 } 1556 }
1517 1557
1558 // We know the elements array is in new space so we don't need the
1559 // remembered set, but we just pushed a value onto it so we may have to
1560 // tell the incremental marker to rescan the object that we just grew. We
1561 // don't need to worry about the holes because they are in old space and
1562 // already marked black.
1563 __ RecordWrite(ebx, edx, edi, kDontSaveFPRegs, OMIT_REMEMBERED_SET);
1564
1518 // Restore receiver to edx as finish sequence assumes it's here. 1565 // Restore receiver to edx as finish sequence assumes it's here.
1519 __ mov(edx, Operand(esp, (argc + 1) * kPointerSize)); 1566 __ mov(edx, Operand(esp, (argc + 1) * kPointerSize));
1520 1567
1521 // Increment element's and array's sizes. 1568 // Increment element's and array's sizes.
1522 __ add(FieldOperand(ebx, FixedArray::kLengthOffset), 1569 __ add(FieldOperand(ebx, FixedArray::kLengthOffset),
1523 Immediate(Smi::FromInt(kAllocationDelta))); 1570 Immediate(Smi::FromInt(kAllocationDelta)));
1571
1572 // NOTE: This only happen in new-space, where we don't
1573 // care about the black-byte-count on pages. Otherwise we should
1574 // update that too if the object is black.
1575
1524 __ mov(FieldOperand(edx, JSArray::kLengthOffset), eax); 1576 __ mov(FieldOperand(edx, JSArray::kLengthOffset), eax);
1525 1577
1526 // Elements are in new space, so write barrier is not required.
1527 __ ret((argc + 1) * kPointerSize); 1578 __ ret((argc + 1) * kPointerSize);
1528 } 1579 }
1529 1580
1530 __ bind(&call_builtin); 1581 __ bind(&call_builtin);
1531 __ TailCallExternalReference( 1582 __ TailCallExternalReference(
1532 ExternalReference(Builtins::c_ArrayPush, isolate()), 1583 ExternalReference(Builtins::c_ArrayPush, isolate()),
1533 argc + 1, 1584 argc + 1,
1534 1); 1585 1);
1535 } 1586 }
1536 1587
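Taken together, the ArrayPush changes above amount to: store in place when there is capacity (with an element-kind check and a write barrier for non-smi values), otherwise try to grow the elements array in place at the new-space allocation top, and fall back to the builtin in every other case. A high-level sketch of that decision, assuming the caller's backing buffer really has room whenever in-place growth is allowed:

#include <cstddef>
#include <cstdint>

// Illustrative model; tagging and element kinds are simplified to booleans.
using Tagged = uintptr_t;
inline bool IsSmi(Tagged v) { return (v & 1) == 0; }

constexpr size_t kAllocationDelta = 4;  // matches the stub above

struct FastArray {
  Tagged* elements;  // backing FixedArray (assumed large enough when growing)
  size_t length;     // JSArray length
  size_t capacity;   // FixedArray length
};

enum class PushResult { kDone, kCallBuiltin };

inline PushResult FastPush(FastArray& a, Tagged value,
                           bool fast_object_elements, bool can_grow_in_place) {
  if (a.length < a.capacity) {
    if (!IsSmi(value) && !fast_object_elements)
      return PushResult::kCallBuiltin;     // smi-only elements, non-smi value
    a.elements[a.length++] = value;        // store and bump the length;
    return PushResult::kDone;              // non-smi stores also get RecordWrite
  }
  if (!can_grow_in_place)                  // elements not at the new-space top
    return PushResult::kCallBuiltin;
  a.capacity += kAllocationDelta;          // bump the allocation top, fill holes
  a.elements[a.length++] = value;          // then store (plus RecordWrite)
  return PushResult::kDone;
}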
(...skipping 41 matching lines...)
1578 // Get the elements array of the object. 1629 // Get the elements array of the object.
1579 __ mov(ebx, FieldOperand(edx, JSArray::kElementsOffset)); 1630 __ mov(ebx, FieldOperand(edx, JSArray::kElementsOffset));
1580 1631
1581 // Check that the elements are in fast mode and writable. 1632 // Check that the elements are in fast mode and writable.
1582 __ cmp(FieldOperand(ebx, HeapObject::kMapOffset), 1633 __ cmp(FieldOperand(ebx, HeapObject::kMapOffset),
1583 Immediate(factory()->fixed_array_map())); 1634 Immediate(factory()->fixed_array_map()));
1584 __ j(not_equal, &call_builtin); 1635 __ j(not_equal, &call_builtin);
1585 1636
1586 // Get the array's length into ecx and calculate new length. 1637 // Get the array's length into ecx and calculate new length.
1587 __ mov(ecx, FieldOperand(edx, JSArray::kLengthOffset)); 1638 __ mov(ecx, FieldOperand(edx, JSArray::kLengthOffset));
1588 __ sub(Operand(ecx), Immediate(Smi::FromInt(1))); 1639 __ sub(ecx, Immediate(Smi::FromInt(1)));
1589 __ j(negative, &return_undefined); 1640 __ j(negative, &return_undefined);
1590 1641
1591 // Get the last element. 1642 // Get the last element.
1592 STATIC_ASSERT(kSmiTagSize == 1); 1643 STATIC_ASSERT(kSmiTagSize == 1);
1593 STATIC_ASSERT(kSmiTag == 0); 1644 STATIC_ASSERT(kSmiTag == 0);
1594 __ mov(eax, FieldOperand(ebx, 1645 __ mov(eax, FieldOperand(ebx,
1595 ecx, times_half_pointer_size, 1646 ecx, times_half_pointer_size,
1596 FixedArray::kHeaderSize)); 1647 FixedArray::kHeaderSize));
1597 __ cmp(Operand(eax), Immediate(factory()->the_hole_value())); 1648 __ cmp(eax, Immediate(factory()->the_hole_value()));
1598 __ j(equal, &call_builtin); 1649 __ j(equal, &call_builtin);
1599 1650
1600 // Set the array's length. 1651 // Set the array's length.
1601 __ mov(FieldOperand(edx, JSArray::kLengthOffset), ecx); 1652 __ mov(FieldOperand(edx, JSArray::kLengthOffset), ecx);
1602 1653
1603 // Fill with the hole. 1654 // Fill with the hole.
1604 __ mov(FieldOperand(ebx, 1655 __ mov(FieldOperand(ebx,
1605 ecx, times_half_pointer_size, 1656 ecx, times_half_pointer_size,
1606 FixedArray::kHeaderSize), 1657 FixedArray::kHeaderSize),
1607 Immediate(factory()->the_hole_value())); 1658 Immediate(factory()->the_hole_value()));
(...skipping 443 matching lines...)
2051 Label not_smi; 2102 Label not_smi;
2052 STATIC_ASSERT(kSmiTag == 0); 2103 STATIC_ASSERT(kSmiTag == 0);
2053 __ JumpIfNotSmi(eax, &not_smi); 2104 __ JumpIfNotSmi(eax, &not_smi);
2054 2105
2055 // Set ebx to 1...1 (== -1) if the argument is negative, or to 0...0 2106 // Set ebx to 1...1 (== -1) if the argument is negative, or to 0...0
2056 // otherwise. 2107 // otherwise.
2057 __ mov(ebx, eax); 2108 __ mov(ebx, eax);
2058 __ sar(ebx, kBitsPerInt - 1); 2109 __ sar(ebx, kBitsPerInt - 1);
2059 2110
2060 // Do bitwise not or do nothing depending on ebx. 2111 // Do bitwise not or do nothing depending on ebx.
2061 __ xor_(eax, Operand(ebx)); 2112 __ xor_(eax, ebx);
2062 2113
2063 // Add 1 or do nothing depending on ebx. 2114 // Add 1 or do nothing depending on ebx.
2064 __ sub(eax, Operand(ebx)); 2115 __ sub(eax, ebx);
2065 2116
2066 // If the result is still negative, go to the slow case. 2117 // If the result is still negative, go to the slow case.
2067 // This only happens for the most negative smi. 2118 // This only happens for the most negative smi.
2068 Label slow; 2119 Label slow;
2069 __ j(negative, &slow); 2120 __ j(negative, &slow);
2070 2121
2071 // Smi case done. 2122 // Smi case done.
2072 __ ret(2 * kPointerSize); 2123 __ ret(2 * kPointerSize);
2073 2124
2074 // Check if the argument is a heap number and load its exponent and 2125 // Check if the argument is a heap number and load its exponent and
(...skipping 62 matching lines...)
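The smi fast path above uses the classic branchless absolute-value trick; a plain C++ rendering of the same idea:

#include <cstdint>

// mask is all ones for a negative input and zero otherwise; the xor flips the
// bits only for negatives, and subtracting the mask adds back the missing one.
inline int32_t BranchlessAbs(int32_t x) {
  int32_t mask = x >> 31;      // sar ebx, kBitsPerInt - 1
  return (x ^ mask) - mask;    // xor_(eax, ebx); sub(eax, ebx)
}
// Like the stub, this still overflows for the most negative value, which is
// why the generated code jumps to a slow path when the result stays negative.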
2137 2188
2138 // Check that the receiver isn't a smi. 2189 // Check that the receiver isn't a smi.
2139 __ JumpIfSmi(edx, &miss_before_stack_reserved); 2190 __ JumpIfSmi(edx, &miss_before_stack_reserved);
2140 2191
2141 Counters* counters = isolate()->counters(); 2192 Counters* counters = isolate()->counters();
2142 __ IncrementCounter(counters->call_const(), 1); 2193 __ IncrementCounter(counters->call_const(), 1);
2143 __ IncrementCounter(counters->call_const_fast_api(), 1); 2194 __ IncrementCounter(counters->call_const_fast_api(), 1);
2144 2195
2145 // Allocate space for v8::Arguments implicit values. Must be initialized 2196 // Allocate space for v8::Arguments implicit values. Must be initialized
2146 // before calling any runtime function. 2197 // before calling any runtime function.
2147 __ sub(Operand(esp), Immediate(kFastApiCallArguments * kPointerSize)); 2198 __ sub(esp, Immediate(kFastApiCallArguments * kPointerSize));
2148 2199
2149 // Check that the maps haven't changed and find a Holder as a side effect. 2200 // Check that the maps haven't changed and find a Holder as a side effect.
2150 CheckPrototypes(JSObject::cast(object), edx, holder, 2201 CheckPrototypes(JSObject::cast(object), edx, holder,
2151 ebx, eax, edi, name, depth, &miss); 2202 ebx, eax, edi, name, depth, &miss);
2152 2203
2153 // Move the return address on top of the stack. 2204 // Move the return address on top of the stack.
2154 __ mov(eax, Operand(esp, 3 * kPointerSize)); 2205 __ mov(eax, Operand(esp, 3 * kPointerSize));
2155 __ mov(Operand(esp, 0 * kPointerSize), eax); 2206 __ mov(Operand(esp, 0 * kPointerSize), eax);
2156 2207
2157 // esp[2 * kPointerSize] is uninitialized, esp[3 * kPointerSize] contains 2208 // esp[2 * kPointerSize] is uninitialized, esp[3 * kPointerSize] contains
2158 // duplicate of return address and will be overwritten. 2209 // duplicate of return address and will be overwritten.
2159 MaybeObject* result = GenerateFastApiCall(masm(), optimization, argc); 2210 MaybeObject* result = GenerateFastApiCall(masm(), optimization, argc);
2160 if (result->IsFailure()) return result; 2211 if (result->IsFailure()) return result;
2161 2212
2162 __ bind(&miss); 2213 __ bind(&miss);
2163 __ add(Operand(esp), Immediate(kFastApiCallArguments * kPointerSize)); 2214 __ add(esp, Immediate(kFastApiCallArguments * kPointerSize));
2164 2215
2165 __ bind(&miss_before_stack_reserved); 2216 __ bind(&miss_before_stack_reserved);
2166 MaybeObject* maybe_result = GenerateMissBranch(); 2217 MaybeObject* maybe_result = GenerateMissBranch();
2167 if (maybe_result->IsFailure()) return maybe_result; 2218 if (maybe_result->IsFailure()) return maybe_result;
2168 2219
2169 // Return the generated code. 2220 // Return the generated code.
2170 return GetCode(function); 2221 return GetCode(function);
2171 } 2222 }
2172 2223
2173 2224
(...skipping 418 matching lines...)
2592 // -- edx : receiver 2643 // -- edx : receiver
2593 // -- esp[0] : return address 2644 // -- esp[0] : return address
2594 // ----------------------------------- 2645 // -----------------------------------
2595 Label miss; 2646 Label miss;
2596 2647
2597 // Check that the map of the global has not changed. 2648 // Check that the map of the global has not changed.
2598 __ cmp(FieldOperand(edx, HeapObject::kMapOffset), 2649 __ cmp(FieldOperand(edx, HeapObject::kMapOffset),
2599 Immediate(Handle<Map>(object->map()))); 2650 Immediate(Handle<Map>(object->map())));
2600 __ j(not_equal, &miss); 2651 __ j(not_equal, &miss);
2601 2652
2602
2603 // Compute the cell operand to use. 2653 // Compute the cell operand to use.
2604 Operand cell_operand = Operand::Cell(Handle<JSGlobalPropertyCell>(cell)); 2654 __ mov(ebx, Immediate(Handle<JSGlobalPropertyCell>(cell)));
2605 if (Serializer::enabled()) { 2655 Operand cell_operand = FieldOperand(ebx, JSGlobalPropertyCell::kValueOffset);
2606 __ mov(ebx, Immediate(Handle<JSGlobalPropertyCell>(cell)));
2607 cell_operand = FieldOperand(ebx, JSGlobalPropertyCell::kValueOffset);
2608 }
2609 2656
2610 // Check that the value in the cell is not the hole. If it is, this 2657 // Check that the value in the cell is not the hole. If it is, this
2611 // cell could have been deleted and reintroducing the global needs 2658 // cell could have been deleted and reintroducing the global needs
2612 // to update the property details in the property dictionary of the 2659 // to update the property details in the property dictionary of the
2613 // global object. We bail out to the runtime system to do that. 2660 // global object. We bail out to the runtime system to do that.
2614 __ cmp(cell_operand, factory()->the_hole_value()); 2661 __ cmp(cell_operand, factory()->the_hole_value());
2615 __ j(equal, &miss); 2662 __ j(equal, &miss);
2616 2663
2617 // Store the value in the cell. 2664 // Store the value in the cell.
2618 __ mov(cell_operand, eax); 2665 __ mov(cell_operand, eax);
2666 Label done;
2667 __ test(eax, Immediate(kSmiTagMask));
2668 __ j(zero, &done);
2669
2670 __ mov(ecx, eax);
2671 __ lea(edx, cell_operand);
2672 // Cells are always in the remembered set.
2673 __ RecordWrite(ebx, // Object.
2674 edx, // Address.
2675 ecx, // Value.
2676 kDontSaveFPRegs,
2677 OMIT_REMEMBERED_SET,
2678 OMIT_SMI_CHECK);
2619 2679
2620 // Return the value (register eax). 2680 // Return the value (register eax).
2681 __ bind(&done);
2682
2621 Counters* counters = isolate()->counters(); 2683 Counters* counters = isolate()->counters();
2622 __ IncrementCounter(counters->named_store_global_inline(), 1); 2684 __ IncrementCounter(counters->named_store_global_inline(), 1);
2623 __ ret(0); 2685 __ ret(0);
2624 2686
2625 // Handle store cache miss. 2687 // Handle store cache miss.
2626 __ bind(&miss); 2688 __ bind(&miss);
2627 __ IncrementCounter(counters->named_store_global_inline_miss(), 1); 2689 __ IncrementCounter(counters->named_store_global_inline_miss(), 1);
2628 Handle<Code> ic = isolate()->builtins()->StoreIC_Miss(); 2690 Handle<Code> ic = isolate()->builtins()->StoreIC_Miss();
2629 __ jmp(ic, RelocInfo::CODE_TARGET); 2691 __ jmp(ic, RelocInfo::CODE_TARGET);
2630 2692
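The new store-global sequence above writes the value into the property cell and then emits a write barrier only for non-smi values, skipping the remembered-set update because cells are always in the remembered set. A simplified model, with illustrative names:

#include <cstdint>

// Illustrative names; PropertyCell stands in for JSGlobalPropertyCell.
using Tagged = uintptr_t;
inline bool IsSmi(Tagged v) { return (v & 1) == 0; }  // test(eax, kSmiTagMask)

struct PropertyCell { Tagged value; };

// Stand-in for the RecordWrite call above; the remembered-set update is
// omitted (OMIT_REMEMBERED_SET) because cells are always in the remembered set.
inline void RecordWriteBarrier(PropertyCell* /*cell*/) {}

inline void StoreGlobalCell(PropertyCell* cell, Tagged value) {
  cell->value = value;              // mov(cell_operand, eax)
  if (!IsSmi(value)) {              // the j(zero, &done) skips the barrier
    RecordWriteBarrier(cell);
  }
}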
(...skipping 11 matching lines...)
2642 // -- ecx : key 2704 // -- ecx : key
2643 // -- edx : receiver 2705 // -- edx : receiver
2644 // -- esp[0] : return address 2706 // -- esp[0] : return address
2645 // ----------------------------------- 2707 // -----------------------------------
2646 Label miss; 2708 Label miss;
2647 2709
2648 Counters* counters = isolate()->counters(); 2710 Counters* counters = isolate()->counters();
2649 __ IncrementCounter(counters->keyed_store_field(), 1); 2711 __ IncrementCounter(counters->keyed_store_field(), 1);
2650 2712
2651 // Check that the name has not changed. 2713 // Check that the name has not changed.
2652 __ cmp(Operand(ecx), Immediate(Handle<String>(name))); 2714 __ cmp(ecx, Immediate(Handle<String>(name)));
2653 __ j(not_equal, &miss); 2715 __ j(not_equal, &miss);
2654 2716
2655 // Generate store field code. Trashes the name register. 2717 // Generate store field code. Trashes the name register.
2656 GenerateStoreField(masm(), 2718 GenerateStoreField(masm(),
2657 object, 2719 object,
2658 index, 2720 index,
2659 transition, 2721 transition,
2660 edx, ecx, ebx, 2722 edx, ecx, ebx,
2661 &miss); 2723 &miss);
2662 2724
(...skipping 271 matching lines...)
2934 // -- eax : key 2996 // -- eax : key
2935 // -- edx : receiver 2997 // -- edx : receiver
2936 // -- esp[0] : return address 2998 // -- esp[0] : return address
2937 // ----------------------------------- 2999 // -----------------------------------
2938 Label miss; 3000 Label miss;
2939 3001
2940 Counters* counters = isolate()->counters(); 3002 Counters* counters = isolate()->counters();
2941 __ IncrementCounter(counters->keyed_load_field(), 1); 3003 __ IncrementCounter(counters->keyed_load_field(), 1);
2942 3004
2943 // Check that the name has not changed. 3005 // Check that the name has not changed.
2944 __ cmp(Operand(eax), Immediate(Handle<String>(name))); 3006 __ cmp(eax, Immediate(Handle<String>(name)));
2945 __ j(not_equal, &miss); 3007 __ j(not_equal, &miss);
2946 3008
2947 GenerateLoadField(receiver, holder, edx, ebx, ecx, edi, index, name, &miss); 3009 GenerateLoadField(receiver, holder, edx, ebx, ecx, edi, index, name, &miss);
2948 3010
2949 __ bind(&miss); 3011 __ bind(&miss);
2950 __ DecrementCounter(counters->keyed_load_field(), 1); 3012 __ DecrementCounter(counters->keyed_load_field(), 1);
2951 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC); 3013 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
2952 3014
2953 // Return the generated code. 3015 // Return the generated code.
2954 return GetCode(FIELD, name); 3016 return GetCode(FIELD, name);
2955 } 3017 }
2956 3018
2957 3019
2958 MaybeObject* KeyedLoadStubCompiler::CompileLoadCallback( 3020 MaybeObject* KeyedLoadStubCompiler::CompileLoadCallback(
2959 String* name, 3021 String* name,
2960 JSObject* receiver, 3022 JSObject* receiver,
2961 JSObject* holder, 3023 JSObject* holder,
2962 AccessorInfo* callback) { 3024 AccessorInfo* callback) {
2963 // ----------- S t a t e ------------- 3025 // ----------- S t a t e -------------
2964 // -- eax : key 3026 // -- eax : key
2965 // -- edx : receiver 3027 // -- edx : receiver
2966 // -- esp[0] : return address 3028 // -- esp[0] : return address
2967 // ----------------------------------- 3029 // -----------------------------------
2968 Label miss; 3030 Label miss;
2969 3031
2970 Counters* counters = isolate()->counters(); 3032 Counters* counters = isolate()->counters();
2971 __ IncrementCounter(counters->keyed_load_callback(), 1); 3033 __ IncrementCounter(counters->keyed_load_callback(), 1);
2972 3034
2973 // Check that the name has not changed. 3035 // Check that the name has not changed.
2974 __ cmp(Operand(eax), Immediate(Handle<String>(name))); 3036 __ cmp(eax, Immediate(Handle<String>(name)));
2975 __ j(not_equal, &miss); 3037 __ j(not_equal, &miss);
2976 3038
2977 MaybeObject* result = GenerateLoadCallback(receiver, holder, edx, eax, ebx, 3039 MaybeObject* result = GenerateLoadCallback(receiver, holder, edx, eax, ebx,
2978 ecx, edi, callback, name, &miss); 3040 ecx, edi, callback, name, &miss);
2979 if (result->IsFailure()) { 3041 if (result->IsFailure()) {
2980 miss.Unuse(); 3042 miss.Unuse();
2981 return result; 3043 return result;
2982 } 3044 }
2983 3045
2984 __ bind(&miss); 3046 __ bind(&miss);
(...skipping 14 matching lines...)
2999 // -- eax : key 3061 // -- eax : key
3000 // -- edx : receiver 3062 // -- edx : receiver
3001 // -- esp[0] : return address 3063 // -- esp[0] : return address
3002 // ----------------------------------- 3064 // -----------------------------------
3003 Label miss; 3065 Label miss;
3004 3066
3005 Counters* counters = isolate()->counters(); 3067 Counters* counters = isolate()->counters();
3006 __ IncrementCounter(counters->keyed_load_constant_function(), 1); 3068 __ IncrementCounter(counters->keyed_load_constant_function(), 1);
3007 3069
3008 // Check that the name has not changed. 3070 // Check that the name has not changed.
3009 __ cmp(Operand(eax), Immediate(Handle<String>(name))); 3071 __ cmp(eax, Immediate(Handle<String>(name)));
3010 __ j(not_equal, &miss); 3072 __ j(not_equal, &miss);
3011 3073
3012 GenerateLoadConstant(receiver, holder, edx, ebx, ecx, edi, 3074 GenerateLoadConstant(receiver, holder, edx, ebx, ecx, edi,
3013 value, name, &miss); 3075 value, name, &miss);
3014 __ bind(&miss); 3076 __ bind(&miss);
3015 __ DecrementCounter(counters->keyed_load_constant_function(), 1); 3077 __ DecrementCounter(counters->keyed_load_constant_function(), 1);
3016 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC); 3078 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
3017 3079
3018 // Return the generated code. 3080 // Return the generated code.
3019 return GetCode(CONSTANT_FUNCTION, name); 3081 return GetCode(CONSTANT_FUNCTION, name);
3020 } 3082 }
3021 3083
3022 3084
3023 MaybeObject* KeyedLoadStubCompiler::CompileLoadInterceptor(JSObject* receiver, 3085 MaybeObject* KeyedLoadStubCompiler::CompileLoadInterceptor(JSObject* receiver,
3024 JSObject* holder, 3086 JSObject* holder,
3025 String* name) { 3087 String* name) {
3026 // ----------- S t a t e ------------- 3088 // ----------- S t a t e -------------
3027 // -- eax : key 3089 // -- eax : key
3028 // -- edx : receiver 3090 // -- edx : receiver
3029 // -- esp[0] : return address 3091 // -- esp[0] : return address
3030 // ----------------------------------- 3092 // -----------------------------------
3031 Label miss; 3093 Label miss;
3032 3094
3033 Counters* counters = isolate()->counters(); 3095 Counters* counters = isolate()->counters();
3034 __ IncrementCounter(counters->keyed_load_interceptor(), 1); 3096 __ IncrementCounter(counters->keyed_load_interceptor(), 1);
3035 3097
3036 // Check that the name has not changed. 3098 // Check that the name has not changed.
3037 __ cmp(Operand(eax), Immediate(Handle<String>(name))); 3099 __ cmp(eax, Immediate(Handle<String>(name)));
3038 __ j(not_equal, &miss); 3100 __ j(not_equal, &miss);
3039 3101
3040 LookupResult lookup; 3102 LookupResult lookup;
3041 LookupPostInterceptor(holder, name, &lookup); 3103 LookupPostInterceptor(holder, name, &lookup);
3042 GenerateLoadInterceptor(receiver, 3104 GenerateLoadInterceptor(receiver,
3043 holder, 3105 holder,
3044 &lookup, 3106 &lookup,
3045 edx, 3107 edx,
3046 eax, 3108 eax,
3047 ecx, 3109 ecx,
(...skipping 15 matching lines...)
3063 // -- eax : key 3125 // -- eax : key
3064 // -- edx : receiver 3126 // -- edx : receiver
3065 // -- esp[0] : return address 3127 // -- esp[0] : return address
3066 // ----------------------------------- 3128 // -----------------------------------
3067 Label miss; 3129 Label miss;
3068 3130
3069 Counters* counters = isolate()->counters(); 3131 Counters* counters = isolate()->counters();
3070 __ IncrementCounter(counters->keyed_load_array_length(), 1); 3132 __ IncrementCounter(counters->keyed_load_array_length(), 1);
3071 3133
3072 // Check that the name has not changed. 3134 // Check that the name has not changed.
3073 __ cmp(Operand(eax), Immediate(Handle<String>(name))); 3135 __ cmp(eax, Immediate(Handle<String>(name)));
3074 __ j(not_equal, &miss); 3136 __ j(not_equal, &miss);
3075 3137
3076 GenerateLoadArrayLength(masm(), edx, ecx, &miss); 3138 GenerateLoadArrayLength(masm(), edx, ecx, &miss);
3077 __ bind(&miss); 3139 __ bind(&miss);
3078 __ DecrementCounter(counters->keyed_load_array_length(), 1); 3140 __ DecrementCounter(counters->keyed_load_array_length(), 1);
3079 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC); 3141 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
3080 3142
3081 // Return the generated code. 3143 // Return the generated code.
3082 return GetCode(CALLBACKS, name); 3144 return GetCode(CALLBACKS, name);
3083 } 3145 }
3084 3146
3085 3147
3086 MaybeObject* KeyedLoadStubCompiler::CompileLoadStringLength(String* name) { 3148 MaybeObject* KeyedLoadStubCompiler::CompileLoadStringLength(String* name) {
3087 // ----------- S t a t e ------------- 3149 // ----------- S t a t e -------------
3088 // -- eax : key 3150 // -- eax : key
3089 // -- edx : receiver 3151 // -- edx : receiver
3090 // -- esp[0] : return address 3152 // -- esp[0] : return address
3091 // ----------------------------------- 3153 // -----------------------------------
3092 Label miss; 3154 Label miss;
3093 3155
3094 Counters* counters = isolate()->counters(); 3156 Counters* counters = isolate()->counters();
3095 __ IncrementCounter(counters->keyed_load_string_length(), 1); 3157 __ IncrementCounter(counters->keyed_load_string_length(), 1);
3096 3158
3097 // Check that the name has not changed. 3159 // Check that the name has not changed.
3098 __ cmp(Operand(eax), Immediate(Handle<String>(name))); 3160 __ cmp(eax, Immediate(Handle<String>(name)));
3099 __ j(not_equal, &miss); 3161 __ j(not_equal, &miss);
3100 3162
3101 GenerateLoadStringLength(masm(), edx, ecx, ebx, &miss, true); 3163 GenerateLoadStringLength(masm(), edx, ecx, ebx, &miss, true);
3102 __ bind(&miss); 3164 __ bind(&miss);
3103 __ DecrementCounter(counters->keyed_load_string_length(), 1); 3165 __ DecrementCounter(counters->keyed_load_string_length(), 1);
3104 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC); 3166 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
3105 3167
3106 // Return the generated code. 3168 // Return the generated code.
3107 return GetCode(CALLBACKS, name); 3169 return GetCode(CALLBACKS, name);
3108 } 3170 }
3109 3171
3110 3172
3111 MaybeObject* KeyedLoadStubCompiler::CompileLoadFunctionPrototype(String* name) { 3173 MaybeObject* KeyedLoadStubCompiler::CompileLoadFunctionPrototype(String* name) {
3112 // ----------- S t a t e ------------- 3174 // ----------- S t a t e -------------
3113 // -- eax : key 3175 // -- eax : key
3114 // -- edx : receiver 3176 // -- edx : receiver
3115 // -- esp[0] : return address 3177 // -- esp[0] : return address
3116 // ----------------------------------- 3178 // -----------------------------------
3117 Label miss; 3179 Label miss;
3118 3180
3119 Counters* counters = isolate()->counters(); 3181 Counters* counters = isolate()->counters();
3120 __ IncrementCounter(counters->keyed_load_function_prototype(), 1); 3182 __ IncrementCounter(counters->keyed_load_function_prototype(), 1);
3121 3183
3122 // Check that the name has not changed. 3184 // Check that the name has not changed.
3123 __ cmp(Operand(eax), Immediate(Handle<String>(name))); 3185 __ cmp(eax, Immediate(Handle<String>(name)));
3124 __ j(not_equal, &miss); 3186 __ j(not_equal, &miss);
3125 3187
3126 GenerateLoadFunctionPrototype(masm(), edx, ecx, ebx, &miss); 3188 GenerateLoadFunctionPrototype(masm(), edx, ecx, ebx, &miss);
3127 __ bind(&miss); 3189 __ bind(&miss);
3128 __ DecrementCounter(counters->keyed_load_function_prototype(), 1); 3190 __ DecrementCounter(counters->keyed_load_function_prototype(), 1);
3129 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC); 3191 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
3130 3192
3131 // Return the generated code. 3193 // Return the generated code.
3132 return GetCode(CALLBACKS, name); 3194 return GetCode(CALLBACKS, name);
3133 } 3195 }
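
Every CompileLoad* stub above follows the same recipe: bump a counter, check that the key in eax is still the one property name the stub was compiled for, emit the fast-path load, and otherwise fall through to the generic keyed-load miss handler. A minimal C++ sketch of that run-time control flow, with every name invented for illustration (this is not V8's API):

    #include <string>

    struct Counters { int keyed_load_constant_function = 0; };

    int GenericMissStandIn(const std::string&) { return -1; }  // stand-in for the generic IC

    int KeyedLoadConstantStub(Counters& counters,
                              const std::string& key,
                              const std::string& expected_name,
                              int constant_value,
                              int (*generic_miss)(const std::string&)) {
      ++counters.keyed_load_constant_function;   // IncrementCounter
      if (key == expected_name) {                // cmp(eax, Immediate(name)) / j(not_equal, &miss)
        return constant_value;                   // fast path emitted by GenerateLoadConstant
      }
      --counters.keyed_load_constant_function;   // DecrementCounter on the miss path
      return generic_miss(key);                  // tail jump emitted by GenerateLoadMiss
    }

    int main() {
      Counters counters;
      return KeyedLoadConstantStub(counters, "length", "length", 42,
                                   &GenericMissStandIn) == 42 ? 0 : 1;
    }

The name check is what makes these stubs safe to cache: each one is specialized for a single property name, so any other key has to go back through the generic IC.
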
(...skipping 157 matching lines...)
3291 ASSERT(function->has_initial_map()); 3353 ASSERT(function->has_initial_map());
3292 for (int i = shared->this_property_assignments_count(); 3354 for (int i = shared->this_property_assignments_count();
3293 i < function->initial_map()->inobject_properties(); 3355 i < function->initial_map()->inobject_properties();
3294 i++) { 3356 i++) {
3295 __ mov(Operand(edx, i * kPointerSize), edi); 3357 __ mov(Operand(edx, i * kPointerSize), edi);
3296 } 3358 }
3297 3359
3298 // Move argc to ebx and retrieve and tag the JSObject to return. 3360 // Move argc to ebx and retrieve and tag the JSObject to return.
3299 __ mov(ebx, eax); 3361 __ mov(ebx, eax);
3300 __ pop(eax); 3362 __ pop(eax);
3301 __ or_(Operand(eax), Immediate(kHeapObjectTag)); 3363 __ or_(eax, Immediate(kHeapObjectTag));
3302 3364
3303 // Remove caller arguments and receiver from the stack and return. 3365 // Remove caller arguments and receiver from the stack and return.
3304 __ pop(ecx); 3366 __ pop(ecx);
3305 __ lea(esp, Operand(esp, ebx, times_pointer_size, 1 * kPointerSize)); 3367 __ lea(esp, Operand(esp, ebx, times_pointer_size, 1 * kPointerSize));
3306 __ push(ecx); 3368 __ push(ecx);
3307 Counters* counters = isolate()->counters(); 3369 Counters* counters = isolate()->counters();
3308 __ IncrementCounter(counters->constructed_objects(), 1); 3370 __ IncrementCounter(counters->constructed_objects(), 1);
3309 __ IncrementCounter(counters->constructed_objects_stub(), 1); 3371 __ IncrementCounter(counters->constructed_objects_stub(), 1);
3310 __ ret(0); 3372 __ ret(0);
3311 3373
(...skipping 360 matching lines...)
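
The constructor-stub epilogue in the chunk above depends on V8's pointer tagging: the allocator hands back an untagged, word-aligned address, and or_(eax, Immediate(kHeapObjectTag)) sets the low bit so the result reads as a heap object rather than a smi. A hypothetical, self-contained illustration of that step; the tag values below match my understanding of 32-bit V8 of this era, but treat them as assumptions:

    #include <cassert>
    #include <cstdint>

    constexpr uintptr_t kHeapObjectTag = 1;   // low bit set  => heap object pointer
    constexpr uintptr_t kSmiTag        = 0;   // low bit clear => small integer

    uintptr_t TagJSObject(uintptr_t raw_allocation) {
      assert((raw_allocation & 1) == 0);       // allocations are at least 2-byte aligned
      return raw_allocation | kHeapObjectTag;  // same effect as the or_ above
    }

    int main() {
      uintptr_t tagged = TagJSObject(0x1000);
      assert(tagged == 0x1001);
      assert((tagged & 1) != kSmiTag);  // a tagged heap object can never look like a smi
      return 0;
    }

The pop ecx / lea esp / push ecx / ret sequence that follows is the usual ia32 idiom for dropping the argc arguments plus the receiver from the stack while keeping the return address in place.
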
3672 } 3734 }
3673 } else { 3735 } else {
3674 if (CpuFeatures::IsSupported(SSE3)) { 3736 if (CpuFeatures::IsSupported(SSE3)) {
3675 CpuFeatures::Scope scope(SSE3); 3737 CpuFeatures::Scope scope(SSE3);
3676 // fisttp stores values as signed integers. To represent the 3738 // fisttp stores values as signed integers. To represent the
3677 // entire range of int and unsigned int arrays, store as a 3739 // entire range of int and unsigned int arrays, store as a
3678 // 64-bit int and discard the high 32 bits. 3740 // 64-bit int and discard the high 32 bits.
3679 // If the value is NaN or +/-infinity, the result is 0x80000000, 3741 // If the value is NaN or +/-infinity, the result is 0x80000000,
3680 // which is automatically zero when taken mod 2^n, n < 32. 3742 // which is automatically zero when taken mod 2^n, n < 32.
3681 __ fld_d(FieldOperand(eax, HeapNumber::kValueOffset)); 3743 __ fld_d(FieldOperand(eax, HeapNumber::kValueOffset));
3682 __ sub(Operand(esp), Immediate(2 * kPointerSize)); 3744 __ sub(esp, Immediate(2 * kPointerSize));
3683 __ fisttp_d(Operand(esp, 0)); 3745 __ fisttp_d(Operand(esp, 0));
3684 __ pop(ebx); 3746 __ pop(ebx);
3685 __ add(Operand(esp), Immediate(kPointerSize)); 3747 __ add(esp, Immediate(kPointerSize));
3686 } else { 3748 } else {
3687 ASSERT(CpuFeatures::IsSupported(SSE2)); 3749 ASSERT(CpuFeatures::IsSupported(SSE2));
3688 CpuFeatures::Scope scope(SSE2); 3750 CpuFeatures::Scope scope(SSE2);
3689 // We can easily implement the correct rounding behavior for the 3751 // We can easily implement the correct rounding behavior for the
3690 // range [0, 2^31-1]. For the time being, to keep this code simple, 3752 // range [0, 2^31-1]. For the time being, to keep this code simple,
3691 // make the slow runtime call for values outside this range. 3753 // make the slow runtime call for values outside this range.
3692 // Note: we could do better for signed int arrays. 3754 // Note: we could do better for signed int arrays.
3693 __ movd(xmm0, FieldOperand(eax, HeapNumber::kValueOffset)); 3755 __ movd(xmm0, FieldOperand(eax, HeapNumber::kValueOffset));
3694 // We will need the key if we have to make the slow runtime call. 3756 // We will need the key if we have to make the slow runtime call.
3695 __ push(ebx); 3757 __ push(ebx);
(...skipping 135 matching lines...)
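
The SSE3 branch above rests on one arithmetic fact from its comment: the 0x80000000 value produced for NaN and +/-Infinity is divisible by every power of two below 2^32, so once the store is narrowed to an 8-bit or 16-bit external-array element the value written is 0. A tiny standalone check of that claim (illustrative only; it does not execute fisttp, and converting a NaN through a plain C++ cast would be undefined behavior anyway):

    #include <cstdint>
    #include <cstdio>

    int main() {
      const uint32_t kFisttpIndefinite = 0x80000000u;  // value the comment describes for NaN/Inf
      std::printf("mod 2^8  -> %u\n", static_cast<unsigned>(kFisttpIndefinite % 256u));    // 0
      std::printf("mod 2^16 -> %u\n", static_cast<unsigned>(kFisttpIndefinite % 65536u));  // 0
      // As a signed 32-bit value this is INT32_MIN on two's-complement targets.
      std::printf("as int32 -> %d\n", static_cast<int32_t>(kFisttpIndefinite));
      return 0;
    }
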
3831 masm->isolate()->builtins()->KeyedLoadIC_Slow(); 3893 masm->isolate()->builtins()->KeyedLoadIC_Slow();
3832 __ jmp(slow_ic, RelocInfo::CODE_TARGET); 3894 __ jmp(slow_ic, RelocInfo::CODE_TARGET);
3833 3895
3834 __ bind(&miss_force_generic); 3896 __ bind(&miss_force_generic);
3835 Handle<Code> miss_ic = 3897 Handle<Code> miss_ic =
3836 masm->isolate()->builtins()->KeyedLoadIC_MissForceGeneric(); 3898 masm->isolate()->builtins()->KeyedLoadIC_MissForceGeneric();
3837 __ jmp(miss_ic, RelocInfo::CODE_TARGET); 3899 __ jmp(miss_ic, RelocInfo::CODE_TARGET);
3838 } 3900 }
3839 3901
3840 3902
3841 void KeyedStoreStubCompiler::GenerateStoreFastElement(MacroAssembler* masm, 3903 void KeyedStoreStubCompiler::GenerateStoreFastElement(
3842 bool is_js_array) { 3904 MacroAssembler* masm,
3905 bool is_js_array,
3906 ElementsKind elements_kind) {
3843 // ----------- S t a t e ------------- 3907 // ----------- S t a t e -------------
3844 // -- eax : value 3908 // -- eax : value
3845 // -- ecx : key 3909 // -- ecx : key
3846 // -- edx : receiver 3910 // -- edx : receiver
3847 // -- esp[0] : return address 3911 // -- esp[0] : return address
3848 // ----------------------------------- 3912 // -----------------------------------
3849 Label miss_force_generic; 3913 Label miss_force_generic;
3850 3914
3851 // This stub is meant to be tail-jumped to; the receiver must already 3915 // This stub is meant to be tail-jumped to; the receiver must already
3852 // have been verified by the caller to not be a smi. 3916 // have been verified by the caller to not be a smi.
(...skipping 10 matching lines...)
3863 if (is_js_array) { 3927 if (is_js_array) {
3864 // Check that the key is within bounds. 3928 // Check that the key is within bounds.
3865 __ cmp(ecx, FieldOperand(edx, JSArray::kLengthOffset)); // smis. 3929 __ cmp(ecx, FieldOperand(edx, JSArray::kLengthOffset)); // smis.
3866 __ j(above_equal, &miss_force_generic); 3930 __ j(above_equal, &miss_force_generic);
3867 } else { 3931 } else {
3868 // Check that the key is within bounds. 3932 // Check that the key is within bounds.
3869 __ cmp(ecx, FieldOperand(edi, FixedArray::kLengthOffset)); // smis. 3933 __ cmp(ecx, FieldOperand(edi, FixedArray::kLengthOffset)); // smis.
3870 __ j(above_equal, &miss_force_generic); 3934 __ j(above_equal, &miss_force_generic);
3871 } 3935 }
3872 3936
3873 // Do the store and update the write barrier. Make sure to preserve 3937 if (elements_kind == FAST_SMI_ONLY_ELEMENTS) {
3874 // the value in register eax. 3938 __ JumpIfNotSmi(eax, &miss_force_generic);
3875 __ mov(edx, Operand(eax)); 3939 // ecx is a smi, use times_half_pointer_size instead of
3876 __ mov(FieldOperand(edi, ecx, times_2, FixedArray::kHeaderSize), eax); 3940 // times_pointer_size
3877 __ RecordWrite(edi, 0, edx, ecx); 3941 __ mov(FieldOperand(edi,
3942 ecx,
3943 times_half_pointer_size,
3944 FixedArray::kHeaderSize), eax);
3945 } else {
3946 ASSERT(elements_kind == FAST_ELEMENTS);
3947 // Do the store and update the write barrier.
3948 // ecx is a smi, use times_half_pointer_size instead of
3949 // times_pointer_size
3950 __ lea(ecx, FieldOperand(edi,
3951 ecx,
3952 times_half_pointer_size,
3953 FixedArray::kHeaderSize));
3954 __ mov(Operand(ecx, 0), eax);
3955 // Make sure to preserve the value in register eax.
3956 __ mov(edx, eax);
3957 __ RecordWrite(edi, ecx, edx, kDontSaveFPRegs);
3958 }
3878 3959
3879 // Done. 3960 // Done.
3880 __ ret(0); 3961 __ ret(0);
3881 3962
3882 // Handle store cache miss, replacing the ic with the generic stub. 3963 // Handle store cache miss, replacing the ic with the generic stub.
3883 __ bind(&miss_force_generic); 3964 __ bind(&miss_force_generic);
3884 Handle<Code> ic_force_generic = 3965 Handle<Code> ic_force_generic =
3885 masm->isolate()->builtins()->KeyedStoreIC_MissForceGeneric(); 3966 masm->isolate()->builtins()->KeyedStoreIC_MissForceGeneric();
3886 __ jmp(ic_force_generic, RelocInfo::CODE_TARGET); 3967 __ jmp(ic_force_generic, RelocInfo::CODE_TARGET);
3887 } 3968 }
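
Two details of the new GenerateStoreFastElement are easy to miss. First, the key in ecx is still a tagged smi (on ia32 a smi holds value << 1), so scaling it by times_half_pointer_size reaches the same slot as scaling the untagged index by a full pointer. Second, the FAST_SMI_ONLY_ELEMENTS branch can skip RecordWrite because a smi is not a heap pointer, so the store can never create a reference the write barrier would need to record. A hypothetical sketch of the addressing arithmetic, assuming that 32-bit smi encoding (the header offset below is made up):

    #include <cassert>
    #include <cstdint>

    constexpr uintptr_t kPointerSize = 4;   // ia32
    constexpr int kSmiTagSize = 1;          // smi = value << 1, low bit clear

    uintptr_t SmiTag(uint32_t value) { return static_cast<uintptr_t>(value) << kSmiTagSize; }

    // Models FieldOperand(edi, ecx, times_half_pointer_size, FixedArray::kHeaderSize):
    // base + tagged_key * (kPointerSize / 2) + header offset.
    uintptr_t ElementSlot(uintptr_t elements_base, uintptr_t tagged_key, uintptr_t header) {
      return elements_base + tagged_key * (kPointerSize / 2) + header;
    }

    int main() {
      const uintptr_t base = 0x2000, header = 8;  // header value is illustrative only
      for (uint32_t index = 0; index < 16; ++index) {
        // Half-pointer scaling of the smi lands on the same slot as full-pointer
        // scaling of the untagged index.
        assert(ElementSlot(base, SmiTag(index), header) ==
               base + index * kPointerSize + header);
      }
      return 0;
    }
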
3888 3969
3889 3970
3890 void KeyedStoreStubCompiler::GenerateStoreFastDoubleElement( 3971 void KeyedStoreStubCompiler::GenerateStoreFastDoubleElement(
3891 MacroAssembler* masm, 3972 MacroAssembler* masm,
3892 bool is_js_array) { 3973 bool is_js_array) {
3893 // ----------- S t a t e ------------- 3974 // ----------- S t a t e -------------
3894 // -- eax : value 3975 // -- eax : value
3895 // -- ecx : key 3976 // -- ecx : key
3896 // -- edx : receiver 3977 // -- edx : receiver
3897 // -- esp[0] : return address 3978 // -- esp[0] : return address
3898 // ----------------------------------- 3979 // -----------------------------------
3899 Label miss_force_generic, smi_value, is_nan, maybe_nan; 3980 Label miss_force_generic;
3900 Label have_double_value, not_nan;
3901 3981
3902 // This stub is meant to be tail-jumped to; the receiver must already 3982 // This stub is meant to be tail-jumped to; the receiver must already
3903 // have been verified by the caller to not be a smi. 3983 // have been verified by the caller to not be a smi.
3904 3984
3905 // Check that the key is a smi. 3985 // Check that the key is a smi.
3906 __ JumpIfNotSmi(ecx, &miss_force_generic); 3986 __ JumpIfNotSmi(ecx, &miss_force_generic);
3907 3987
3908 // Get the elements array. 3988 // Get the elements array.
3909 __ mov(edi, FieldOperand(edx, JSObject::kElementsOffset)); 3989 __ mov(edi, FieldOperand(edx, JSObject::kElementsOffset));
3910 __ AssertFastElements(edi); 3990 __ AssertFastElements(edi);
3911 3991
3912 if (is_js_array) { 3992 if (is_js_array) {
3913 // Check that the key is within bounds. 3993 // Check that the key is within bounds.
3914 __ cmp(ecx, FieldOperand(edx, JSArray::kLengthOffset)); // smis. 3994 __ cmp(ecx, FieldOperand(edx, JSArray::kLengthOffset)); // smis.
3915 } else { 3995 } else {
3916 // Check that the key is within bounds. 3996 // Check that the key is within bounds.
3917 __ cmp(ecx, FieldOperand(edi, FixedArray::kLengthOffset)); // smis. 3997 __ cmp(ecx, FieldOperand(edi, FixedArray::kLengthOffset)); // smis.
3918 } 3998 }
3919 __ j(above_equal, &miss_force_generic); 3999 __ j(above_equal, &miss_force_generic);
3920 4000
3921 __ JumpIfSmi(eax, &smi_value, Label::kNear); 4001 __ StoreNumberToDoubleElements(eax,
3922 4002 edi,
3923 __ CheckMap(eax, 4003 ecx,
3924 masm->isolate()->factory()->heap_number_map(), 4004 edx,
3925 &miss_force_generic, 4005 xmm0,
3926 DONT_DO_SMI_CHECK); 4006 &miss_force_generic,
3927 4007 true);
3928 // Double value, canonicalize NaN.
3929 uint32_t offset = HeapNumber::kValueOffset + sizeof(kHoleNanLower32);
3930 __ cmp(FieldOperand(eax, offset), Immediate(kNaNOrInfinityLowerBoundUpper32));
3931 __ j(greater_equal, &maybe_nan, Label::kNear);
3932
3933 __ bind(&not_nan);
3934 ExternalReference canonical_nan_reference =
3935 ExternalReference::address_of_canonical_non_hole_nan();
3936 if (CpuFeatures::IsSupported(SSE2)) {
3937 CpuFeatures::Scope use_sse2(SSE2);
3938 __ movdbl(xmm0, FieldOperand(eax, HeapNumber::kValueOffset));
3939 __ bind(&have_double_value);
3940 __ movdbl(FieldOperand(edi, ecx, times_4, FixedDoubleArray::kHeaderSize),
3941 xmm0);
3942 __ ret(0);
3943 } else {
3944 __ fld_d(FieldOperand(eax, HeapNumber::kValueOffset));
3945 __ bind(&have_double_value);
3946 __ fstp_d(FieldOperand(edi, ecx, times_4, FixedDoubleArray::kHeaderSize));
3947 __ ret(0);
3948 }
3949
3950 __ bind(&maybe_nan);
3951 // Could be NaN or Infinity. If fraction is not zero, it's NaN, otherwise
3952 // it's an Infinity, and the non-NaN code path applies.
3953 __ j(greater, &is_nan, Label::kNear);
3954 __ cmp(FieldOperand(eax, HeapNumber::kValueOffset), Immediate(0));
3955 __ j(zero, &not_nan);
3956 __ bind(&is_nan);
3957 if (CpuFeatures::IsSupported(SSE2)) {
3958 CpuFeatures::Scope use_sse2(SSE2);
3959 __ movdbl(xmm0, Operand::StaticVariable(canonical_nan_reference));
3960 } else {
3961 __ fld_d(Operand::StaticVariable(canonical_nan_reference));
3962 }
3963 __ jmp(&have_double_value, Label::kNear);
3964
3965 __ bind(&smi_value);
3966 // Value is a smi. Convert to a double and store.
3967 // Preserve original value.
3968 __ mov(edx, eax);
3969 __ SmiUntag(edx);
3970 __ push(edx);
3971 __ fild_s(Operand(esp, 0));
3972 __ pop(edx);
3973 __ fstp_d(FieldOperand(edi, ecx, times_4, FixedDoubleArray::kHeaderSize));
3974 __ ret(0); 4008 __ ret(0);
3975 4009
3976 // Handle store cache miss, replacing the ic with the generic stub. 4010 // Handle store cache miss, replacing the ic with the generic stub.
3977 __ bind(&miss_force_generic); 4011 __ bind(&miss_force_generic);
3978 Handle<Code> ic_force_generic = 4012 Handle<Code> ic_force_generic =
3979 masm->isolate()->builtins()->KeyedStoreIC_MissForceGeneric(); 4013 masm->isolate()->builtins()->KeyedStoreIC_MissForceGeneric();
3980 __ jmp(ic_force_generic, RelocInfo::CODE_TARGET); 4014 __ jmp(ic_force_generic, RelocInfo::CODE_TARGET);
3981 } 4015 }
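
The body removed above (and, presumably, the StoreNumberToDoubleElements helper that replaces it) exists to keep arbitrary NaN bit patterns out of a FixedDoubleArray, where one specific NaN encoding is reserved as the hole marker: smis are converted to doubles, and any incoming NaN is rewritten to a canonical non-hole NaN before it is stored. An illustrative-only C++ sketch of that canonicalization idea; the bit pattern below is an assumption, not necessarily the one address_of_canonical_non_hole_nan points at:

    #include <cassert>
    #include <cmath>
    #include <cstdint>
    #include <cstring>

    double CanonicalizeForDoubleArray(double value) {
      if (std::isnan(value)) {
        // Hypothetical canonical quiet NaN; any fixed non-hole pattern would do.
        const uint64_t kCanonicalNaNBits = 0x7FF8000000000000ull;
        double canonical;
        std::memcpy(&canonical, &kCanonicalNaNBits, sizeof(canonical));
        return canonical;
      }
      return value;  // ordinary doubles and +/-Infinity are stored unchanged
    }

    int main() {
      assert(std::isnan(CanonicalizeForDoubleArray(std::nan("1"))));
      assert(CanonicalizeForDoubleArray(2.5) == 2.5);
      return 0;
    }
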
3982 4016
3983 4017
3984 #undef __ 4018 #undef __
3985 4019
3986 } } // namespace v8::internal 4020 } } // namespace v8::internal
3987 4021
3988 #endif // V8_TARGET_ARCH_IA32 4022 #endif // V8_TARGET_ARCH_IA32