Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(174)

Side by Side Diff: src/x64/stub-cache-x64.cc

Issue 6085006: Reorder the functions in stub-cache-x64.cc, so they are in the same order as ... (Closed) Base URL: http://v8.googlecode.com/svn/branches/bleeding_edge/
Patch Set: Created 9 years, 11 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« no previous file with comments | « no previous file | no next file » | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2010 the V8 project authors. All rights reserved. 1 // Copyright 2010 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution. 11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its 12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived 13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission. 14 // from this software without specific prior written permission.
15 // 15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT 19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT 21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 27
28
29 #include "v8.h" 28 #include "v8.h"
30 29
31 #if defined(V8_TARGET_ARCH_X64) 30 #if defined(V8_TARGET_ARCH_X64)
32 31
33 #include "ic-inl.h" 32 #include "ic-inl.h"
34 #include "code-stubs.h"
35 #include "codegen-inl.h" 33 #include "codegen-inl.h"
36 #include "stub-cache.h" 34 #include "stub-cache.h"
37 #include "macro-assembler.h"
38 35
39 namespace v8 { 36 namespace v8 {
40 namespace internal { 37 namespace internal {
41 38
42 //-----------------------------------------------------------------------------
43 // StubCompiler static helper functions
44
45 #define __ ACCESS_MASM(masm) 39 #define __ ACCESS_MASM(masm)
46 40
47 41
48 static void ProbeTable(MacroAssembler* masm, 42 static void ProbeTable(MacroAssembler* masm,
49 Code::Flags flags, 43 Code::Flags flags,
50 StubCache::Table table, 44 StubCache::Table table,
51 Register name, 45 Register name,
52 Register offset) { 46 Register offset) {
53 ASSERT_EQ(8, kPointerSize); 47 ASSERT_EQ(8, kPointerSize);
54 ASSERT_EQ(16, sizeof(StubCache::Entry)); 48 ASSERT_EQ(16, sizeof(StubCache::Entry));
(...skipping 120 matching lines...) Expand 10 before | Expand all | Expand 10 after
175 // Give up probing if still not found the undefined value. 169 // Give up probing if still not found the undefined value.
176 __ j(not_equal, miss_label); 170 __ j(not_equal, miss_label);
177 } 171 }
178 } 172 }
179 173
180 __ bind(&done); 174 __ bind(&done);
181 __ DecrementCounter(&Counters::negative_lookups_miss, 1); 175 __ DecrementCounter(&Counters::negative_lookups_miss, 1);
182 } 176 }
183 177
184 178
185 void StubCompiler::GenerateLoadMiss(MacroAssembler* masm, Code::Kind kind) {
186 ASSERT(kind == Code::LOAD_IC || kind == Code::KEYED_LOAD_IC);
187 Code* code = NULL;
188 if (kind == Code::LOAD_IC) {
189 code = Builtins::builtin(Builtins::LoadIC_Miss);
190 } else {
191 code = Builtins::builtin(Builtins::KeyedLoadIC_Miss);
192 }
193
194 Handle<Code> ic(code);
195 __ Jump(ic, RelocInfo::CODE_TARGET);
196 }
197
198
199 void StubCompiler::GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm,
200 int index,
201 Register prototype) {
202 // Load the global or builtins object from the current context.
203 __ movq(prototype,
204 Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
205 // Load the global context from the global or builtins object.
206 __ movq(prototype,
207 FieldOperand(prototype, GlobalObject::kGlobalContextOffset));
208 // Load the function from the global context.
209 __ movq(prototype, Operand(prototype, Context::SlotOffset(index)));
210 // Load the initial map. The global functions all have initial maps.
211 __ movq(prototype,
212 FieldOperand(prototype, JSFunction::kPrototypeOrInitialMapOffset));
213 // Load the prototype from the initial map.
214 __ movq(prototype, FieldOperand(prototype, Map::kPrototypeOffset));
215 }
216
217
218 void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype(
219 MacroAssembler* masm, int index, Register prototype, Label* miss) {
220 // Check we're still in the same context.
221 __ Move(prototype, Top::global());
222 __ cmpq(Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)),
223 prototype);
224 __ j(not_equal, miss);
225 // Get the global function with the given index.
226 JSFunction* function = JSFunction::cast(Top::global_context()->get(index));
227 // Load its initial map. The global functions all have initial maps.
228 __ Move(prototype, Handle<Map>(function->initial_map()));
229 // Load the prototype from the initial map.
230 __ movq(prototype, FieldOperand(prototype, Map::kPrototypeOffset));
231 }
232
233
234 // Load a fast property out of a holder object (src). In-object properties
235 // are loaded directly otherwise the property is loaded from the properties
236 // fixed array.
237 void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm,
238 Register dst, Register src,
239 JSObject* holder, int index) {
240 // Adjust for the number of properties stored in the holder.
241 index -= holder->map()->inobject_properties();
242 if (index < 0) {
243 // Get the property straight out of the holder.
244 int offset = holder->map()->instance_size() + (index * kPointerSize);
245 __ movq(dst, FieldOperand(src, offset));
246 } else {
247 // Calculate the offset into the properties array.
248 int offset = index * kPointerSize + FixedArray::kHeaderSize;
249 __ movq(dst, FieldOperand(src, JSObject::kPropertiesOffset));
250 __ movq(dst, FieldOperand(dst, offset));
251 }
252 }
253
254
255 static void PushInterceptorArguments(MacroAssembler* masm,
256 Register receiver,
257 Register holder,
258 Register name,
259 JSObject* holder_obj) {
260 __ push(name);
261 InterceptorInfo* interceptor = holder_obj->GetNamedInterceptor();
262 ASSERT(!Heap::InNewSpace(interceptor));
263 __ Move(kScratchRegister, Handle<Object>(interceptor));
264 __ push(kScratchRegister);
265 __ push(receiver);
266 __ push(holder);
267 __ push(FieldOperand(kScratchRegister, InterceptorInfo::kDataOffset));
268 }
269
270
271 void StubCache::GenerateProbe(MacroAssembler* masm, 179 void StubCache::GenerateProbe(MacroAssembler* masm,
272 Code::Flags flags, 180 Code::Flags flags,
273 Register receiver, 181 Register receiver,
274 Register name, 182 Register name,
275 Register scratch, 183 Register scratch,
276 Register extra, 184 Register extra,
277 Register extra2) { 185 Register extra2) {
278 Label miss; 186 Label miss;
279 USE(extra); // The register extra is not used on the X64 platform. 187 USE(extra); // The register extra is not used on the X64 platform.
280 USE(extra2); // The register extra2 is not used on the X64 platform. 188 USE(extra2); // The register extra2 is not used on the X64 platform.
(...skipping 36 matching lines...) Expand 10 before | Expand all | Expand 10 after
317 225
318 // Probe the secondary table. 226 // Probe the secondary table.
319 ProbeTable(masm, flags, kSecondary, name, scratch); 227 ProbeTable(masm, flags, kSecondary, name, scratch);
320 228
321 // Cache miss: Fall-through and let caller handle the miss by 229 // Cache miss: Fall-through and let caller handle the miss by
322 // entering the runtime system. 230 // entering the runtime system.
323 __ bind(&miss); 231 __ bind(&miss);
324 } 232 }
325 233
326 234
327 // Both name_reg and receiver_reg are preserved on jumps to miss_label, 235 void StubCompiler::GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm,
328 // but may be destroyed if store is successful. 236 int index,
329 void StubCompiler::GenerateStoreField(MacroAssembler* masm, 237 Register prototype) {
330 JSObject* object, 238 // Load the global or builtins object from the current context.
331 int index, 239 __ movq(prototype,
332 Map* transition, 240 Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
333 Register receiver_reg, 241 // Load the global context from the global or builtins object.
334 Register name_reg, 242 __ movq(prototype,
335 Register scratch, 243 FieldOperand(prototype, GlobalObject::kGlobalContextOffset));
336 Label* miss_label) { 244 // Load the function from the global context.
337 // Check that the object isn't a smi. 245 __ movq(prototype, Operand(prototype, Context::SlotOffset(index)));
338 __ JumpIfSmi(receiver_reg, miss_label); 246 // Load the initial map. The global functions all have initial maps.
339 247 __ movq(prototype,
340 // Check that the map of the object hasn't changed. 248 FieldOperand(prototype, JSFunction::kPrototypeOrInitialMapOffset));
341 __ Cmp(FieldOperand(receiver_reg, HeapObject::kMapOffset), 249 // Load the prototype from the initial map.
342 Handle<Map>(object->map())); 250 __ movq(prototype, FieldOperand(prototype, Map::kPrototypeOffset));
343 __ j(not_equal, miss_label);
344
345 // Perform global security token check if needed.
346 if (object->IsJSGlobalProxy()) {
347 __ CheckAccessGlobalProxy(receiver_reg, scratch, miss_label);
348 }
349
350 // Stub never generated for non-global objects that require access
351 // checks.
352 ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
353
354 // Perform map transition for the receiver if necessary.
355 if ((transition != NULL) && (object->map()->unused_property_fields() == 0)) {
356 // The properties must be extended before we can store the value.
357 // We jump to a runtime call that extends the properties array.
358 __ pop(scratch); // Return address.
359 __ push(receiver_reg);
360 __ Push(Handle<Map>(transition));
361 __ push(rax);
362 __ push(scratch);
363 __ TailCallExternalReference(
364 ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage)), 3, 1);
365 return;
366 }
367
368 if (transition != NULL) {
369 // Update the map of the object; no write barrier updating is
370 // needed because the map is never in new space.
371 __ Move(FieldOperand(receiver_reg, HeapObject::kMapOffset),
372 Handle<Map>(transition));
373 }
374
375 // Adjust for the number of properties stored in the object. Even in the
376 // face of a transition we can use the old map here because the size of the
377 // object and the number of in-object properties is not going to change.
378 index -= object->map()->inobject_properties();
379
380 if (index < 0) {
381 // Set the property straight into the object.
382 int offset = object->map()->instance_size() + (index * kPointerSize);
383 __ movq(FieldOperand(receiver_reg, offset), rax);
384
385 // Update the write barrier for the array address.
386 // Pass the value being stored in the now unused name_reg.
387 __ movq(name_reg, rax);
388 __ RecordWrite(receiver_reg, offset, name_reg, scratch);
389 } else {
390 // Write to the properties array.
391 int offset = index * kPointerSize + FixedArray::kHeaderSize;
392 // Get the properties array (optimistically).
393 __ movq(scratch, FieldOperand(receiver_reg, JSObject::kPropertiesOffset));
394 __ movq(FieldOperand(scratch, offset), rax);
395
396 // Update the write barrier for the array address.
397 // Pass the value being stored in the now unused name_reg.
398 __ movq(name_reg, rax);
399 __ RecordWrite(scratch, offset, name_reg, receiver_reg);
400 }
401
402 // Return the value (register rax).
403 __ ret(0);
404 } 251 }
405 252
406 253
254 void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype(
255 MacroAssembler* masm, int index, Register prototype, Label* miss) {
256 // Check we're still in the same context.
257 __ Move(prototype, Top::global());
258 __ cmpq(Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)),
259 prototype);
260 __ j(not_equal, miss);
261 // Get the global function with the given index.
262 JSFunction* function = JSFunction::cast(Top::global_context()->get(index));
263 // Load its initial map. The global functions all have initial maps.
264 __ Move(prototype, Handle<Map>(function->initial_map()));
265 // Load the prototype from the initial map.
266 __ movq(prototype, FieldOperand(prototype, Map::kPrototypeOffset));
267 }
268
269
407 void StubCompiler::GenerateLoadArrayLength(MacroAssembler* masm, 270 void StubCompiler::GenerateLoadArrayLength(MacroAssembler* masm,
408 Register receiver, 271 Register receiver,
409 Register scratch, 272 Register scratch,
410 Label* miss_label) { 273 Label* miss_label) {
411 // Check that the receiver isn't a smi. 274 // Check that the receiver isn't a smi.
412 __ JumpIfSmi(receiver, miss_label); 275 __ JumpIfSmi(receiver, miss_label);
413 276
414 // Check that the object is a JS array. 277 // Check that the object is a JS array.
415 __ CmpObjectType(receiver, JS_ARRAY_TYPE, scratch); 278 __ CmpObjectType(receiver, JS_ARRAY_TYPE, scratch);
416 __ j(not_equal, miss_label); 279 __ j(not_equal, miss_label);
(...skipping 45 matching lines...) Expand 10 before | Expand all | Expand 10 after
462 325
463 // Check if the wrapped value is a string and load the length 326 // Check if the wrapped value is a string and load the length
464 // directly if it is. 327 // directly if it is.
465 __ movq(scratch2, FieldOperand(receiver, JSValue::kValueOffset)); 328 __ movq(scratch2, FieldOperand(receiver, JSValue::kValueOffset));
466 GenerateStringCheck(masm, scratch2, scratch1, miss, miss); 329 GenerateStringCheck(masm, scratch2, scratch1, miss, miss);
467 __ movq(rax, FieldOperand(scratch2, String::kLengthOffset)); 330 __ movq(rax, FieldOperand(scratch2, String::kLengthOffset));
468 __ ret(0); 331 __ ret(0);
469 } 332 }
470 333
471 334
335 void StubCompiler::GenerateLoadFunctionPrototype(MacroAssembler* masm,
336 Register receiver,
337 Register result,
338 Register scratch,
339 Label* miss_label) {
340 __ TryGetFunctionPrototype(receiver, result, miss_label);
341 if (!result.is(rax)) __ movq(rax, result);
342 __ ret(0);
343 }
344
345
346 // Load a fast property out of a holder object (src). In-object properties
347 // are loaded directly otherwise the property is loaded from the properties
348 // fixed array.
349 void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm,
350 Register dst, Register src,
351 JSObject* holder, int index) {
352 // Adjust for the number of properties stored in the holder.
353 index -= holder->map()->inobject_properties();
354 if (index < 0) {
355 // Get the property straight out of the holder.
356 int offset = holder->map()->instance_size() + (index * kPointerSize);
357 __ movq(dst, FieldOperand(src, offset));
358 } else {
359 // Calculate the offset into the properties array.
360 int offset = index * kPointerSize + FixedArray::kHeaderSize;
361 __ movq(dst, FieldOperand(src, JSObject::kPropertiesOffset));
362 __ movq(dst, FieldOperand(dst, offset));
363 }
364 }
365
366
367 static void PushInterceptorArguments(MacroAssembler* masm,
368 Register receiver,
369 Register holder,
370 Register name,
371 JSObject* holder_obj) {
372 __ push(name);
373 InterceptorInfo* interceptor = holder_obj->GetNamedInterceptor();
374 ASSERT(!Heap::InNewSpace(interceptor));
375 __ Move(kScratchRegister, Handle<Object>(interceptor));
376 __ push(kScratchRegister);
377 __ push(receiver);
378 __ push(holder);
379 __ push(FieldOperand(kScratchRegister, InterceptorInfo::kDataOffset));
380 }
381
382
472 static void CompileCallLoadPropertyWithInterceptor(MacroAssembler* masm, 383 static void CompileCallLoadPropertyWithInterceptor(MacroAssembler* masm,
473 Register receiver, 384 Register receiver,
474 Register holder, 385 Register holder,
475 Register name, 386 Register name,
476 JSObject* holder_obj) { 387 JSObject* holder_obj) {
477 PushInterceptorArguments(masm, receiver, holder, name, holder_obj); 388 PushInterceptorArguments(masm, receiver, holder, name, holder_obj);
478 389
479 ExternalReference ref = 390 ExternalReference ref =
480 ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorOnly)); 391 ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorOnly));
481 __ movq(rax, Immediate(5)); 392 __ movq(rax, Immediate(5));
482 __ movq(rbx, ref); 393 __ movq(rbx, ref);
483 394
484 CEntryStub stub(1); 395 CEntryStub stub(1);
485 __ CallStub(&stub); 396 __ CallStub(&stub);
486 } 397 }
487 398
488 399
489
490 void StubCompiler::GenerateLoadFunctionPrototype(MacroAssembler* masm,
491 Register receiver,
492 Register result,
493 Register scratch,
494 Label* miss_label) {
495 __ TryGetFunctionPrototype(receiver, result, miss_label);
496 if (!result.is(rax)) __ movq(rax, result);
497 __ ret(0);
498 }
499
500 // Number of pointers to be reserved on stack for fast API call. 400 // Number of pointers to be reserved on stack for fast API call.
501 static const int kFastApiCallArguments = 3; 401 static const int kFastApiCallArguments = 3;
502 402
403
503 // Reserves space for the extra arguments to API function in the 404 // Reserves space for the extra arguments to API function in the
504 // caller's frame. 405 // caller's frame.
505 // 406 //
506 // These arguments are set by CheckPrototypes and GenerateFastApiCall. 407 // These arguments are set by CheckPrototypes and GenerateFastApiCall.
507 static void ReserveSpaceForFastApiCall(MacroAssembler* masm, Register scratch) { 408 static void ReserveSpaceForFastApiCall(MacroAssembler* masm, Register scratch) {
508 // ----------- S t a t e ------------- 409 // ----------- S t a t e -------------
509 // -- rsp[0] : return address 410 // -- rsp[0] : return address
510 // -- rsp[8] : last argument in the internal frame of the caller 411 // -- rsp[8] : last argument in the internal frame of the caller
511 // ----------------------------------- 412 // -----------------------------------
512 __ movq(scratch, Operand(rsp, 0)); 413 __ movq(scratch, Operand(rsp, 0));
(...skipping 313 matching lines...) Expand 10 before | Expand all | Expand 10 after
826 __ CompareRoot(rax, Heap::kNoInterceptorResultSentinelRootIndex); 727 __ CompareRoot(rax, Heap::kNoInterceptorResultSentinelRootIndex);
827 __ j(not_equal, interceptor_succeeded); 728 __ j(not_equal, interceptor_succeeded);
828 } 729 }
829 730
830 StubCompiler* stub_compiler_; 731 StubCompiler* stub_compiler_;
831 const ParameterCount& arguments_; 732 const ParameterCount& arguments_;
832 Register name_; 733 Register name_;
833 }; 734 };
834 735
835 736
737 void StubCompiler::GenerateLoadMiss(MacroAssembler* masm, Code::Kind kind) {
738 ASSERT(kind == Code::LOAD_IC || kind == Code::KEYED_LOAD_IC);
739 Code* code = NULL;
740 if (kind == Code::LOAD_IC) {
741 code = Builtins::builtin(Builtins::LoadIC_Miss);
742 } else {
743 code = Builtins::builtin(Builtins::KeyedLoadIC_Miss);
744 }
745
746 Handle<Code> ic(code);
747 __ Jump(ic, RelocInfo::CODE_TARGET);
748 }
749
750
751 // Both name_reg and receiver_reg are preserved on jumps to miss_label,
752 // but may be destroyed if store is successful.
753 void StubCompiler::GenerateStoreField(MacroAssembler* masm,
754 JSObject* object,
755 int index,
756 Map* transition,
757 Register receiver_reg,
758 Register name_reg,
759 Register scratch,
760 Label* miss_label) {
761 // Check that the object isn't a smi.
762 __ JumpIfSmi(receiver_reg, miss_label);
763
764 // Check that the map of the object hasn't changed.
765 __ Cmp(FieldOperand(receiver_reg, HeapObject::kMapOffset),
766 Handle<Map>(object->map()));
767 __ j(not_equal, miss_label);
768
769 // Perform global security token check if needed.
770 if (object->IsJSGlobalProxy()) {
771 __ CheckAccessGlobalProxy(receiver_reg, scratch, miss_label);
772 }
773
774 // Stub never generated for non-global objects that require access
775 // checks.
776 ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
777
778 // Perform map transition for the receiver if necessary.
779 if ((transition != NULL) && (object->map()->unused_property_fields() == 0)) {
780 // The properties must be extended before we can store the value.
781 // We jump to a runtime call that extends the properties array.
782 __ pop(scratch); // Return address.
783 __ push(receiver_reg);
784 __ Push(Handle<Map>(transition));
785 __ push(rax);
786 __ push(scratch);
787 __ TailCallExternalReference(
788 ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage)), 3, 1);
789 return;
790 }
791
792 if (transition != NULL) {
793 // Update the map of the object; no write barrier updating is
794 // needed because the map is never in new space.
795 __ Move(FieldOperand(receiver_reg, HeapObject::kMapOffset),
796 Handle<Map>(transition));
797 }
798
799 // Adjust for the number of properties stored in the object. Even in the
800 // face of a transition we can use the old map here because the size of the
801 // object and the number of in-object properties is not going to change.
802 index -= object->map()->inobject_properties();
803
804 if (index < 0) {
805 // Set the property straight into the object.
806 int offset = object->map()->instance_size() + (index * kPointerSize);
807 __ movq(FieldOperand(receiver_reg, offset), rax);
808
809 // Update the write barrier for the array address.
810 // Pass the value being stored in the now unused name_reg.
811 __ movq(name_reg, rax);
812 __ RecordWrite(receiver_reg, offset, name_reg, scratch);
813 } else {
814 // Write to the properties array.
815 int offset = index * kPointerSize + FixedArray::kHeaderSize;
816 // Get the properties array (optimistically).
817 __ movq(scratch, FieldOperand(receiver_reg, JSObject::kPropertiesOffset));
818 __ movq(FieldOperand(scratch, offset), rax);
819
820 // Update the write barrier for the array address.
821 // Pass the value being stored in the now unused name_reg.
822 __ movq(name_reg, rax);
823 __ RecordWrite(scratch, offset, name_reg, receiver_reg);
824 }
825
826 // Return the value (register rax).
827 __ ret(0);
828 }
829
830
836 // Generate code to check that a global property cell is empty. Create 831 // Generate code to check that a global property cell is empty. Create
837 // the property cell at compilation time if no cell exists for the 832 // the property cell at compilation time if no cell exists for the
838 // property. 833 // property.
839 MUST_USE_RESULT static MaybeObject* GenerateCheckPropertyCell( 834 MUST_USE_RESULT static MaybeObject* GenerateCheckPropertyCell(
840 MacroAssembler* masm, 835 MacroAssembler* masm,
841 GlobalObject* global, 836 GlobalObject* global,
842 String* name, 837 String* name,
843 Register scratch, 838 Register scratch,
844 Label* miss) { 839 Label* miss) {
845 Object* probe; 840 Object* probe;
846 { MaybeObject* maybe_probe = global->EnsurePropertyCell(name); 841 { MaybeObject* maybe_probe = global->EnsurePropertyCell(name);
847 if (!maybe_probe->ToObject(&probe)) return maybe_probe; 842 if (!maybe_probe->ToObject(&probe)) return maybe_probe;
848 } 843 }
849 JSGlobalPropertyCell* cell = JSGlobalPropertyCell::cast(probe); 844 JSGlobalPropertyCell* cell = JSGlobalPropertyCell::cast(probe);
850 ASSERT(cell->value()->IsTheHole()); 845 ASSERT(cell->value()->IsTheHole());
851 __ Move(scratch, Handle<Object>(cell)); 846 __ Move(scratch, Handle<Object>(cell));
852 __ Cmp(FieldOperand(scratch, JSGlobalPropertyCell::kValueOffset), 847 __ Cmp(FieldOperand(scratch, JSGlobalPropertyCell::kValueOffset),
853 Factory::the_hole_value()); 848 Factory::the_hole_value());
854 __ j(not_equal, miss); 849 __ j(not_equal, miss);
855 return cell; 850 return cell;
856 } 851 }
857 852
858 853
859 #undef __ 854 #undef __
860
861 #define __ ACCESS_MASM((masm())) 855 #define __ ACCESS_MASM((masm()))
862 856
863 857
858 Register StubCompiler::CheckPrototypes(JSObject* object,
859 Register object_reg,
860 JSObject* holder,
861 Register holder_reg,
862 Register scratch1,
863 Register scratch2,
864 String* name,
865 int save_at_depth,
866 Label* miss) {
867 // Make sure there's no overlap between holder and object registers.
868 ASSERT(!scratch1.is(object_reg) && !scratch1.is(holder_reg));
869 ASSERT(!scratch2.is(object_reg) && !scratch2.is(holder_reg)
870 && !scratch2.is(scratch1));
871
872 // Keep track of the current object in register reg. On the first
873 // iteration, reg is an alias for object_reg, on later iterations,
874 // it is an alias for holder_reg.
875 Register reg = object_reg;
876 int depth = 0;
877
878 if (save_at_depth == depth) {
879 __ movq(Operand(rsp, kPointerSize), object_reg);
880 }
881
882 // Check the maps in the prototype chain.
883 // Traverse the prototype chain from the object and do map checks.
884 JSObject* current = object;
885 while (current != holder) {
886 depth++;
887
888 // Only global objects and objects that do not require access
889 // checks are allowed in stubs.
890 ASSERT(current->IsJSGlobalProxy() || !current->IsAccessCheckNeeded());
891
892 JSObject* prototype = JSObject::cast(current->GetPrototype());
893 if (!current->HasFastProperties() &&
894 !current->IsJSGlobalObject() &&
895 !current->IsJSGlobalProxy()) {
896 if (!name->IsSymbol()) {
897 MaybeObject* lookup_result = Heap::LookupSymbol(name);
898 if (lookup_result->IsFailure()) {
899 set_failure(Failure::cast(lookup_result));
900 return reg;
901 } else {
902 name = String::cast(lookup_result->ToObjectUnchecked());
903 }
904 }
905 ASSERT(current->property_dictionary()->FindEntry(name) ==
906 StringDictionary::kNotFound);
907
908 GenerateDictionaryNegativeLookup(masm(),
909 miss,
910 reg,
911 name,
912 scratch1,
913 scratch2);
914 __ movq(scratch1, FieldOperand(reg, HeapObject::kMapOffset));
915 reg = holder_reg; // from now the object is in holder_reg
916 __ movq(reg, FieldOperand(scratch1, Map::kPrototypeOffset));
917 } else if (Heap::InNewSpace(prototype)) {
918 // Get the map of the current object.
919 __ movq(scratch1, FieldOperand(reg, HeapObject::kMapOffset));
920 __ Cmp(scratch1, Handle<Map>(current->map()));
921 // Branch on the result of the map check.
922 __ j(not_equal, miss);
923 // Check access rights to the global object. This has to happen
924 // after the map check so that we know that the object is
925 // actually a global object.
926 if (current->IsJSGlobalProxy()) {
927 __ CheckAccessGlobalProxy(reg, scratch1, miss);
928
929 // Restore scratch register to be the map of the object.
930 // We load the prototype from the map in the scratch register.
931 __ movq(scratch1, FieldOperand(reg, HeapObject::kMapOffset));
932 }
933 // The prototype is in new space; we cannot store a reference
934 // to it in the code. Load it from the map.
935 reg = holder_reg; // from now the object is in holder_reg
936 __ movq(reg, FieldOperand(scratch1, Map::kPrototypeOffset));
937
938 } else {
939 // Check the map of the current object.
940 __ Cmp(FieldOperand(reg, HeapObject::kMapOffset),
941 Handle<Map>(current->map()));
942 // Branch on the result of the map check.
943 __ j(not_equal, miss);
944 // Check access rights to the global object. This has to happen
945 // after the map check so that we know that the object is
946 // actually a global object.
947 if (current->IsJSGlobalProxy()) {
948 __ CheckAccessGlobalProxy(reg, scratch1, miss);
949 }
950 // The prototype is in old space; load it directly.
951 reg = holder_reg; // from now the object is in holder_reg
952 __ Move(reg, Handle<JSObject>(prototype));
953 }
954
955 if (save_at_depth == depth) {
956 __ movq(Operand(rsp, kPointerSize), reg);
957 }
958
959 // Go to the next object in the prototype chain.
960 current = prototype;
961 }
962
963 // Check the holder map.
964 __ Cmp(FieldOperand(reg, HeapObject::kMapOffset), Handle<Map>(holder->map()));
965 __ j(not_equal, miss);
966
967 // Log the check depth.
968 LOG(IntEvent("check-maps-depth", depth + 1));
969
970 // Perform security check for access to the global object and return
971 // the holder register.
972 ASSERT(current == holder);
973 ASSERT(current->IsJSGlobalProxy() || !current->IsAccessCheckNeeded());
974 if (current->IsJSGlobalProxy()) {
975 __ CheckAccessGlobalProxy(reg, scratch1, miss);
976 }
977
978 // If we've skipped any global objects, it's not enough to verify
979 // that their maps haven't changed. We also need to check that the
980 // property cell for the property is still empty.
981 current = object;
982 while (current != holder) {
983 if (current->IsGlobalObject()) {
984 MaybeObject* cell = GenerateCheckPropertyCell(masm(),
985 GlobalObject::cast(current),
986 name,
987 scratch1,
988 miss);
989 if (cell->IsFailure()) {
990 set_failure(Failure::cast(cell));
991 return reg;
992 }
993 }
994 current = JSObject::cast(current->GetPrototype());
995 }
996
997 // Return the register containing the holder.
998 return reg;
999 }
1000
1001
1002 void StubCompiler::GenerateLoadField(JSObject* object,
1003 JSObject* holder,
1004 Register receiver,
1005 Register scratch1,
1006 Register scratch2,
1007 Register scratch3,
1008 int index,
1009 String* name,
1010 Label* miss) {
1011 // Check that the receiver isn't a smi.
1012 __ JumpIfSmi(receiver, miss);
1013
1014 // Check the prototype chain.
1015 Register reg =
1016 CheckPrototypes(object, receiver, holder,
1017 scratch1, scratch2, scratch3, name, miss);
1018
1019 // Get the value from the properties.
1020 GenerateFastPropertyLoad(masm(), rax, reg, holder, index);
1021 __ ret(0);
1022 }
1023
1024
// Generates code that loads a property through an AccessorInfo getter
// callback.  The stub builds the v8::AccessorInfo argument block on the
// stack (receiver, holder, data, name) and tail-calls the C++ getter via
// TryCallApiFunctionAndReturn.  Returns false (and sets *failure) when
// emitting the call would have required a GC-triggering allocation; the
// caller must then bail out and retry compilation later.
bool StubCompiler::GenerateLoadCallback(JSObject* object,
                                        JSObject* holder,
                                        Register receiver,
                                        Register name_reg,
                                        Register scratch1,
                                        Register scratch2,
                                        Register scratch3,
                                        AccessorInfo* callback,
                                        String* name,
                                        Label* miss,
                                        Failure** failure) {
  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, miss);

  // Check that the maps haven't changed.
  Register reg =
      CheckPrototypes(object, receiver, holder, scratch1,
                      scratch2, scratch3, name, miss);

  Handle<AccessorInfo> callback_handle(callback);

  // Insert additional parameters into the stack frame above return address.
  ASSERT(!scratch2.is(reg));
  __ pop(scratch2);  // Get return address to place it below.

  __ push(receiver);  // receiver
  __ push(reg);  // holder
  if (Heap::InNewSpace(callback_handle->data())) {
    // The data object may move during GC, so load it indirectly from the
    // AccessorInfo object instead of embedding the raw pointer.
    __ Move(scratch1, callback_handle);
    __ push(FieldOperand(scratch1, AccessorInfo::kDataOffset));  // data
  } else {
    __ Push(Handle<Object>(callback_handle->data()));
  }
  __ push(name_reg);  // name
  // Save a pointer to where we pushed the arguments pointer.
  // This will be passed as the const AccessorInfo& to the C++ callback.

#ifdef _WIN64
  // Win64 uses first register--rcx--for returned value.
  Register accessor_info_arg = r8;
  Register name_arg = rdx;
#else
  Register accessor_info_arg = rsi;
  Register name_arg = rdi;
#endif

  ASSERT(!name_arg.is(scratch2));
  __ movq(name_arg, rsp);
  __ push(scratch2);  // Restore return address.

  // Do call through the api.
  Address getter_address = v8::ToCData<Address>(callback->getter());
  ApiFunction fun(getter_address);

  // 3 elements array for v8::Arguments::values_ and handler for name.
  const int kStackSpace = 4;

  // Allocate v8::AccessorInfo in non-GCed stack space.
  const int kArgStackSpace = 1;

  __ PrepareCallApiFunction(kArgStackSpace);
  __ lea(rax, Operand(name_arg, 3 * kPointerSize));

  // v8::AccessorInfo::args_.
  __ movq(StackSpaceOperand(0), rax);

  // The context register (rsi) has been saved in PrepareCallApiFunction and
  // could be used to pass arguments.
  __ lea(accessor_info_arg, StackSpaceOperand(0));

  // Emitting a stub call may try to allocate (if the code is not
  // already generated). Do not allow the assembler to perform a
  // garbage collection but instead return the allocation failure
  // object.
  MaybeObject* result = masm()->TryCallApiFunctionAndReturn(&fun, kStackSpace);
  if (result->IsFailure()) {
    *failure = Failure::cast(result);
    return false;
  }
  return true;
}
1106
1107
1108 void StubCompiler::GenerateLoadConstant(JSObject* object,
1109 JSObject* holder,
1110 Register receiver,
1111 Register scratch1,
1112 Register scratch2,
1113 Register scratch3,
1114 Object* value,
1115 String* name,
1116 Label* miss) {
1117 // Check that the receiver isn't a smi.
1118 __ JumpIfSmi(receiver, miss);
1119
1120 // Check that the maps haven't changed.
1121 Register reg =
1122 CheckPrototypes(object, receiver, holder,
1123 scratch1, scratch2, scratch3, name, miss);
1124
1125 // Return the constant value.
1126 __ Move(rax, Handle<Object>(value));
1127 __ ret(0);
1128 }
1129
1130
// Generates code for loading a property through a named interceptor.
// If the post-interceptor lookup result is a cacheable FIELD or CALLBACKS
// property, the interceptor call and the follow-up load are compiled
// inline (the common cases); otherwise the whole load is delegated to the
// runtime via a tail call.
void StubCompiler::GenerateLoadInterceptor(JSObject* object,
                                           JSObject* interceptor_holder,
                                           LookupResult* lookup,
                                           Register receiver,
                                           Register name_reg,
                                           Register scratch1,
                                           Register scratch2,
                                           Register scratch3,
                                           String* name,
                                           Label* miss) {
  ASSERT(interceptor_holder->HasNamedInterceptor());
  ASSERT(!interceptor_holder->GetNamedInterceptor()->getter()->IsUndefined());

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, miss);

  // So far the most popular follow ups for interceptor loads are FIELD
  // and CALLBACKS, so inline only them, other cases may be added
  // later.
  bool compile_followup_inline = false;
  if (lookup->IsProperty() && lookup->IsCacheable()) {
    if (lookup->type() == FIELD) {
      compile_followup_inline = true;
    } else if (lookup->type() == CALLBACKS &&
        lookup->GetCallbackObject()->IsAccessorInfo() &&
        AccessorInfo::cast(lookup->GetCallbackObject())->getter() != NULL) {
      compile_followup_inline = true;
    }
  }

  if (compile_followup_inline) {
    // Compile the interceptor call, followed by inline code to load the
    // property from further up the prototype chain if the call fails.
    // Check that the maps haven't changed.
    Register holder_reg = CheckPrototypes(object, receiver, interceptor_holder,
                                          scratch1, scratch2, scratch3,
                                          name, miss);
    ASSERT(holder_reg.is(receiver) || holder_reg.is(scratch1));

    // Save necessary data before invoking an interceptor.
    // Requires a frame to make GC aware of pushed pointers.
    __ EnterInternalFrame();

    if (lookup->type() == CALLBACKS && !receiver.is(holder_reg)) {
      // CALLBACKS case needs a receiver to be passed into C++ callback.
      __ push(receiver);
    }
    // Saved so they can be restored below if the interceptor yields no value.
    __ push(holder_reg);
    __ push(name_reg);

    // Invoke an interceptor. Note: map checks from receiver to
    // interceptor's holder has been compiled before (see a caller
    // of this method.)
    CompileCallLoadPropertyWithInterceptor(masm(),
                                           receiver,
                                           holder_reg,
                                           name_reg,
                                           interceptor_holder);

    // Check if interceptor provided a value for property. If it's
    // the case, return immediately.
    Label interceptor_failed;
    __ CompareRoot(rax, Heap::kNoInterceptorResultSentinelRootIndex);
    __ j(equal, &interceptor_failed);
    __ LeaveInternalFrame();
    __ ret(0);

    // Interceptor returned the no-result sentinel: restore the registers
    // pushed above (in reverse order) and fall through to the lookup result.
    __ bind(&interceptor_failed);
    __ pop(name_reg);
    __ pop(holder_reg);
    if (lookup->type() == CALLBACKS && !receiver.is(holder_reg)) {
      __ pop(receiver);
    }

    __ LeaveInternalFrame();

    // Check that the maps from interceptor's holder to lookup's holder
    // haven't changed. And load lookup's holder into |holder| register.
    if (interceptor_holder != lookup->holder()) {
      holder_reg = CheckPrototypes(interceptor_holder,
                                   holder_reg,
                                   lookup->holder(),
                                   scratch1,
                                   scratch2,
                                   scratch3,
                                   name,
                                   miss);
    }

    if (lookup->type() == FIELD) {
      // We found FIELD property in prototype chain of interceptor's holder.
      // Retrieve a field from field's holder.
      GenerateFastPropertyLoad(masm(), rax, holder_reg,
                               lookup->holder(), lookup->GetFieldIndex());
      __ ret(0);
    } else {
      // We found CALLBACKS property in prototype chain of interceptor's
      // holder.
      ASSERT(lookup->type() == CALLBACKS);
      ASSERT(lookup->GetCallbackObject()->IsAccessorInfo());
      AccessorInfo* callback = AccessorInfo::cast(lookup->GetCallbackObject());
      ASSERT(callback != NULL);
      ASSERT(callback->getter() != NULL);

      // Tail call to runtime.
      // Important invariant in CALLBACKS case: the code above must be
      // structured to never clobber |receiver| register.
      __ pop(scratch2);  // return address
      __ push(receiver);
      __ push(holder_reg);
      __ Move(holder_reg, Handle<AccessorInfo>(callback));
      __ push(FieldOperand(holder_reg, AccessorInfo::kDataOffset));
      __ push(holder_reg);
      __ push(name_reg);
      __ push(scratch2);  // restore return address

      ExternalReference ref =
          ExternalReference(IC_Utility(IC::kLoadCallbackProperty));
      __ TailCallExternalReference(ref, 5, 1);
    }
  } else {  // !compile_followup_inline
    // Call the runtime system to load the interceptor.
    // Check that the maps haven't changed.
    Register holder_reg = CheckPrototypes(object, receiver, interceptor_holder,
                                          scratch1, scratch2, scratch3,
                                          name, miss);
    __ pop(scratch2);  // save old return address
    PushInterceptorArguments(masm(), receiver, holder_reg,
                             name_reg, interceptor_holder);
    __ push(scratch2);  // restore old return address

    ExternalReference ref = ExternalReference(
        IC_Utility(IC::kLoadPropertyWithInterceptorForLoad));
    __ TailCallExternalReference(ref, 5, 1);
  }
}
1267
1268
864 void CallStubCompiler::GenerateNameCheck(String* name, Label* miss) { 1269 void CallStubCompiler::GenerateNameCheck(String* name, Label* miss) {
865 if (kind_ == Code::KEYED_CALL_IC) { 1270 if (kind_ == Code::KEYED_CALL_IC) {
866 __ Cmp(rcx, Handle<String>(name)); 1271 __ Cmp(rcx, Handle<String>(name));
867 __ j(not_equal, miss); 1272 __ j(not_equal, miss);
868 } 1273 }
869 } 1274 }
870 1275
871 1276
872 void CallStubCompiler::GenerateGlobalReceiverCheck(JSObject* object, 1277 void CallStubCompiler::GenerateGlobalReceiverCheck(JSObject* object,
873 JSObject* holder, 1278 JSObject* holder,
(...skipping 51 matching lines...) Expand 10 before | Expand all | Expand 10 after
925 MaybeObject* CallStubCompiler::GenerateMissBranch() { 1330 MaybeObject* CallStubCompiler::GenerateMissBranch() {
926 MaybeObject* maybe_obj = 1331 MaybeObject* maybe_obj =
927 StubCache::ComputeCallMiss(arguments().immediate(), kind_); 1332 StubCache::ComputeCallMiss(arguments().immediate(), kind_);
928 Object* obj; 1333 Object* obj;
929 if (!maybe_obj->ToObject(&obj)) return maybe_obj; 1334 if (!maybe_obj->ToObject(&obj)) return maybe_obj;
930 __ Jump(Handle<Code>(Code::cast(obj)), RelocInfo::CODE_TARGET); 1335 __ Jump(Handle<Code>(Code::cast(obj)), RelocInfo::CODE_TARGET);
931 return obj; 1336 return obj;
932 } 1337 }
933 1338
934 1339
// Compiles a monomorphic call stub for a call to a known (constant)
// function.  Depending on |check|, the receiver is verified to be an
// object with the expected map, a string, a number, or a boolean before
// the call is made.  Simple API calls with a known receiver prototype
// depth may take a fast path that calls the C++ API function directly.
MaybeObject* CallStubCompiler::CompileCallConstant(Object* object,
                                                   JSObject* holder,
                                                   JSFunction* function,
                                                   String* name,
                                                   CheckType check) {
  // ----------- S t a t e -------------
  // rcx : function name
  // rsp[0] : return address
  // rsp[8] : argument argc
  // rsp[16] : argument argc - 1
  // ...
  // rsp[argc * 8] : argument 1
  // rsp[(argc + 1) * 8] : argument 0 = receiver
  // -----------------------------------

  // Try a custom (builtin-specific) stub first; a non-undefined result
  // means the custom compiler handled this call.
  SharedFunctionInfo* function_info = function->shared();
  if (function_info->HasBuiltinFunctionId()) {
    BuiltinFunctionId id = function_info->builtin_function_id();
    MaybeObject* maybe_result = CompileCustomCall(
        id, object, holder, NULL, function, name);
    Object* result;
    if (!maybe_result->ToObject(&result)) return maybe_result;
    // undefined means bail out to regular compiler.
    if (!result->IsUndefined()) return result;
  }

  Label miss_in_smi_check;

  GenerateNameCheck(name, &miss_in_smi_check);

  // Get the receiver from the stack.
  const int argc = arguments().immediate();
  __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));

  // Check that the receiver isn't a smi.
  if (check != NUMBER_CHECK) {
    __ JumpIfSmi(rdx, &miss_in_smi_check);
  }

  // Make sure that it's okay not to patch the on stack receiver
  // unless we're doing a receiver map check.
  ASSERT(!object->IsGlobalObject() || check == RECEIVER_MAP_CHECK);

  CallOptimization optimization(function);
  int depth = kInvalidProtoDepth;
  Label miss;

  switch (check) {
    case RECEIVER_MAP_CHECK:
      __ IncrementCounter(&Counters::call_const, 1);

      if (optimization.is_simple_api_call() && !object->IsGlobalObject()) {
        depth = optimization.GetPrototypeDepthOfExpectedType(
            JSObject::cast(object), holder);
      }

      if (depth != kInvalidProtoDepth) {
        __ IncrementCounter(&Counters::call_const_fast_api, 1);
        // Allocate space for v8::Arguments implicit values. Must be
        // initialized before calling any runtime function.
        __ subq(rsp, Immediate(kFastApiCallArguments * kPointerSize));
      }

      // Check that the maps haven't changed.
      CheckPrototypes(JSObject::cast(object), rdx, holder,
                      rbx, rax, rdi, name, depth, &miss);

      // Patch the receiver on the stack with the global proxy if
      // necessary.
      if (object->IsGlobalObject()) {
        ASSERT(depth == kInvalidProtoDepth);
        __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalReceiverOffset));
        __ movq(Operand(rsp, (argc + 1) * kPointerSize), rdx);
      }
      break;

    case STRING_CHECK:
      if (!function->IsBuiltin()) {
        // Calling non-builtins with a value as receiver requires boxing.
        __ jmp(&miss);
      } else {
        // Check that the object is a two-byte string or a symbol.
        __ CmpObjectType(rdx, FIRST_NONSTRING_TYPE, rax);
        __ j(above_equal, &miss);
        // Check that the maps starting from the prototype haven't changed.
        GenerateDirectLoadGlobalFunctionPrototype(
            masm(), Context::STRING_FUNCTION_INDEX, rax, &miss);
        CheckPrototypes(JSObject::cast(object->GetPrototype()), rax, holder,
                        rbx, rdx, rdi, name, &miss);
      }
      break;

    case NUMBER_CHECK: {
      if (!function->IsBuiltin()) {
        // Calling non-builtins with a value as receiver requires boxing.
        __ jmp(&miss);
      } else {
        Label fast;
        // Check that the object is a smi or a heap number.
        __ JumpIfSmi(rdx, &fast);
        __ CmpObjectType(rdx, HEAP_NUMBER_TYPE, rax);
        __ j(not_equal, &miss);
        __ bind(&fast);
        // Check that the maps starting from the prototype haven't changed.
        GenerateDirectLoadGlobalFunctionPrototype(
            masm(), Context::NUMBER_FUNCTION_INDEX, rax, &miss);
        CheckPrototypes(JSObject::cast(object->GetPrototype()), rax, holder,
                        rbx, rdx, rdi, name, &miss);
      }
      break;
    }

    case BOOLEAN_CHECK: {
      if (!function->IsBuiltin()) {
        // Calling non-builtins with a value as receiver requires boxing.
        __ jmp(&miss);
      } else {
        Label fast;
        // Check that the object is a boolean.
        __ CompareRoot(rdx, Heap::kTrueValueRootIndex);
        __ j(equal, &fast);
        __ CompareRoot(rdx, Heap::kFalseValueRootIndex);
        __ j(not_equal, &miss);
        __ bind(&fast);
        // Check that the maps starting from the prototype haven't changed.
        GenerateDirectLoadGlobalFunctionPrototype(
            masm(), Context::BOOLEAN_FUNCTION_INDEX, rax, &miss);
        CheckPrototypes(JSObject::cast(object->GetPrototype()), rax, holder,
                        rbx, rdx, rdi, name, &miss);
      }
      break;
    }

    default:
      UNREACHABLE();
  }

  if (depth != kInvalidProtoDepth) {
    // Fast API call path (only reachable from RECEIVER_MAP_CHECK above).
    Failure* failure;
    // Move the return address on top of the stack.
    __ movq(rax, Operand(rsp, 3 * kPointerSize));
    __ movq(Operand(rsp, 0 * kPointerSize), rax);

    // rsp[2 * kPointerSize] is uninitialized, rsp[3 * kPointerSize] contains
    // duplicate of return address and will be overwritten.
    bool success = GenerateFastApiCall(masm(), optimization, argc, &failure);
    if (!success) {
      return failure;
    }
  } else {
    __ InvokeFunction(function, arguments(), JUMP_FUNCTION);
  }

  // Handle call cache miss.  On the fast API path the stack space reserved
  // above must be released before branching to the miss stub.
  __ bind(&miss);
  if (depth != kInvalidProtoDepth) {
    __ addq(rsp, Immediate(kFastApiCallArguments * kPointerSize));
  }

  // Handle call cache miss.
  __ bind(&miss_in_smi_check);
  Object* obj;
  { MaybeObject* maybe_obj = GenerateMissBranch();
    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  }

  // Return the generated code.
  return GetCode(function);
}
1104
1105
1106 MaybeObject* CallStubCompiler::CompileCallField(JSObject* object, 1340 MaybeObject* CallStubCompiler::CompileCallField(JSObject* object,
1107 JSObject* holder, 1341 JSObject* holder,
1108 int index, 1342 int index,
1109 String* name) { 1343 String* name) {
1110 // ----------- S t a t e ------------- 1344 // ----------- S t a t e -------------
1111 // rcx : function name 1345 // rcx : function name
1112 // rsp[0] : return address 1346 // rsp[0] : return address
1113 // rsp[8] : argument argc 1347 // rsp[8] : argument argc
1114 // rsp[16] : argument argc - 1 1348 // rsp[16] : argument argc - 1
1115 // ... 1349 // ...
(...skipping 305 matching lines...) Expand 10 before | Expand all | Expand 10 after
1421 // -- rsp[(argc + 1) * 8] : receiver 1655 // -- rsp[(argc + 1) * 8] : receiver
1422 // ----------------------------------- 1656 // -----------------------------------
1423 1657
1424 // If object is not a string, bail out to regular call. 1658 // If object is not a string, bail out to regular call.
1425 if (!object->IsString() || cell != NULL) return Heap::undefined_value(); 1659 if (!object->IsString() || cell != NULL) return Heap::undefined_value();
1426 1660
1427 const int argc = arguments().immediate(); 1661 const int argc = arguments().immediate();
1428 1662
1429 Label miss; 1663 Label miss;
1430 Label index_out_of_range; 1664 Label index_out_of_range;
1431
Lasse Reichstein 2011/01/03 16:50:11 I actually prefer the whitespace here, between dec
William Hesse 2011/01/04 08:50:56 Actually, the functions CompileStringCharAtCall an
1432 GenerateNameCheck(name, &miss); 1665 GenerateNameCheck(name, &miss);
1433 1666
1434 // Check that the maps starting from the prototype haven't changed. 1667 // Check that the maps starting from the prototype haven't changed.
1435 GenerateDirectLoadGlobalFunctionPrototype(masm(), 1668 GenerateDirectLoadGlobalFunctionPrototype(masm(),
1436 Context::STRING_FUNCTION_INDEX, 1669 Context::STRING_FUNCTION_INDEX,
1437 rax, 1670 rax,
1438 &miss); 1671 &miss);
1439 ASSERT(object != holder); 1672 ASSERT(object != holder);
1440 CheckPrototypes(JSObject::cast(object->GetPrototype()), rax, holder, 1673 CheckPrototypes(JSObject::cast(object->GetPrototype()), rax, holder,
1441 rbx, rdx, rdi, name, &miss); 1674 rbx, rdx, rdi, name, &miss);
(...skipping 291 matching lines...) Expand 10 before | Expand all | Expand 10 after
1733 Object* obj; 1966 Object* obj;
1734 { MaybeObject* maybe_obj = GenerateMissBranch(); 1967 { MaybeObject* maybe_obj = GenerateMissBranch();
1735 if (!maybe_obj->ToObject(&obj)) return maybe_obj; 1968 if (!maybe_obj->ToObject(&obj)) return maybe_obj;
1736 } 1969 }
1737 1970
1738 // Return the generated code. 1971 // Return the generated code.
1739 return (cell == NULL) ? GetCode(function) : GetCode(NORMAL, name); 1972 return (cell == NULL) ? GetCode(function) : GetCode(NORMAL, name);
1740 } 1973 }
1741 1974
1742 1975
// Compiles a monomorphic call stub for a call to a known (constant)
// function.  Depending on |check|, the receiver is verified to be an
// object with the expected map, a string, a number, or a boolean before
// the call is made.  Simple API calls with a known receiver prototype
// depth may take a fast path that calls the C++ API function directly.
MaybeObject* CallStubCompiler::CompileCallConstant(Object* object,
                                                   JSObject* holder,
                                                   JSFunction* function,
                                                   String* name,
                                                   CheckType check) {
  // ----------- S t a t e -------------
  // rcx : function name
  // rsp[0] : return address
  // rsp[8] : argument argc
  // rsp[16] : argument argc - 1
  // ...
  // rsp[argc * 8] : argument 1
  // rsp[(argc + 1) * 8] : argument 0 = receiver
  // -----------------------------------

  // Try a custom (builtin-specific) stub first; a non-undefined result
  // means the custom compiler handled this call.
  SharedFunctionInfo* function_info = function->shared();
  if (function_info->HasBuiltinFunctionId()) {
    BuiltinFunctionId id = function_info->builtin_function_id();
    MaybeObject* maybe_result = CompileCustomCall(
        id, object, holder, NULL, function, name);
    Object* result;
    if (!maybe_result->ToObject(&result)) return maybe_result;
    // undefined means bail out to regular compiler.
    if (!result->IsUndefined()) return result;
  }

  Label miss_in_smi_check;

  GenerateNameCheck(name, &miss_in_smi_check);

  // Get the receiver from the stack.
  const int argc = arguments().immediate();
  __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));

  // Check that the receiver isn't a smi.
  if (check != NUMBER_CHECK) {
    __ JumpIfSmi(rdx, &miss_in_smi_check);
  }

  // Make sure that it's okay not to patch the on stack receiver
  // unless we're doing a receiver map check.
  ASSERT(!object->IsGlobalObject() || check == RECEIVER_MAP_CHECK);

  CallOptimization optimization(function);
  int depth = kInvalidProtoDepth;
  Label miss;

  switch (check) {
    case RECEIVER_MAP_CHECK:
      __ IncrementCounter(&Counters::call_const, 1);

      if (optimization.is_simple_api_call() && !object->IsGlobalObject()) {
        depth = optimization.GetPrototypeDepthOfExpectedType(
            JSObject::cast(object), holder);
      }

      if (depth != kInvalidProtoDepth) {
        __ IncrementCounter(&Counters::call_const_fast_api, 1);
        // Allocate space for v8::Arguments implicit values. Must be
        // initialized before calling any runtime function.
        __ subq(rsp, Immediate(kFastApiCallArguments * kPointerSize));
      }

      // Check that the maps haven't changed.
      CheckPrototypes(JSObject::cast(object), rdx, holder,
                      rbx, rax, rdi, name, depth, &miss);

      // Patch the receiver on the stack with the global proxy if
      // necessary.
      if (object->IsGlobalObject()) {
        ASSERT(depth == kInvalidProtoDepth);
        __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalReceiverOffset));
        __ movq(Operand(rsp, (argc + 1) * kPointerSize), rdx);
      }
      break;

    case STRING_CHECK:
      if (!function->IsBuiltin()) {
        // Calling non-builtins with a value as receiver requires boxing.
        __ jmp(&miss);
      } else {
        // Check that the object is a two-byte string or a symbol.
        __ CmpObjectType(rdx, FIRST_NONSTRING_TYPE, rax);
        __ j(above_equal, &miss);
        // Check that the maps starting from the prototype haven't changed.
        GenerateDirectLoadGlobalFunctionPrototype(
            masm(), Context::STRING_FUNCTION_INDEX, rax, &miss);
        CheckPrototypes(JSObject::cast(object->GetPrototype()), rax, holder,
                        rbx, rdx, rdi, name, &miss);
      }
      break;

    case NUMBER_CHECK: {
      if (!function->IsBuiltin()) {
        // Calling non-builtins with a value as receiver requires boxing.
        __ jmp(&miss);
      } else {
        Label fast;
        // Check that the object is a smi or a heap number.
        __ JumpIfSmi(rdx, &fast);
        __ CmpObjectType(rdx, HEAP_NUMBER_TYPE, rax);
        __ j(not_equal, &miss);
        __ bind(&fast);
        // Check that the maps starting from the prototype haven't changed.
        GenerateDirectLoadGlobalFunctionPrototype(
            masm(), Context::NUMBER_FUNCTION_INDEX, rax, &miss);
        CheckPrototypes(JSObject::cast(object->GetPrototype()), rax, holder,
                        rbx, rdx, rdi, name, &miss);
      }
      break;
    }

    case BOOLEAN_CHECK: {
      if (!function->IsBuiltin()) {
        // Calling non-builtins with a value as receiver requires boxing.
        __ jmp(&miss);
      } else {
        Label fast;
        // Check that the object is a boolean.
        __ CompareRoot(rdx, Heap::kTrueValueRootIndex);
        __ j(equal, &fast);
        __ CompareRoot(rdx, Heap::kFalseValueRootIndex);
        __ j(not_equal, &miss);
        __ bind(&fast);
        // Check that the maps starting from the prototype haven't changed.
        GenerateDirectLoadGlobalFunctionPrototype(
            masm(), Context::BOOLEAN_FUNCTION_INDEX, rax, &miss);
        CheckPrototypes(JSObject::cast(object->GetPrototype()), rax, holder,
                        rbx, rdx, rdi, name, &miss);
      }
      break;
    }

    default:
      UNREACHABLE();
  }

  if (depth != kInvalidProtoDepth) {
    // Fast API call path (only reachable from RECEIVER_MAP_CHECK above).
    Failure* failure;
    // Move the return address on top of the stack.
    __ movq(rax, Operand(rsp, 3 * kPointerSize));
    __ movq(Operand(rsp, 0 * kPointerSize), rax);

    // rsp[2 * kPointerSize] is uninitialized, rsp[3 * kPointerSize] contains
    // duplicate of return address and will be overwritten.
    bool success = GenerateFastApiCall(masm(), optimization, argc, &failure);
    if (!success) {
      return failure;
    }
  } else {
    __ InvokeFunction(function, arguments(), JUMP_FUNCTION);
  }

  // Handle call cache miss.  On the fast API path the stack space reserved
  // above must be released before branching to the miss stub.
  __ bind(&miss);
  if (depth != kInvalidProtoDepth) {
    __ addq(rsp, Immediate(kFastApiCallArguments * kPointerSize));
  }

  // Handle call cache miss.
  __ bind(&miss_in_smi_check);
  Object* obj;
  { MaybeObject* maybe_obj = GenerateMissBranch();
    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  }

  // Return the generated code.
  return GetCode(function);
}
2146
1743 MaybeObject* CallStubCompiler::CompileCallInterceptor(JSObject* object, 2147 MaybeObject* CallStubCompiler::CompileCallInterceptor(JSObject* object,
1744 JSObject* holder, 2148 JSObject* holder,
1745 String* name) { 2149 String* name) {
1746 // ----------- S t a t e ------------- 2150 // ----------- S t a t e -------------
1747 // rcx : function name 2151 // rcx : function name
1748 // rsp[0] : return address 2152 // rsp[0] : return address
1749 // rsp[8] : argument argc 2153 // rsp[8] : argument argc
1750 // rsp[16] : argument argc - 1 2154 // rsp[16] : argument argc - 1
1751 // ... 2155 // ...
1752 // rsp[argc * 8] : argument 1 2156 // rsp[argc * 8] : argument 1
(...skipping 120 matching lines...) Expand 10 before | Expand all | Expand 10 after
1873 Object* obj; 2277 Object* obj;
1874 { MaybeObject* maybe_obj = GenerateMissBranch(); 2278 { MaybeObject* maybe_obj = GenerateMissBranch();
1875 if (!maybe_obj->ToObject(&obj)) return maybe_obj; 2279 if (!maybe_obj->ToObject(&obj)) return maybe_obj;
1876 } 2280 }
1877 2281
1878 // Return the generated code. 2282 // Return the generated code.
1879 return GetCode(NORMAL, name); 2283 return GetCode(NORMAL, name);
1880 } 2284 }
1881 2285
1882 2286
1883 MaybeObject* LoadStubCompiler::CompileLoadCallback(String* name, 2287 MaybeObject* StoreStubCompiler::CompileStoreField(JSObject* object,
1884 JSObject* object, 2288 int index,
1885 JSObject* holder, 2289 Map* transition,
1886 AccessorInfo* callback) { 2290 String* name) {
1887 // ----------- S t a t e ------------- 2291 // ----------- S t a t e -------------
1888 // -- rax : receiver 2292 // -- rax : value
1889 // -- rcx : name 2293 // -- rcx : name
1890 // -- rsp[0] : return address 2294 // -- rdx : receiver
1891 // ----------------------------------- 2295 // -- rsp[0] : return address
1892 Label miss; 2296 // -----------------------------------
1893 2297 Label miss;
1894 Failure* failure = Failure::InternalError(); 2298
1895 bool success = GenerateLoadCallback(object, holder, rax, rcx, rdx, rbx, rdi, 2299 // Generate store field code. Preserves receiver and name on jump to miss.
1896 callback, name, &miss, &failure); 2300 GenerateStoreField(masm(),
1897 if (!success) { 2301 object,
1898 miss.Unuse(); 2302 index,
1899 return failure; 2303 transition,
2304 rdx, rcx, rbx,
2305 &miss);
2306
2307 // Handle store cache miss.
2308 __ bind(&miss);
2309 Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
2310 __ Jump(ic, RelocInfo::CODE_TARGET);
2311
2312 // Return the generated code.
2313 return GetCode(transition == NULL ? FIELD : MAP_TRANSITION, name);
2314 }
2315
2316
2317 MaybeObject* StoreStubCompiler::CompileStoreCallback(JSObject* object,
2318 AccessorInfo* callback,
2319 String* name) {
2320 // ----------- S t a t e -------------
2321 // -- rax : value
2322 // -- rcx : name
2323 // -- rdx : receiver
2324 // -- rsp[0] : return address
2325 // -----------------------------------
2326 Label miss;
2327
2328 // Check that the object isn't a smi.
2329 __ JumpIfSmi(rdx, &miss);
2330
2331 // Check that the map of the object hasn't changed.
2332 __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset),
2333 Handle<Map>(object->map()));
2334 __ j(not_equal, &miss);
2335
2336 // Perform global security token check if needed.
2337 if (object->IsJSGlobalProxy()) {
2338 __ CheckAccessGlobalProxy(rdx, rbx, &miss);
1900 } 2339 }
1901 2340
1902 __ bind(&miss); 2341 // Stub never generated for non-global objects that require access
1903 GenerateLoadMiss(masm(), Code::LOAD_IC); 2342 // checks.
2343 ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
2344
2345 __ pop(rbx); // remove the return address
2346 __ push(rdx); // receiver
2347 __ Push(Handle<AccessorInfo>(callback)); // callback info
2348 __ push(rcx); // name
2349 __ push(rax); // value
2350 __ push(rbx); // restore return address
2351
2352 // Do tail-call to the runtime system.
2353 ExternalReference store_callback_property =
2354 ExternalReference(IC_Utility(IC::kStoreCallbackProperty));
2355 __ TailCallExternalReference(store_callback_property, 4, 1);
2356
2357 // Handle store cache miss.
2358 __ bind(&miss);
2359 Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
2360 __ Jump(ic, RelocInfo::CODE_TARGET);
1904 2361
1905 // Return the generated code. 2362 // Return the generated code.
1906 return GetCode(CALLBACKS, name); 2363 return GetCode(CALLBACKS, name);
1907 } 2364 }
1908 2365
1909 2366
1910 MaybeObject* LoadStubCompiler::CompileLoadConstant(JSObject* object, 2367 MaybeObject* StoreStubCompiler::CompileStoreInterceptor(JSObject* receiver,
1911 JSObject* holder, 2368 String* name) {
1912 Object* value, 2369 // ----------- S t a t e -------------
2370 // -- rax : value
2371 // -- rcx : name
2372 // -- rdx : receiver
2373 // -- rsp[0] : return address
2374 // -----------------------------------
2375 Label miss;
2376
2377 // Check that the object isn't a smi.
2378 __ JumpIfSmi(rdx, &miss);
2379
2380 // Check that the map of the object hasn't changed.
2381 __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset),
2382 Handle<Map>(receiver->map()));
2383 __ j(not_equal, &miss);
2384
2385 // Perform global security token check if needed.
2386 if (receiver->IsJSGlobalProxy()) {
2387 __ CheckAccessGlobalProxy(rdx, rbx, &miss);
2388 }
2389
2390 // Stub never generated for non-global objects that require access
2391 // checks.
2392 ASSERT(receiver->IsJSGlobalProxy() || !receiver->IsAccessCheckNeeded());
2393
2394 __ pop(rbx); // remove the return address
2395 __ push(rdx); // receiver
2396 __ push(rcx); // name
2397 __ push(rax); // value
2398 __ push(rbx); // restore return address
2399
2400 // Do tail-call to the runtime system.
2401 ExternalReference store_ic_property =
2402 ExternalReference(IC_Utility(IC::kStoreInterceptorProperty));
2403 __ TailCallExternalReference(store_ic_property, 3, 1);
2404
2405 // Handle store cache miss.
2406 __ bind(&miss);
2407 Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
2408 __ Jump(ic, RelocInfo::CODE_TARGET);
2409
2410 // Return the generated code.
2411 return GetCode(INTERCEPTOR, name);
2412 }
2413
2414
2415 MaybeObject* StoreStubCompiler::CompileStoreGlobal(GlobalObject* object,
2416 JSGlobalPropertyCell* cell,
1913 String* name) { 2417 String* name) {
1914 // ----------- S t a t e ------------- 2418 // ----------- S t a t e -------------
1915 // -- rax : receiver 2419 // -- rax : value
1916 // -- rcx : name 2420 // -- rcx : name
1917 // -- rsp[0] : return address 2421 // -- rdx : receiver
1918 // ----------------------------------- 2422 // -- rsp[0] : return address
1919 Label miss; 2423 // -----------------------------------
1920 2424 Label miss;
1921 GenerateLoadConstant(object, holder, rax, rbx, rdx, rdi, value, name, &miss); 2425
1922 __ bind(&miss); 2426 // Check that the map of the global has not changed.
1923 GenerateLoadMiss(masm(), Code::LOAD_IC); 2427 __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset),
1924 2428 Handle<Map>(object->map()));
1925 // Return the generated code. 2429 __ j(not_equal, &miss);
1926 return GetCode(CONSTANT_FUNCTION, name); 2430
2431 // Store the value in the cell.
2432 __ Move(rcx, Handle<JSGlobalPropertyCell>(cell));
2433 __ movq(FieldOperand(rcx, JSGlobalPropertyCell::kValueOffset), rax);
2434
2435 // Return the value (register rax).
2436 __ IncrementCounter(&Counters::named_store_global_inline, 1);
2437 __ ret(0);
2438
2439 // Handle store cache miss.
2440 __ bind(&miss);
2441 __ IncrementCounter(&Counters::named_store_global_inline_miss, 1);
2442 Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
2443 __ Jump(ic, RelocInfo::CODE_TARGET);
2444
2445 // Return the generated code.
2446 return GetCode(NORMAL, name);
2447 }
2448
2449
2450 MaybeObject* KeyedStoreStubCompiler::CompileStoreField(JSObject* object,
2451 int index,
2452 Map* transition,
2453 String* name) {
2454 // ----------- S t a t e -------------
2455 // -- rax : value
2456 // -- rcx : key
2457 // -- rdx : receiver
2458 // -- rsp[0] : return address
2459 // -----------------------------------
2460 Label miss;
2461
2462 __ IncrementCounter(&Counters::keyed_store_field, 1);
2463
2464 // Check that the name has not changed.
2465 __ Cmp(rcx, Handle<String>(name));
2466 __ j(not_equal, &miss);
2467
2468 // Generate store field code. Preserves receiver and name on jump to miss.
2469 GenerateStoreField(masm(),
2470 object,
2471 index,
2472 transition,
2473 rdx, rcx, rbx,
2474 &miss);
2475
2476 // Handle store cache miss.
2477 __ bind(&miss);
2478 __ DecrementCounter(&Counters::keyed_store_field, 1);
2479 Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Miss));
2480 __ Jump(ic, RelocInfo::CODE_TARGET);
2481
2482 // Return the generated code.
2483 return GetCode(transition == NULL ? FIELD : MAP_TRANSITION, name);
2484 }
2485
2486
2487 MaybeObject* KeyedStoreStubCompiler::CompileStoreSpecialized(
2488 JSObject* receiver) {
2489 // ----------- S t a t e -------------
2490 // -- rax : value
2491 // -- rcx : key
2492 // -- rdx : receiver
2493 // -- rsp[0] : return address
2494 // -----------------------------------
2495 Label miss;
2496
2497 // Check that the receiver isn't a smi.
2498 __ JumpIfSmi(rdx, &miss);
2499
2500 // Check that the map matches.
2501 __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset),
2502 Handle<Map>(receiver->map()));
2503 __ j(not_equal, &miss);
2504
2505 // Check that the key is a smi.
2506 __ JumpIfNotSmi(rcx, &miss);
2507
2508 // Get the elements array and make sure it is a fast element array, not 'cow'.
2509 __ movq(rdi, FieldOperand(rdx, JSObject::kElementsOffset));
2510 __ Cmp(FieldOperand(rdi, HeapObject::kMapOffset),
2511 Factory::fixed_array_map());
2512 __ j(not_equal, &miss);
2513
2514 // Check that the key is within bounds.
2515 if (receiver->IsJSArray()) {
2516 __ SmiCompare(rcx, FieldOperand(rdx, JSArray::kLengthOffset));
2517 __ j(above_equal, &miss);
2518 } else {
2519 __ SmiCompare(rcx, FieldOperand(rdi, FixedArray::kLengthOffset));
2520 __ j(above_equal, &miss);
2521 }
2522
2523 // Do the store and update the write barrier. Make sure to preserve
2524 // the value in register eax.
2525 __ movq(rdx, rax);
2526 __ SmiToInteger32(rcx, rcx);
2527 __ movq(FieldOperand(rdi, rcx, times_pointer_size, FixedArray::kHeaderSize),
2528 rax);
2529 __ RecordWrite(rdi, 0, rdx, rcx);
2530
2531 // Done.
2532 __ ret(0);
2533
2534 // Handle store cache miss.
2535 __ bind(&miss);
2536 Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Miss));
2537 __ jmp(ic, RelocInfo::CODE_TARGET);
2538
2539 // Return the generated code.
2540 return GetCode(NORMAL, NULL);
1927 } 2541 }
1928 2542
1929 2543
1930 MaybeObject* LoadStubCompiler::CompileLoadNonexistent(String* name, 2544 MaybeObject* LoadStubCompiler::CompileLoadNonexistent(String* name,
1931 JSObject* object, 2545 JSObject* object,
1932 JSObject* last) { 2546 JSObject* last) {
1933 // ----------- S t a t e ------------- 2547 // ----------- S t a t e -------------
1934 // -- rax : receiver 2548 // -- rax : receiver
1935 // -- rcx : name 2549 // -- rcx : name
1936 // -- rsp[0] : return address 2550 // -- rsp[0] : return address
(...skipping 48 matching lines...) Expand 10 before | Expand all | Expand 10 after
1985 2599
1986 GenerateLoadField(object, holder, rax, rbx, rdx, rdi, index, name, &miss); 2600 GenerateLoadField(object, holder, rax, rbx, rdx, rdi, index, name, &miss);
1987 __ bind(&miss); 2601 __ bind(&miss);
1988 GenerateLoadMiss(masm(), Code::LOAD_IC); 2602 GenerateLoadMiss(masm(), Code::LOAD_IC);
1989 2603
1990 // Return the generated code. 2604 // Return the generated code.
1991 return GetCode(FIELD, name); 2605 return GetCode(FIELD, name);
1992 } 2606 }
1993 2607
1994 2608
2609 MaybeObject* LoadStubCompiler::CompileLoadCallback(String* name,
2610 JSObject* object,
2611 JSObject* holder,
2612 AccessorInfo* callback) {
2613 // ----------- S t a t e -------------
2614 // -- rax : receiver
2615 // -- rcx : name
2616 // -- rsp[0] : return address
2617 // -----------------------------------
2618 Label miss;
2619
2620 Failure* failure = Failure::InternalError();
2621 bool success = GenerateLoadCallback(object, holder, rax, rcx, rdx, rbx, rdi,
2622 callback, name, &miss, &failure);
2623 if (!success) {
2624 miss.Unuse();
2625 return failure;
2626 }
2627
2628 __ bind(&miss);
2629 GenerateLoadMiss(masm(), Code::LOAD_IC);
2630
2631 // Return the generated code.
2632 return GetCode(CALLBACKS, name);
2633 }
2634
2635
2636 MaybeObject* LoadStubCompiler::CompileLoadConstant(JSObject* object,
2637 JSObject* holder,
2638 Object* value,
2639 String* name) {
2640 // ----------- S t a t e -------------
2641 // -- rax : receiver
2642 // -- rcx : name
2643 // -- rsp[0] : return address
2644 // -----------------------------------
2645 Label miss;
2646
2647 GenerateLoadConstant(object, holder, rax, rbx, rdx, rdi, value, name, &miss);
2648 __ bind(&miss);
2649 GenerateLoadMiss(masm(), Code::LOAD_IC);
2650
2651 // Return the generated code.
2652 return GetCode(CONSTANT_FUNCTION, name);
2653 }
2654
2655
1995 MaybeObject* LoadStubCompiler::CompileLoadInterceptor(JSObject* receiver, 2656 MaybeObject* LoadStubCompiler::CompileLoadInterceptor(JSObject* receiver,
1996 JSObject* holder, 2657 JSObject* holder,
1997 String* name) { 2658 String* name) {
1998 // ----------- S t a t e ------------- 2659 // ----------- S t a t e -------------
1999 // -- rax : receiver 2660 // -- rax : receiver
2000 // -- rcx : name 2661 // -- rcx : name
2001 // -- rsp[0] : return address 2662 // -- rsp[0] : return address
2002 // ----------------------------------- 2663 // -----------------------------------
2003 Label miss; 2664 Label miss;
2004 2665
(...skipping 62 matching lines...) Expand 10 before | Expand all | Expand 10 after
2067 2728
2068 __ bind(&miss); 2729 __ bind(&miss);
2069 __ IncrementCounter(&Counters::named_load_global_stub_miss, 1); 2730 __ IncrementCounter(&Counters::named_load_global_stub_miss, 1);
2070 GenerateLoadMiss(masm(), Code::LOAD_IC); 2731 GenerateLoadMiss(masm(), Code::LOAD_IC);
2071 2732
2072 // Return the generated code. 2733 // Return the generated code.
2073 return GetCode(NORMAL, name); 2734 return GetCode(NORMAL, name);
2074 } 2735 }
2075 2736
2076 2737
2077 MaybeObject* KeyedLoadStubCompiler::CompileLoadCallback( 2738 MaybeObject* KeyedLoadStubCompiler::CompileLoadField(String* name,
2078 String* name, 2739 JSObject* receiver,
2079 JSObject* receiver, 2740 JSObject* holder,
2080 JSObject* holder, 2741 int index) {
2081 AccessorInfo* callback) {
2082 // ----------- S t a t e ------------- 2742 // ----------- S t a t e -------------
2083 // -- rax : key 2743 // -- rax : key
2084 // -- rdx : receiver 2744 // -- rdx : receiver
2085 // -- rsp[0] : return address 2745 // -- rsp[0] : return address
2086 // ----------------------------------- 2746 // -----------------------------------
2087 Label miss; 2747 Label miss;
2088 2748
2749 __ IncrementCounter(&Counters::keyed_load_field, 1);
2750
2751 // Check that the name has not changed.
2752 __ Cmp(rax, Handle<String>(name));
2753 __ j(not_equal, &miss);
2754
2755 GenerateLoadField(receiver, holder, rdx, rbx, rcx, rdi, index, name, &miss);
2756
2757 __ bind(&miss);
2758 __ DecrementCounter(&Counters::keyed_load_field, 1);
2759 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
2760
2761 // Return the generated code.
2762 return GetCode(FIELD, name);
2763 }
2764
2765
2766 MaybeObject* KeyedLoadStubCompiler::CompileLoadCallback(
2767 String* name,
2768 JSObject* receiver,
2769 JSObject* holder,
2770 AccessorInfo* callback) {
2771 // ----------- S t a t e -------------
2772 // -- rax : key
2773 // -- rdx : receiver
2774 // -- rsp[0] : return address
2775 // -----------------------------------
2776 Label miss;
2777
2089 __ IncrementCounter(&Counters::keyed_load_callback, 1); 2778 __ IncrementCounter(&Counters::keyed_load_callback, 1);
2090 2779
2091 // Check that the name has not changed. 2780 // Check that the name has not changed.
2092 __ Cmp(rax, Handle<String>(name)); 2781 __ Cmp(rax, Handle<String>(name));
2093 __ j(not_equal, &miss); 2782 __ j(not_equal, &miss);
2094 2783
2095 Failure* failure = Failure::InternalError(); 2784 Failure* failure = Failure::InternalError();
2096 bool success = GenerateLoadCallback(receiver, holder, rdx, rax, rbx, rcx, rdi, 2785 bool success = GenerateLoadCallback(receiver, holder, rdx, rax, rbx, rcx, rdi,
2097 callback, name, &miss, &failure); 2786 callback, name, &miss, &failure);
2098 if (!success) { 2787 if (!success) {
2099 miss.Unuse(); 2788 miss.Unuse();
2100 return failure; 2789 return failure;
2101 } 2790 }
2102 2791
2103 __ bind(&miss); 2792 __ bind(&miss);
2104 __ DecrementCounter(&Counters::keyed_load_callback, 1); 2793 __ DecrementCounter(&Counters::keyed_load_callback, 1);
2105 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC); 2794 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
2106 2795
2107 // Return the generated code. 2796 // Return the generated code.
2108 return GetCode(CALLBACKS, name);
2109 }
2110
2111
2112 MaybeObject* KeyedLoadStubCompiler::CompileLoadArrayLength(String* name) {
2113 // ----------- S t a t e -------------
2114 // -- rax : key
2115 // -- rdx : receiver
2116 // -- rsp[0] : return address
2117 // -----------------------------------
2118 Label miss;
2119
2120 __ IncrementCounter(&Counters::keyed_load_array_length, 1);
2121
2122 // Check that the name has not changed.
2123 __ Cmp(rax, Handle<String>(name));
2124 __ j(not_equal, &miss);
2125
2126 GenerateLoadArrayLength(masm(), rdx, rcx, &miss);
2127 __ bind(&miss);
2128 __ DecrementCounter(&Counters::keyed_load_array_length, 1);
2129 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
2130
2131 // Return the generated code.
2132 return GetCode(CALLBACKS, name); 2797 return GetCode(CALLBACKS, name);
2133 } 2798 }
2134 2799
2135 2800
2136 MaybeObject* KeyedLoadStubCompiler::CompileLoadConstant(String* name, 2801 MaybeObject* KeyedLoadStubCompiler::CompileLoadConstant(String* name,
2137 JSObject* receiver, 2802 JSObject* receiver,
2138 JSObject* holder, 2803 JSObject* holder,
2139 Object* value) { 2804 Object* value) {
2140 // ----------- S t a t e ------------- 2805 // ----------- S t a t e -------------
2141 // -- rax : key 2806 // -- rax : key
(...skipping 12 matching lines...) Expand all
2154 value, name, &miss); 2819 value, name, &miss);
2155 __ bind(&miss); 2820 __ bind(&miss);
2156 __ DecrementCounter(&Counters::keyed_load_constant_function, 1); 2821 __ DecrementCounter(&Counters::keyed_load_constant_function, 1);
2157 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC); 2822 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
2158 2823
2159 // Return the generated code. 2824 // Return the generated code.
2160 return GetCode(CONSTANT_FUNCTION, name); 2825 return GetCode(CONSTANT_FUNCTION, name);
2161 } 2826 }
2162 2827
2163 2828
2164 MaybeObject* KeyedLoadStubCompiler::CompileLoadFunctionPrototype(String* name) {
2165 // ----------- S t a t e -------------
2166 // -- rax : key
2167 // -- rdx : receiver
2168 // -- rsp[0] : return address
2169 // -----------------------------------
2170 Label miss;
2171
2172 __ IncrementCounter(&Counters::keyed_load_function_prototype, 1);
2173
2174 // Check that the name has not changed.
2175 __ Cmp(rax, Handle<String>(name));
2176 __ j(not_equal, &miss);
2177
2178 GenerateLoadFunctionPrototype(masm(), rdx, rcx, rbx, &miss);
2179 __ bind(&miss);
2180 __ DecrementCounter(&Counters::keyed_load_function_prototype, 1);
2181 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
2182
2183 // Return the generated code.
2184 return GetCode(CALLBACKS, name);
2185 }
2186
2187
2188 MaybeObject* KeyedLoadStubCompiler::CompileLoadInterceptor(JSObject* receiver, 2829 MaybeObject* KeyedLoadStubCompiler::CompileLoadInterceptor(JSObject* receiver,
2189 JSObject* holder, 2830 JSObject* holder,
2190 String* name) { 2831 String* name) {
2191 // ----------- S t a t e ------------- 2832 // ----------- S t a t e -------------
2192 // -- rax : key 2833 // -- rax : key
2193 // -- rdx : receiver 2834 // -- rdx : receiver
2194 // -- rsp[0] : return address 2835 // -- rsp[0] : return address
2195 // ----------------------------------- 2836 // -----------------------------------
2196 Label miss; 2837 Label miss;
2197 2838
(...skipping 17 matching lines...) Expand all
2215 &miss); 2856 &miss);
2216 __ bind(&miss); 2857 __ bind(&miss);
2217 __ DecrementCounter(&Counters::keyed_load_interceptor, 1); 2858 __ DecrementCounter(&Counters::keyed_load_interceptor, 1);
2218 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC); 2859 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
2219 2860
2220 // Return the generated code. 2861 // Return the generated code.
2221 return GetCode(INTERCEPTOR, name); 2862 return GetCode(INTERCEPTOR, name);
2222 } 2863 }
2223 2864
2224 2865
2225 MaybeObject* KeyedLoadStubCompiler::CompileLoadStringLength(String* name) { 2866 MaybeObject* KeyedLoadStubCompiler::CompileLoadArrayLength(String* name) {
2226 // ----------- S t a t e ------------- 2867 // ----------- S t a t e -------------
2227 // -- rax : key 2868 // -- rax : key
2228 // -- rdx : receiver 2869 // -- rdx : receiver
2229 // -- rsp[0] : return address 2870 // -- rsp[0] : return address
2230 // ----------------------------------- 2871 // -----------------------------------
2231 Label miss; 2872 Label miss;
2232 2873
2233 __ IncrementCounter(&Counters::keyed_load_string_length, 1); 2874 __ IncrementCounter(&Counters::keyed_load_array_length, 1);
2234 2875
2235 // Check that the name has not changed. 2876 // Check that the name has not changed.
2236 __ Cmp(rax, Handle<String>(name)); 2877 __ Cmp(rax, Handle<String>(name));
2237 __ j(not_equal, &miss); 2878 __ j(not_equal, &miss);
2238 2879
2239 GenerateLoadStringLength(masm(), rdx, rcx, rbx, &miss); 2880 GenerateLoadArrayLength(masm(), rdx, rcx, &miss);
2240 __ bind(&miss); 2881 __ bind(&miss);
2241 __ DecrementCounter(&Counters::keyed_load_string_length, 1); 2882 __ DecrementCounter(&Counters::keyed_load_array_length, 1);
2242 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC); 2883 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
2243 2884
2244 // Return the generated code. 2885 // Return the generated code.
2245 return GetCode(CALLBACKS, name); 2886 return GetCode(CALLBACKS, name);
2246 } 2887 }
2247 2888
2248 2889
2890 MaybeObject* KeyedLoadStubCompiler::CompileLoadStringLength(String* name) {
2891 // ----------- S t a t e -------------
2892 // -- rax : key
2893 // -- rdx : receiver
2894 // -- rsp[0] : return address
2895 // -----------------------------------
2896 Label miss;
2897
2898 __ IncrementCounter(&Counters::keyed_load_string_length, 1);
2899
2900 // Check that the name has not changed.
2901 __ Cmp(rax, Handle<String>(name));
2902 __ j(not_equal, &miss);
2903
2904 GenerateLoadStringLength(masm(), rdx, rcx, rbx, &miss);
2905 __ bind(&miss);
2906 __ DecrementCounter(&Counters::keyed_load_string_length, 1);
2907 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
2908
2909 // Return the generated code.
2910 return GetCode(CALLBACKS, name);
2911 }
2912
2913
2914 MaybeObject* KeyedLoadStubCompiler::CompileLoadFunctionPrototype(String* name) {
2915 // ----------- S t a t e -------------
2916 // -- rax : key
2917 // -- rdx : receiver
2918 // -- rsp[0] : return address
2919 // -----------------------------------
2920 Label miss;
2921
2922 __ IncrementCounter(&Counters::keyed_load_function_prototype, 1);
2923
2924 // Check that the name has not changed.
2925 __ Cmp(rax, Handle<String>(name));
2926 __ j(not_equal, &miss);
2927
2928 GenerateLoadFunctionPrototype(masm(), rdx, rcx, rbx, &miss);
2929 __ bind(&miss);
2930 __ DecrementCounter(&Counters::keyed_load_function_prototype, 1);
2931 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
2932
2933 // Return the generated code.
2934 return GetCode(CALLBACKS, name);
2935 }
2936
2937
2249 MaybeObject* KeyedLoadStubCompiler::CompileLoadSpecialized(JSObject* receiver) { 2938 MaybeObject* KeyedLoadStubCompiler::CompileLoadSpecialized(JSObject* receiver) {
2250 // ----------- S t a t e ------------- 2939 // ----------- S t a t e -------------
2251 // -- rax : key 2940 // -- rax : key
2252 // -- rdx : receiver 2941 // -- rdx : receiver
2253 // -- esp[0] : return address 2942 // -- esp[0] : return address
2254 // ----------------------------------- 2943 // -----------------------------------
2255 Label miss; 2944 Label miss;
2256 2945
2257 // Check that the receiver isn't a smi.
2258 __ JumpIfSmi(rdx, &miss);
2259
2260 // Check that the map matches.
2261 __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset),
2262 Handle<Map>(receiver->map()));
2263 __ j(not_equal, &miss);
2264
2265 // Check that the key is a smi.
2266 __ JumpIfNotSmi(rax, &miss);
2267
2268 // Get the elements array.
2269 __ movq(rcx, FieldOperand(rdx, JSObject::kElementsOffset));
2270 __ AssertFastElements(rcx);
2271
2272 // Check that the key is within bounds.
2273 __ SmiCompare(rax, FieldOperand(rcx, FixedArray::kLengthOffset));
2274 __ j(above_equal, &miss);
2275
2276 // Load the result and make sure it's not the hole.
2277 SmiIndex index = masm()->SmiToIndex(rbx, rax, kPointerSizeLog2);
2278 __ movq(rbx, FieldOperand(rcx,
2279 index.reg,
2280 index.scale,
2281 FixedArray::kHeaderSize));
2282 __ CompareRoot(rbx, Heap::kTheHoleValueRootIndex);
2283 __ j(equal, &miss);
2284 __ movq(rax, rbx);
2285 __ ret(0);
2286
2287 __ bind(&miss);
2288 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
2289
2290 // Return the generated code.
2291 return GetCode(NORMAL, NULL);
2292 }
2293
2294
2295 MaybeObject* StoreStubCompiler::CompileStoreCallback(JSObject* object,
2296 AccessorInfo* callback,
2297 String* name) {
2298 // ----------- S t a t e -------------
2299 // -- rax : value
2300 // -- rcx : name
2301 // -- rdx : receiver
2302 // -- rsp[0] : return address
2303 // -----------------------------------
2304 Label miss;
2305
2306 // Check that the object isn't a smi.
2307 __ JumpIfSmi(rdx, &miss);
2308
2309 // Check that the map of the object hasn't changed.
2310 __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset),
2311 Handle<Map>(object->map()));
2312 __ j(not_equal, &miss);
2313
2314 // Perform global security token check if needed.
2315 if (object->IsJSGlobalProxy()) {
2316 __ CheckAccessGlobalProxy(rdx, rbx, &miss);
2317 }
2318
2319 // Stub never generated for non-global objects that require access
2320 // checks.
2321 ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
2322
2323 __ pop(rbx); // remove the return address
2324 __ push(rdx); // receiver
2325 __ Push(Handle<AccessorInfo>(callback)); // callback info
2326 __ push(rcx); // name
2327 __ push(rax); // value
2328 __ push(rbx); // restore return address
2329
2330 // Do tail-call to the runtime system.
2331 ExternalReference store_callback_property =
2332 ExternalReference(IC_Utility(IC::kStoreCallbackProperty));
2333 __ TailCallExternalReference(store_callback_property, 4, 1);
2334
2335 // Handle store cache miss.
2336 __ bind(&miss);
2337 Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
2338 __ Jump(ic, RelocInfo::CODE_TARGET);
2339
2340 // Return the generated code.
2341 return GetCode(CALLBACKS, name);
2342 }
2343
2344
2345 MaybeObject* StoreStubCompiler::CompileStoreField(JSObject* object,
2346 int index,
2347 Map* transition,
2348 String* name) {
2349 // ----------- S t a t e -------------
2350 // -- rax : value
2351 // -- rcx : name
2352 // -- rdx : receiver
2353 // -- rsp[0] : return address
2354 // -----------------------------------
2355 Label miss;
2356
2357 // Generate store field code. Preserves receiver and name on jump to miss.
2358 GenerateStoreField(masm(),
2359 object,
2360 index,
2361 transition,
2362 rdx, rcx, rbx,
2363 &miss);
2364
2365 // Handle store cache miss.
2366 __ bind(&miss);
2367 Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
2368 __ Jump(ic, RelocInfo::CODE_TARGET);
2369
2370 // Return the generated code.
2371 return GetCode(transition == NULL ? FIELD : MAP_TRANSITION, name);
2372 }
2373
2374
2375 MaybeObject* StoreStubCompiler::CompileStoreInterceptor(JSObject* receiver,
2376 String* name) {
2377 // ----------- S t a t e -------------
2378 // -- rax : value
2379 // -- rcx : name
2380 // -- rdx : receiver
2381 // -- rsp[0] : return address
2382 // -----------------------------------
2383 Label miss;
2384
2385 // Check that the object isn't a smi.
2386 __ JumpIfSmi(rdx, &miss);
2387
2388 // Check that the map of the object hasn't changed.
2389 __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset),
2390 Handle<Map>(receiver->map()));
2391 __ j(not_equal, &miss);
2392
2393 // Perform global security token check if needed.
2394 if (receiver->IsJSGlobalProxy()) {
2395 __ CheckAccessGlobalProxy(rdx, rbx, &miss);
2396 }
2397
2398 // Stub never generated for non-global objects that require access
2399 // checks.
2400 ASSERT(receiver->IsJSGlobalProxy() || !receiver->IsAccessCheckNeeded());
2401
2402 __ pop(rbx); // remove the return address
2403 __ push(rdx); // receiver
2404 __ push(rcx); // name
2405 __ push(rax); // value
2406 __ push(rbx); // restore return address
2407
2408 // Do tail-call to the runtime system.
2409 ExternalReference store_ic_property =
2410 ExternalReference(IC_Utility(IC::kStoreInterceptorProperty));
2411 __ TailCallExternalReference(store_ic_property, 3, 1);
2412
2413 // Handle store cache miss.
2414 __ bind(&miss);
2415 Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
2416 __ Jump(ic, RelocInfo::CODE_TARGET);
2417
2418 // Return the generated code.
2419 return GetCode(INTERCEPTOR, name);
2420 }
2421
2422
2423 MaybeObject* StoreStubCompiler::CompileStoreGlobal(GlobalObject* object,
2424 JSGlobalPropertyCell* cell,
2425 String* name) {
2426 // ----------- S t a t e -------------
2427 // -- rax : value
2428 // -- rcx : name
2429 // -- rdx : receiver
2430 // -- rsp[0] : return address
2431 // -----------------------------------
2432 Label miss;
2433
2434 // Check that the map of the global has not changed.
2435 __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset),
2436 Handle<Map>(object->map()));
2437 __ j(not_equal, &miss);
2438
2439 // Store the value in the cell.
2440 __ Move(rcx, Handle<JSGlobalPropertyCell>(cell));
2441 __ movq(FieldOperand(rcx, JSGlobalPropertyCell::kValueOffset), rax);
2442
2443 // Return the value (register rax).
2444 __ IncrementCounter(&Counters::named_store_global_inline, 1);
2445 __ ret(0);
2446
2447 // Handle store cache miss.
2448 __ bind(&miss);
2449 __ IncrementCounter(&Counters::named_store_global_inline_miss, 1);
2450 Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
2451 __ Jump(ic, RelocInfo::CODE_TARGET);
2452
2453 // Return the generated code.
2454 return GetCode(NORMAL, name);
2455 }
2456
2457
2458 MaybeObject* KeyedLoadStubCompiler::CompileLoadField(String* name,
2459 JSObject* receiver,
2460 JSObject* holder,
2461 int index) {
2462 // ----------- S t a t e -------------
2463 // -- rax : key
2464 // -- rdx : receiver
2465 // -- rsp[0] : return address
2466 // -----------------------------------
2467 Label miss;
2468
2469 __ IncrementCounter(&Counters::keyed_load_field, 1);
2470
2471 // Check that the name has not changed.
2472 __ Cmp(rax, Handle<String>(name));
2473 __ j(not_equal, &miss);
2474
2475 GenerateLoadField(receiver, holder, rdx, rbx, rcx, rdi, index, name, &miss);
2476
2477 __ bind(&miss);
2478 __ DecrementCounter(&Counters::keyed_load_field, 1);
2479 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
2480
2481 // Return the generated code.
2482 return GetCode(FIELD, name);
2483 }
2484
2485
2486 MaybeObject* KeyedStoreStubCompiler::CompileStoreField(JSObject* object,
2487 int index,
2488 Map* transition,
2489 String* name) {
2490 // ----------- S t a t e -------------
2491 // -- rax : value
2492 // -- rcx : key
2493 // -- rdx : receiver
2494 // -- rsp[0] : return address
2495 // -----------------------------------
2496 Label miss;
2497
2498 __ IncrementCounter(&Counters::keyed_store_field, 1);
2499
2500 // Check that the name has not changed.
2501 __ Cmp(rcx, Handle<String>(name));
2502 __ j(not_equal, &miss);
2503
2504 // Generate store field code. Preserves receiver and name on jump to miss.
2505 GenerateStoreField(masm(),
2506 object,
2507 index,
2508 transition,
2509 rdx, rcx, rbx,
2510 &miss);
2511
2512 // Handle store cache miss.
2513 __ bind(&miss);
2514 __ DecrementCounter(&Counters::keyed_store_field, 1);
2515 Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Miss));
2516 __ Jump(ic, RelocInfo::CODE_TARGET);
2517
2518 // Return the generated code.
2519 return GetCode(transition == NULL ? FIELD : MAP_TRANSITION, name);
2520 }
2521
2522
2523 MaybeObject* KeyedStoreStubCompiler::CompileStoreSpecialized(
2524 JSObject* receiver) {
2525 // ----------- S t a t e -------------
2526 // -- rax : value
2527 // -- rcx : key
2528 // -- rdx : receiver
2529 // -- rsp[0] : return address
2530 // -----------------------------------
2531 Label miss;
2532
2533 // Check that the receiver isn't a smi. 2946 // Check that the receiver isn't a smi.
2534 __ JumpIfSmi(rdx, &miss); 2947 __ JumpIfSmi(rdx, &miss);
2535 2948
2536 // Check that the map matches. 2949 // Check that the map matches.
2537 __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset), 2950 __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset),
2538 Handle<Map>(receiver->map())); 2951 Handle<Map>(receiver->map()));
2539 __ j(not_equal, &miss); 2952 __ j(not_equal, &miss);
2540 2953
2541 // Check that the key is a smi. 2954 // Check that the key is a smi.
2542 __ JumpIfNotSmi(rcx, &miss); 2955 __ JumpIfNotSmi(rax, &miss);
2543 2956
2544 // Get the elements array and make sure it is a fast element array, not 'cow'. 2957 // Get the elements array.
2545 __ movq(rdi, FieldOperand(rdx, JSObject::kElementsOffset)); 2958 __ movq(rcx, FieldOperand(rdx, JSObject::kElementsOffset));
2546 __ Cmp(FieldOperand(rdi, HeapObject::kMapOffset), 2959 __ AssertFastElements(rcx);
2547 Factory::fixed_array_map());
2548 __ j(not_equal, &miss);
2549 2960
2550 // Check that the key is within bounds. 2961 // Check that the key is within bounds.
2551 if (receiver->IsJSArray()) { 2962 __ SmiCompare(rax, FieldOperand(rcx, FixedArray::kLengthOffset));
2552 __ SmiCompare(rcx, FieldOperand(rdx, JSArray::kLengthOffset)); 2963 __ j(above_equal, &miss);
2553 __ j(above_equal, &miss);
2554 } else {
2555 __ SmiCompare(rcx, FieldOperand(rdi, FixedArray::kLengthOffset));
2556 __ j(above_equal, &miss);
2557 }
2558 2964
2559 // Do the store and update the write barrier. Make sure to preserve 2965 // Load the result and make sure it's not the hole.
2560 // the value in register eax. 2966 SmiIndex index = masm()->SmiToIndex(rbx, rax, kPointerSizeLog2);
2561 __ movq(rdx, rax); 2967 __ movq(rbx, FieldOperand(rcx,
2562 __ SmiToInteger32(rcx, rcx); 2968 index.reg,
2563 __ movq(FieldOperand(rdi, rcx, times_pointer_size, FixedArray::kHeaderSize), 2969 index.scale,
2564 rax); 2970 FixedArray::kHeaderSize));
2565 __ RecordWrite(rdi, 0, rdx, rcx); 2971 __ CompareRoot(rbx, Heap::kTheHoleValueRootIndex);
2566 2972 __ j(equal, &miss);
2567 // Done. 2973 __ movq(rax, rbx);
2568 __ ret(0); 2974 __ ret(0);
2569 2975
2570 // Handle store cache miss.
2571 __ bind(&miss); 2976 __ bind(&miss);
2572 Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Miss)); 2977 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
2573 __ jmp(ic, RelocInfo::CODE_TARGET);
2574 2978
2575 // Return the generated code. 2979 // Return the generated code.
2576 return GetCode(NORMAL, NULL); 2980 return GetCode(NORMAL, NULL);
2577 } 2981 }
2578 2982
2579 2983
2580 void StubCompiler::GenerateLoadInterceptor(JSObject* object,
2581 JSObject* interceptor_holder,
2582 LookupResult* lookup,
2583 Register receiver,
2584 Register name_reg,
2585 Register scratch1,
2586 Register scratch2,
2587 Register scratch3,
2588 String* name,
2589 Label* miss) {
2590 ASSERT(interceptor_holder->HasNamedInterceptor());
2591 ASSERT(!interceptor_holder->GetNamedInterceptor()->getter()->IsUndefined());
2592
2593 // Check that the receiver isn't a smi.
2594 __ JumpIfSmi(receiver, miss);
2595
2596 // So far the most popular follow ups for interceptor loads are FIELD
2597 // and CALLBACKS, so inline only them, other cases may be added
2598 // later.
2599 bool compile_followup_inline = false;
2600 if (lookup->IsProperty() && lookup->IsCacheable()) {
2601 if (lookup->type() == FIELD) {
2602 compile_followup_inline = true;
2603 } else if (lookup->type() == CALLBACKS &&
2604 lookup->GetCallbackObject()->IsAccessorInfo() &&
2605 AccessorInfo::cast(lookup->GetCallbackObject())->getter() != NULL) {
2606 compile_followup_inline = true;
2607 }
2608 }
2609
2610 if (compile_followup_inline) {
2611 // Compile the interceptor call, followed by inline code to load the
2612 // property from further up the prototype chain if the call fails.
2613 // Check that the maps haven't changed.
2614 Register holder_reg = CheckPrototypes(object, receiver, interceptor_holder,
2615 scratch1, scratch2, scratch3,
2616 name, miss);
2617 ASSERT(holder_reg.is(receiver) || holder_reg.is(scratch1));
2618
2619 // Save necessary data before invoking an interceptor.
2620 // Requires a frame to make GC aware of pushed pointers.
2621 __ EnterInternalFrame();
2622
2623 if (lookup->type() == CALLBACKS && !receiver.is(holder_reg)) {
2624 // CALLBACKS case needs a receiver to be passed into C++ callback.
2625 __ push(receiver);
2626 }
2627 __ push(holder_reg);
2628 __ push(name_reg);
2629
2630 // Invoke an interceptor. Note: map checks from receiver to
2631 // interceptor's holder has been compiled before (see a caller
2632 // of this method.)
2633 CompileCallLoadPropertyWithInterceptor(masm(),
2634 receiver,
2635 holder_reg,
2636 name_reg,
2637 interceptor_holder);
2638
2639 // Check if interceptor provided a value for property. If it's
2640 // the case, return immediately.
2641 Label interceptor_failed;
2642 __ CompareRoot(rax, Heap::kNoInterceptorResultSentinelRootIndex);
2643 __ j(equal, &interceptor_failed);
2644 __ LeaveInternalFrame();
2645 __ ret(0);
2646
2647 __ bind(&interceptor_failed);
2648 __ pop(name_reg);
2649 __ pop(holder_reg);
2650 if (lookup->type() == CALLBACKS && !receiver.is(holder_reg)) {
2651 __ pop(receiver);
2652 }
2653
2654 __ LeaveInternalFrame();
2655
2656 // Check that the maps from interceptor's holder to lookup's holder
2657 // haven't changed. And load lookup's holder into |holder| register.
2658 if (interceptor_holder != lookup->holder()) {
2659 holder_reg = CheckPrototypes(interceptor_holder,
2660 holder_reg,
2661 lookup->holder(),
2662 scratch1,
2663 scratch2,
2664 scratch3,
2665 name,
2666 miss);
2667 }
2668
2669 if (lookup->type() == FIELD) {
2670 // We found FIELD property in prototype chain of interceptor's holder.
2671 // Retrieve a field from field's holder.
2672 GenerateFastPropertyLoad(masm(), rax, holder_reg,
2673 lookup->holder(), lookup->GetFieldIndex());
2674 __ ret(0);
2675 } else {
2676 // We found CALLBACKS property in prototype chain of interceptor's
2677 // holder.
2678 ASSERT(lookup->type() == CALLBACKS);
2679 ASSERT(lookup->GetCallbackObject()->IsAccessorInfo());
2680 AccessorInfo* callback = AccessorInfo::cast(lookup->GetCallbackObject());
2681 ASSERT(callback != NULL);
2682 ASSERT(callback->getter() != NULL);
2683
2684 // Tail call to runtime.
2685 // Important invariant in CALLBACKS case: the code above must be
2686 // structured to never clobber |receiver| register.
2687 __ pop(scratch2); // return address
2688 __ push(receiver);
2689 __ push(holder_reg);
2690 __ Move(holder_reg, Handle<AccessorInfo>(callback));
2691 __ push(FieldOperand(holder_reg, AccessorInfo::kDataOffset));
2692 __ push(holder_reg);
2693 __ push(name_reg);
2694 __ push(scratch2); // restore return address
2695
2696 ExternalReference ref =
2697 ExternalReference(IC_Utility(IC::kLoadCallbackProperty));
2698 __ TailCallExternalReference(ref, 5, 1);
2699 }
2700 } else { // !compile_followup_inline
2701 // Call the runtime system to load the interceptor.
2702 // Check that the maps haven't changed.
2703 Register holder_reg = CheckPrototypes(object, receiver, interceptor_holder,
2704 scratch1, scratch2, scratch3,
2705 name, miss);
2706 __ pop(scratch2); // save old return address
2707 PushInterceptorArguments(masm(), receiver, holder_reg,
2708 name_reg, interceptor_holder);
2709 __ push(scratch2); // restore old return address
2710
2711 ExternalReference ref = ExternalReference(
2712 IC_Utility(IC::kLoadPropertyWithInterceptorForLoad));
2713 __ TailCallExternalReference(ref, 5, 1);
2714 }
2715 }
2716
2717
// Emits code that loads a property backed by a native AccessorInfo getter.
// Builds a v8::AccessorInfo-compatible argument block on the stack and
// calls the C++ getter through TryCallApiFunctionAndReturn.  Returns false
// (and stores the allocation failure in *failure) if emitting the API call
// stub fails; returns true on success.
bool StubCompiler::GenerateLoadCallback(JSObject* object,
                                        JSObject* holder,
                                        Register receiver,
                                        Register name_reg,
                                        Register scratch1,
                                        Register scratch2,
                                        Register scratch3,
                                        AccessorInfo* callback,
                                        String* name,
                                        Label* miss,
                                        Failure** failure) {
  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, miss);

  // Check that the maps haven't changed.
  Register reg =
      CheckPrototypes(object, receiver, holder, scratch1,
                      scratch2, scratch3, name, miss);

  Handle<AccessorInfo> callback_handle(callback);

  // Insert additional parameters into the stack frame above return address.
  ASSERT(!scratch2.is(reg));
  __ pop(scratch2);  // Get return address to place it below.

  __ push(receiver);  // receiver
  __ push(reg);  // holder
  if (Heap::InNewSpace(callback_handle->data())) {
    // The data object may move during GC, so it cannot be embedded as an
    // immediate; load it from the callback object instead.
    __ Move(scratch1, callback_handle);
    __ push(FieldOperand(scratch1, AccessorInfo::kDataOffset));  // data
  } else {
    __ Push(Handle<Object>(callback_handle->data()));
  }
  __ push(name_reg);  // name
  // Save a pointer to where we pushed the arguments pointer.
  // This will be passed as the const AccessorInfo& to the C++ callback.

#ifdef _WIN64
  // Win64 uses first register--rcx--for returned value.
  Register accessor_info_arg = r8;
  Register name_arg = rdx;
#else
  Register accessor_info_arg = rsi;
  Register name_arg = rdi;
#endif

  ASSERT(!name_arg.is(scratch2));
  __ movq(name_arg, rsp);
  __ push(scratch2);  // Restore return address.

  // Do call through the api.
  Address getter_address = v8::ToCData<Address>(callback->getter());
  ApiFunction fun(getter_address);

  // 3 elements array for v8::Arguments::values_ and handler for name.
  const int kStackSpace = 4;

  // Allocate v8::AccessorInfo in non-GCed stack space.
  const int kArgStackSpace = 1;

  __ PrepareCallApiFunction(kArgStackSpace);
  // rax points past the three pushed values (data, holder, receiver),
  // which together form the AccessorInfo args array.
  __ lea(rax, Operand(name_arg, 3 * kPointerSize));

  // v8::AccessorInfo::args_.
  __ movq(StackSpaceOperand(0), rax);

  // The context register (rsi) has been saved in PrepareCallApiFunction and
  // could be used to pass arguments.
  __ lea(accessor_info_arg, StackSpaceOperand(0));

  // Emitting a stub call may try to allocate (if the code is not
  // already generated). Do not allow the assembler to perform a
  // garbage collection but instead return the allocation failure
  // object.
  MaybeObject* result = masm()->TryCallApiFunctionAndReturn(&fun, kStackSpace);
  if (result->IsFailure()) {
    *failure = Failure::cast(result);
    return false;
  }
  return true;
}
2799
2800
// Walks the prototype chain from |object| up to |holder|, emitting a map
// check (or, for slow-mode objects, a dictionary negative lookup) for each
// object along the way.  Jumps to |miss| if any check fails.  Returns the
// register that contains the holder when the emitted code falls through.
// If |save_at_depth| matches the depth of an object in the chain, that
// object is additionally stored at [rsp + kPointerSize].
Register StubCompiler::CheckPrototypes(JSObject* object,
                                       Register object_reg,
                                       JSObject* holder,
                                       Register holder_reg,
                                       Register scratch1,
                                       Register scratch2,
                                       String* name,
                                       int save_at_depth,
                                       Label* miss) {
  // Make sure there's no overlap between holder and object registers.
  ASSERT(!scratch1.is(object_reg) && !scratch1.is(holder_reg));
  ASSERT(!scratch2.is(object_reg) && !scratch2.is(holder_reg)
         && !scratch2.is(scratch1));

  // Keep track of the current object in register reg.  On the first
  // iteration, reg is an alias for object_reg, on later iterations,
  // it is an alias for holder_reg.
  Register reg = object_reg;
  int depth = 0;

  if (save_at_depth == depth) {
    __ movq(Operand(rsp, kPointerSize), object_reg);
  }

  // Check the maps in the prototype chain.
  // Traverse the prototype chain from the object and do map checks.
  JSObject* current = object;
  while (current != holder) {
    depth++;

    // Only global objects and objects that do not require access
    // checks are allowed in stubs.
    ASSERT(current->IsJSGlobalProxy() || !current->IsAccessCheckNeeded());

    JSObject* prototype = JSObject::cast(current->GetPrototype());
    if (!current->HasFastProperties() &&
        !current->IsJSGlobalObject() &&
        !current->IsJSGlobalProxy()) {
      // Slow-mode (dictionary) object: a map check is not enough because
      // dictionary properties can be added without a map transition.
      // Instead, verify at compile time that the property is absent and
      // emit a negative dictionary lookup; the name must be a symbol for
      // that lookup to work.
      if (!name->IsSymbol()) {
        MaybeObject* lookup_result = Heap::LookupSymbol(name);
        if (lookup_result->IsFailure()) {
          // Symbol allocation failed; record the failure and bail out.
          set_failure(Failure::cast(lookup_result));
          return reg;
        } else {
          name = String::cast(lookup_result->ToObjectUnchecked());
        }
      }
      ASSERT(current->property_dictionary()->FindEntry(name) ==
             StringDictionary::kNotFound);

      GenerateDictionaryNegativeLookup(masm(),
                                       miss,
                                       reg,
                                       name,
                                       scratch1,
                                       scratch2);
      __ movq(scratch1, FieldOperand(reg, HeapObject::kMapOffset));
      reg = holder_reg;  // from now the object is in holder_reg
      __ movq(reg, FieldOperand(scratch1, Map::kPrototypeOffset));
    } else if (Heap::InNewSpace(prototype)) {
      // Get the map of the current object.
      __ movq(scratch1, FieldOperand(reg, HeapObject::kMapOffset));
      __ Cmp(scratch1, Handle<Map>(current->map()));
      // Branch on the result of the map check.
      __ j(not_equal, miss);
      // Check access rights to the global object.  This has to happen
      // after the map check so that we know that the object is
      // actually a global object.
      if (current->IsJSGlobalProxy()) {
        __ CheckAccessGlobalProxy(reg, scratch1, miss);

        // Restore scratch register to be the map of the object.
        // We load the prototype from the map in the scratch register.
        __ movq(scratch1, FieldOperand(reg, HeapObject::kMapOffset));
      }
      // The prototype is in new space; we cannot store a reference
      // to it in the code.  Load it from the map.
      reg = holder_reg;  // from now the object is in holder_reg
      __ movq(reg, FieldOperand(scratch1, Map::kPrototypeOffset));

    } else {
      // Check the map of the current object.
      __ Cmp(FieldOperand(reg, HeapObject::kMapOffset),
             Handle<Map>(current->map()));
      // Branch on the result of the map check.
      __ j(not_equal, miss);
      // Check access rights to the global object.  This has to happen
      // after the map check so that we know that the object is
      // actually a global object.
      if (current->IsJSGlobalProxy()) {
        __ CheckAccessGlobalProxy(reg, scratch1, miss);
      }
      // The prototype is in old space; load it directly.
      reg = holder_reg;  // from now the object is in holder_reg
      __ Move(reg, Handle<JSObject>(prototype));
    }

    if (save_at_depth == depth) {
      __ movq(Operand(rsp, kPointerSize), reg);
    }

    // Go to the next object in the prototype chain.
    current = prototype;
  }

  // Check the holder map.
  __ Cmp(FieldOperand(reg, HeapObject::kMapOffset), Handle<Map>(holder->map()));
  __ j(not_equal, miss);

  // Log the check depth.
  LOG(IntEvent("check-maps-depth", depth + 1));

  // Perform security check for access to the global object and return
  // the holder register.
  ASSERT(current == holder);
  ASSERT(current->IsJSGlobalProxy() || !current->IsAccessCheckNeeded());
  if (current->IsJSGlobalProxy()) {
    __ CheckAccessGlobalProxy(reg, scratch1, miss);
  }

  // If we've skipped any global objects, it's not enough to verify
  // that their maps haven't changed.  We also need to check that the
  // property cell for the property is still empty.
  current = object;
  while (current != holder) {
    if (current->IsGlobalObject()) {
      MaybeObject* cell = GenerateCheckPropertyCell(masm(),
                                                    GlobalObject::cast(current),
                                                    name,
                                                    scratch1,
                                                    miss);
      if (cell->IsFailure()) {
        // Allocating the code for the cell check failed; propagate it.
        set_failure(Failure::cast(cell));
        return reg;
      }
    }
    current = JSObject::cast(current->GetPrototype());
  }

  // Return the register containing the holder.
  return reg;
}
2943
2944
2945 void StubCompiler::GenerateLoadField(JSObject* object,
2946 JSObject* holder,
2947 Register receiver,
2948 Register scratch1,
2949 Register scratch2,
2950 Register scratch3,
2951 int index,
2952 String* name,
2953 Label* miss) {
2954 // Check that the receiver isn't a smi.
2955 __ JumpIfSmi(receiver, miss);
2956
2957 // Check the prototype chain.
2958 Register reg =
2959 CheckPrototypes(object, receiver, holder,
2960 scratch1, scratch2, scratch3, name, miss);
2961
2962 // Get the value from the properties.
2963 GenerateFastPropertyLoad(masm(), rax, reg, holder, index);
2964 __ ret(0);
2965 }
2966
2967
2968 void StubCompiler::GenerateLoadConstant(JSObject* object,
2969 JSObject* holder,
2970 Register receiver,
2971 Register scratch1,
2972 Register scratch2,
2973 Register scratch3,
2974 Object* value,
2975 String* name,
2976 Label* miss) {
2977 // Check that the receiver isn't a smi.
2978 __ JumpIfSmi(receiver, miss);
2979
2980 // Check that the maps haven't changed.
2981 Register reg =
2982 CheckPrototypes(object, receiver, holder,
2983 scratch1, scratch2, scratch3, name, miss);
2984
2985 // Return the constant value.
2986 __ Move(rax, Handle<Object>(value));
2987 __ ret(0);
2988 }
2989
2990
2991 // Specialized stub for constructing objects from functions which only have only 2984 // Specialized stub for constructing objects from functions which only have only
2992 // simple assignments of the form this.x = ...; in their body. 2985 // simple assignments of the form this.x = ...; in their body.
2993 MaybeObject* ConstructStubCompiler::CompileConstructStub(JSFunction* function) { 2986 MaybeObject* ConstructStubCompiler::CompileConstructStub(JSFunction* function) {
2994 // ----------- S t a t e ------------- 2987 // ----------- S t a t e -------------
2995 // -- rax : argc 2988 // -- rax : argc
2996 // -- rdi : constructor 2989 // -- rdi : constructor
2997 // -- rsp[0] : return address 2990 // -- rsp[0] : return address
2998 // -- rsp[4] : last argument 2991 // -- rsp[4] : last argument
2999 // ----------------------------------- 2992 // -----------------------------------
3000 Label generic_stub_call; 2993 Label generic_stub_call;
(...skipping 116 matching lines...) Expand 10 before | Expand all | Expand 10 after
3117 // Return the generated code. 3110 // Return the generated code.
3118 return GetCode(); 3111 return GetCode();
3119 } 3112 }
3120 3113
3121 3114
3122 #undef __ 3115 #undef __
3123 3116
3124 } } // namespace v8::internal 3117 } } // namespace v8::internal
3125 3118
3126 #endif // V8_TARGET_ARCH_X64 3119 #endif // V8_TARGET_ARCH_X64
OLDNEW
« no previous file with comments | « no previous file | no next file » | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698