Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(498)

Side by Side Diff: src/x64/stub-cache-x64.cc

Issue 6577036: [Isolates] Merge from bleeding_edge to isolates, revisions 6100-6300. (Closed) Base URL: http://v8.googlecode.com/svn/branches/experimental/isolates/
Patch Set: '' Created 9 years, 10 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« no previous file with comments | « src/x64/regexp-macro-assembler-x64.cc ('k') | src/zone.h » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2010 the V8 project authors. All rights reserved. 1 // Copyright 2010 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution. 11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its 12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived 13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission. 14 // from this software without specific prior written permission.
15 // 15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT 19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT 21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 27
28
29 #include "v8.h" 28 #include "v8.h"
30 29
31 #if defined(V8_TARGET_ARCH_X64) 30 #if defined(V8_TARGET_ARCH_X64)
32 31
33 #include "ic-inl.h" 32 #include "ic-inl.h"
34 #include "code-stubs.h"
35 #include "codegen-inl.h" 33 #include "codegen-inl.h"
36 #include "stub-cache.h" 34 #include "stub-cache.h"
37 #include "macro-assembler.h"
38 35
39 namespace v8 { 36 namespace v8 {
40 namespace internal { 37 namespace internal {
41 38
42 //-----------------------------------------------------------------------------
43 // StubCompiler static helper functions
44
45 #define __ ACCESS_MASM(masm) 39 #define __ ACCESS_MASM(masm)
46 40
47 41
48 static void ProbeTable(Isolate* isolate, 42 static void ProbeTable(Isolate* isolate,
49 MacroAssembler* masm, 43 MacroAssembler* masm,
50 Code::Flags flags, 44 Code::Flags flags,
51 StubCache::Table table, 45 StubCache::Table table,
52 Register name, 46 Register name,
53 Register offset) { 47 Register offset) {
54 ASSERT_EQ(8, kPointerSize); 48 ASSERT_EQ(8, kPointerSize);
(...skipping 121 matching lines...) Expand 10 before | Expand all | Expand 10 after
176 // Give up probing if still not found the undefined value. 170 // Give up probing if still not found the undefined value.
177 __ j(not_equal, miss_label); 171 __ j(not_equal, miss_label);
178 } 172 }
179 } 173 }
180 174
181 __ bind(&done); 175 __ bind(&done);
182 __ DecrementCounter(COUNTERS->negative_lookups_miss(), 1); 176 __ DecrementCounter(COUNTERS->negative_lookups_miss(), 1);
183 } 177 }
184 178
185 179
186 void StubCompiler::GenerateLoadMiss(MacroAssembler* masm, Code::Kind kind) {
187 ASSERT(kind == Code::LOAD_IC || kind == Code::KEYED_LOAD_IC);
188 Code* code = NULL;
189 if (kind == Code::LOAD_IC) {
190 code = Isolate::Current()->builtins()->builtin(Builtins::LoadIC_Miss);
191 } else {
192 code = Isolate::Current()->builtins()->builtin(Builtins::KeyedLoadIC_Miss);
193 }
194
195 Handle<Code> ic(code);
196 __ Jump(ic, RelocInfo::CODE_TARGET);
197 }
198
199
200 void StubCompiler::GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm,
201 int index,
202 Register prototype) {
203 // Load the global or builtins object from the current context.
204 __ movq(prototype,
205 Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
206 // Load the global context from the global or builtins object.
207 __ movq(prototype,
208 FieldOperand(prototype, GlobalObject::kGlobalContextOffset));
209 // Load the function from the global context.
210 __ movq(prototype, Operand(prototype, Context::SlotOffset(index)));
211 // Load the initial map. The global functions all have initial maps.
212 __ movq(prototype,
213 FieldOperand(prototype, JSFunction::kPrototypeOrInitialMapOffset));
214 // Load the prototype from the initial map.
215 __ movq(prototype, FieldOperand(prototype, Map::kPrototypeOffset));
216 }
217
218
219 void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype(
220 MacroAssembler* masm, int index, Register prototype, Label* miss) {
221 // Check we're still in the same context.
222 __ Move(prototype, Isolate::Current()->global());
223 __ cmpq(Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)),
224 prototype);
225 __ j(not_equal, miss);
226 // Get the global function with the given index.
227 JSFunction* function = JSFunction::cast(
228 Isolate::Current()->global_context()->get(index));
229 // Load its initial map. The global functions all have initial maps.
230 __ Move(prototype, Handle<Map>(function->initial_map()));
231 // Load the prototype from the initial map.
232 __ movq(prototype, FieldOperand(prototype, Map::kPrototypeOffset));
233 }
234
235
236 // Load a fast property out of a holder object (src). In-object properties
237 // are loaded directly otherwise the property is loaded from the properties
238 // fixed array.
239 void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm,
240 Register dst, Register src,
241 JSObject* holder, int index) {
242 // Adjust for the number of properties stored in the holder.
243 index -= holder->map()->inobject_properties();
244 if (index < 0) {
245 // Get the property straight out of the holder.
246 int offset = holder->map()->instance_size() + (index * kPointerSize);
247 __ movq(dst, FieldOperand(src, offset));
248 } else {
249 // Calculate the offset into the properties array.
250 int offset = index * kPointerSize + FixedArray::kHeaderSize;
251 __ movq(dst, FieldOperand(src, JSObject::kPropertiesOffset));
252 __ movq(dst, FieldOperand(dst, offset));
253 }
254 }
255
256
257 static void PushInterceptorArguments(MacroAssembler* masm,
258 Register receiver,
259 Register holder,
260 Register name,
261 JSObject* holder_obj) {
262 __ push(name);
263 InterceptorInfo* interceptor = holder_obj->GetNamedInterceptor();
264 ASSERT(!HEAP->InNewSpace(interceptor));
265 __ Move(kScratchRegister, Handle<Object>(interceptor));
266 __ push(kScratchRegister);
267 __ push(receiver);
268 __ push(holder);
269 __ push(FieldOperand(kScratchRegister, InterceptorInfo::kDataOffset));
270 }
271
272
273 void StubCache::GenerateProbe(MacroAssembler* masm, 180 void StubCache::GenerateProbe(MacroAssembler* masm,
274 Code::Flags flags, 181 Code::Flags flags,
275 Register receiver, 182 Register receiver,
276 Register name, 183 Register name,
277 Register scratch, 184 Register scratch,
278 Register extra, 185 Register extra,
279 Register extra2) { 186 Register extra2) {
280 Isolate* isolate = Isolate::Current(); 187 Isolate* isolate = Isolate::Current();
281 Label miss; 188 Label miss;
282 USE(extra); // The register extra is not used on the X64 platform. 189 USE(extra); // The register extra is not used on the X64 platform.
(...skipping 37 matching lines...) Expand 10 before | Expand all | Expand 10 after
320 227
321 // Probe the secondary table. 228 // Probe the secondary table.
322 ProbeTable(isolate, masm, flags, kSecondary, name, scratch); 229 ProbeTable(isolate, masm, flags, kSecondary, name, scratch);
323 230
324 // Cache miss: Fall-through and let caller handle the miss by 231 // Cache miss: Fall-through and let caller handle the miss by
325 // entering the runtime system. 232 // entering the runtime system.
326 __ bind(&miss); 233 __ bind(&miss);
327 } 234 }
328 235
329 236
330 // Both name_reg and receiver_reg are preserved on jumps to miss_label, 237 void StubCompiler::GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm,
331 // but may be destroyed if store is successful. 238 int index,
332 void StubCompiler::GenerateStoreField(MacroAssembler* masm, 239 Register prototype) {
333 JSObject* object, 240 // Load the global or builtins object from the current context.
334 int index, 241 __ movq(prototype,
335 Map* transition, 242 Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
336 Register receiver_reg, 243 // Load the global context from the global or builtins object.
337 Register name_reg, 244 __ movq(prototype,
338 Register scratch, 245 FieldOperand(prototype, GlobalObject::kGlobalContextOffset));
339 Label* miss_label) { 246 // Load the function from the global context.
340 // Check that the object isn't a smi. 247 __ movq(prototype, Operand(prototype, Context::SlotOffset(index)));
341 __ JumpIfSmi(receiver_reg, miss_label); 248 // Load the initial map. The global functions all have initial maps.
342 249 __ movq(prototype,
343 // Check that the map of the object hasn't changed. 250 FieldOperand(prototype, JSFunction::kPrototypeOrInitialMapOffset));
344 __ Cmp(FieldOperand(receiver_reg, HeapObject::kMapOffset), 251 // Load the prototype from the initial map.
345 Handle<Map>(object->map())); 252 __ movq(prototype, FieldOperand(prototype, Map::kPrototypeOffset));
346 __ j(not_equal, miss_label);
347
348 // Perform global security token check if needed.
349 if (object->IsJSGlobalProxy()) {
350 __ CheckAccessGlobalProxy(receiver_reg, scratch, miss_label);
351 }
352
353 // Stub never generated for non-global objects that require access
354 // checks.
355 ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
356
357 // Perform map transition for the receiver if necessary.
358 if ((transition != NULL) && (object->map()->unused_property_fields() == 0)) {
359 // The properties must be extended before we can store the value.
360 // We jump to a runtime call that extends the properties array.
361 __ pop(scratch); // Return address.
362 __ push(receiver_reg);
363 __ Push(Handle<Map>(transition));
364 __ push(rax);
365 __ push(scratch);
366 __ TailCallExternalReference(
367 ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage)), 3, 1);
368 return;
369 }
370
371 if (transition != NULL) {
372 // Update the map of the object; no write barrier updating is
373 // needed because the map is never in new space.
374 __ Move(FieldOperand(receiver_reg, HeapObject::kMapOffset),
375 Handle<Map>(transition));
376 }
377
378 // Adjust for the number of properties stored in the object. Even in the
379 // face of a transition we can use the old map here because the size of the
380 // object and the number of in-object properties is not going to change.
381 index -= object->map()->inobject_properties();
382
383 if (index < 0) {
384 // Set the property straight into the object.
385 int offset = object->map()->instance_size() + (index * kPointerSize);
386 __ movq(FieldOperand(receiver_reg, offset), rax);
387
388 // Update the write barrier for the array address.
389 // Pass the value being stored in the now unused name_reg.
390 __ movq(name_reg, rax);
391 __ RecordWrite(receiver_reg, offset, name_reg, scratch);
392 } else {
393 // Write to the properties array.
394 int offset = index * kPointerSize + FixedArray::kHeaderSize;
395 // Get the properties array (optimistically).
396 __ movq(scratch, FieldOperand(receiver_reg, JSObject::kPropertiesOffset));
397 __ movq(FieldOperand(scratch, offset), rax);
398
399 // Update the write barrier for the array address.
400 // Pass the value being stored in the now unused name_reg.
401 __ movq(name_reg, rax);
402 __ RecordWrite(scratch, offset, name_reg, receiver_reg);
403 }
404
405 // Return the value (register rax).
406 __ ret(0);
407 } 253 }
408 254
409 255
256 void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype(
257 MacroAssembler* masm, int index, Register prototype, Label* miss) {
258 // Check we're still in the same context.
259 __ Move(prototype, Isolate::Current()->global());
260 __ cmpq(Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)),
261 prototype);
262 __ j(not_equal, miss);
263 // Get the global function with the given index.
264 JSFunction* function = JSFunction::cast(
265 Isolate::Current()->global_context()->get(index));
266 // Load its initial map. The global functions all have initial maps.
267 __ Move(prototype, Handle<Map>(function->initial_map()));
268 // Load the prototype from the initial map.
269 __ movq(prototype, FieldOperand(prototype, Map::kPrototypeOffset));
270 }
271
272
410 void StubCompiler::GenerateLoadArrayLength(MacroAssembler* masm, 273 void StubCompiler::GenerateLoadArrayLength(MacroAssembler* masm,
411 Register receiver, 274 Register receiver,
412 Register scratch, 275 Register scratch,
413 Label* miss_label) { 276 Label* miss_label) {
414 // Check that the receiver isn't a smi. 277 // Check that the receiver isn't a smi.
415 __ JumpIfSmi(receiver, miss_label); 278 __ JumpIfSmi(receiver, miss_label);
416 279
417 // Check that the object is a JS array. 280 // Check that the object is a JS array.
418 __ CmpObjectType(receiver, JS_ARRAY_TYPE, scratch); 281 __ CmpObjectType(receiver, JS_ARRAY_TYPE, scratch);
419 __ j(not_equal, miss_label); 282 __ j(not_equal, miss_label);
(...skipping 45 matching lines...) Expand 10 before | Expand all | Expand 10 after
465 328
466 // Check if the wrapped value is a string and load the length 329 // Check if the wrapped value is a string and load the length
467 // directly if it is. 330 // directly if it is.
468 __ movq(scratch2, FieldOperand(receiver, JSValue::kValueOffset)); 331 __ movq(scratch2, FieldOperand(receiver, JSValue::kValueOffset));
469 GenerateStringCheck(masm, scratch2, scratch1, miss, miss); 332 GenerateStringCheck(masm, scratch2, scratch1, miss, miss);
470 __ movq(rax, FieldOperand(scratch2, String::kLengthOffset)); 333 __ movq(rax, FieldOperand(scratch2, String::kLengthOffset));
471 __ ret(0); 334 __ ret(0);
472 } 335 }
473 336
474 337
338 void StubCompiler::GenerateLoadFunctionPrototype(MacroAssembler* masm,
339 Register receiver,
340 Register result,
341 Register scratch,
342 Label* miss_label) {
343 __ TryGetFunctionPrototype(receiver, result, miss_label);
344 if (!result.is(rax)) __ movq(rax, result);
345 __ ret(0);
346 }
347
348
349 // Load a fast property out of a holder object (src). In-object properties
350 // are loaded directly otherwise the property is loaded from the properties
351 // fixed array.
352 void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm,
353 Register dst, Register src,
354 JSObject* holder, int index) {
355 // Adjust for the number of properties stored in the holder.
356 index -= holder->map()->inobject_properties();
357 if (index < 0) {
358 // Get the property straight out of the holder.
359 int offset = holder->map()->instance_size() + (index * kPointerSize);
360 __ movq(dst, FieldOperand(src, offset));
361 } else {
362 // Calculate the offset into the properties array.
363 int offset = index * kPointerSize + FixedArray::kHeaderSize;
364 __ movq(dst, FieldOperand(src, JSObject::kPropertiesOffset));
365 __ movq(dst, FieldOperand(dst, offset));
366 }
367 }
368
369
370 static void PushInterceptorArguments(MacroAssembler* masm,
371 Register receiver,
372 Register holder,
373 Register name,
374 JSObject* holder_obj) {
375 __ push(name);
376 InterceptorInfo* interceptor = holder_obj->GetNamedInterceptor();
377 ASSERT(!HEAP->InNewSpace(interceptor));
378 __ Move(kScratchRegister, Handle<Object>(interceptor));
379 __ push(kScratchRegister);
380 __ push(receiver);
381 __ push(holder);
382 __ push(FieldOperand(kScratchRegister, InterceptorInfo::kDataOffset));
383 }
384
385
475 static void CompileCallLoadPropertyWithInterceptor(MacroAssembler* masm, 386 static void CompileCallLoadPropertyWithInterceptor(MacroAssembler* masm,
476 Register receiver, 387 Register receiver,
477 Register holder, 388 Register holder,
478 Register name, 389 Register name,
479 JSObject* holder_obj) { 390 JSObject* holder_obj) {
480 PushInterceptorArguments(masm, receiver, holder, name, holder_obj); 391 PushInterceptorArguments(masm, receiver, holder, name, holder_obj);
481 392
482 ExternalReference ref = 393 ExternalReference ref =
483 ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorOnly)); 394 ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorOnly));
484 __ movq(rax, Immediate(5)); 395 __ movq(rax, Immediate(5));
485 __ movq(rbx, ref); 396 __ movq(rbx, ref);
486 397
487 CEntryStub stub(1); 398 CEntryStub stub(1);
488 __ CallStub(&stub); 399 __ CallStub(&stub);
489 } 400 }
490 401
491 402
492
493 void StubCompiler::GenerateLoadFunctionPrototype(MacroAssembler* masm,
494 Register receiver,
495 Register result,
496 Register scratch,
497 Label* miss_label) {
498 __ TryGetFunctionPrototype(receiver, result, miss_label);
499 if (!result.is(rax)) __ movq(rax, result);
500 __ ret(0);
501 }
502
503 // Number of pointers to be reserved on stack for fast API call. 403 // Number of pointers to be reserved on stack for fast API call.
504 static const int kFastApiCallArguments = 3; 404 static const int kFastApiCallArguments = 3;
505 405
406
506 // Reserves space for the extra arguments to API function in the 407 // Reserves space for the extra arguments to API function in the
507 // caller's frame. 408 // caller's frame.
508 // 409 //
509 // These arguments are set by CheckPrototypes and GenerateFastApiCall. 410 // These arguments are set by CheckPrototypes and GenerateFastApiCall.
510 static void ReserveSpaceForFastApiCall(MacroAssembler* masm, Register scratch) { 411 static void ReserveSpaceForFastApiCall(MacroAssembler* masm, Register scratch) {
511 // ----------- S t a t e ------------- 412 // ----------- S t a t e -------------
512 // -- rsp[0] : return address 413 // -- rsp[0] : return address
513 // -- rsp[8] : last argument in the internal frame of the caller 414 // -- rsp[8] : last argument in the internal frame of the caller
514 // ----------------------------------- 415 // -----------------------------------
515 __ movq(scratch, Operand(rsp, 0)); 416 __ movq(scratch, Operand(rsp, 0));
(...skipping 33 matching lines...) Expand 10 before | Expand all | Expand 10 after
549 // (last fast api call extra argument, 450 // (last fast api call extra argument,
550 // set by CheckPrototypes) 451 // set by CheckPrototypes)
551 // -- rsp[16] : api function 452 // -- rsp[16] : api function
552 // (first fast api call extra argument) 453 // (first fast api call extra argument)
553 // -- rsp[24] : api call data 454 // -- rsp[24] : api call data
554 // -- rsp[32] : last argument 455 // -- rsp[32] : last argument
555 // -- ... 456 // -- ...
556 // -- rsp[(argc + 3) * 8] : first argument 457 // -- rsp[(argc + 3) * 8] : first argument
557 // -- rsp[(argc + 4) * 8] : receiver 458 // -- rsp[(argc + 4) * 8] : receiver
558 // ----------------------------------- 459 // -----------------------------------
559
560 // Get the function and setup the context. 460 // Get the function and setup the context.
561 JSFunction* function = optimization.constant_function(); 461 JSFunction* function = optimization.constant_function();
562 __ Move(rdi, Handle<JSFunction>(function)); 462 __ Move(rdi, Handle<JSFunction>(function));
563 __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset)); 463 __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
564 464
565 // Pass the additional arguments. 465 // Pass the additional arguments.
566 __ movq(Operand(rsp, 2 * kPointerSize), rdi); 466 __ movq(Operand(rsp, 2 * kPointerSize), rdi);
567 Object* call_data = optimization.api_call_info()->data(); 467 Object* call_data = optimization.api_call_info()->data();
568 Handle<CallHandlerInfo> api_call_info_handle(optimization.api_call_info()); 468 Handle<CallHandlerInfo> api_call_info_handle(optimization.api_call_info());
569 if (HEAP->InNewSpace(call_data)) { 469 if (HEAP->InNewSpace(call_data)) {
(...skipping 259 matching lines...) Expand 10 before | Expand all | Expand 10 after
829 __ CompareRoot(rax, Heap::kNoInterceptorResultSentinelRootIndex); 729 __ CompareRoot(rax, Heap::kNoInterceptorResultSentinelRootIndex);
830 __ j(not_equal, interceptor_succeeded); 730 __ j(not_equal, interceptor_succeeded);
831 } 731 }
832 732
833 StubCompiler* stub_compiler_; 733 StubCompiler* stub_compiler_;
834 const ParameterCount& arguments_; 734 const ParameterCount& arguments_;
835 Register name_; 735 Register name_;
836 }; 736 };
837 737
838 738
739 void StubCompiler::GenerateLoadMiss(MacroAssembler* masm, Code::Kind kind) {
740 ASSERT(kind == Code::LOAD_IC || kind == Code::KEYED_LOAD_IC);
741 Code* code = NULL;
742 if (kind == Code::LOAD_IC) {
743 code = Isolate::Current()->builtins()->builtin(Builtins::LoadIC_Miss);
744 } else {
745 code = Isolate::Current()->builtins()->builtin(Builtins::KeyedLoadIC_Miss);
746 }
747
748 Handle<Code> ic(code);
749 __ Jump(ic, RelocInfo::CODE_TARGET);
750 }
751
752
753 // Both name_reg and receiver_reg are preserved on jumps to miss_label,
754 // but may be destroyed if store is successful.
755 void StubCompiler::GenerateStoreField(MacroAssembler* masm,
756 JSObject* object,
757 int index,
758 Map* transition,
759 Register receiver_reg,
760 Register name_reg,
761 Register scratch,
762 Label* miss_label) {
763 // Check that the object isn't a smi.
764 __ JumpIfSmi(receiver_reg, miss_label);
765
766 // Check that the map of the object hasn't changed.
767 __ Cmp(FieldOperand(receiver_reg, HeapObject::kMapOffset),
768 Handle<Map>(object->map()));
769 __ j(not_equal, miss_label);
770
771 // Perform global security token check if needed.
772 if (object->IsJSGlobalProxy()) {
773 __ CheckAccessGlobalProxy(receiver_reg, scratch, miss_label);
774 }
775
776 // Stub never generated for non-global objects that require access
777 // checks.
778 ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
779
780 // Perform map transition for the receiver if necessary.
781 if ((transition != NULL) && (object->map()->unused_property_fields() == 0)) {
782 // The properties must be extended before we can store the value.
783 // We jump to a runtime call that extends the properties array.
784 __ pop(scratch); // Return address.
785 __ push(receiver_reg);
786 __ Push(Handle<Map>(transition));
787 __ push(rax);
788 __ push(scratch);
789 __ TailCallExternalReference(
790 ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage)), 3, 1);
791 return;
792 }
793
794 if (transition != NULL) {
795 // Update the map of the object; no write barrier updating is
796 // needed because the map is never in new space.
797 __ Move(FieldOperand(receiver_reg, HeapObject::kMapOffset),
798 Handle<Map>(transition));
799 }
800
801 // Adjust for the number of properties stored in the object. Even in the
802 // face of a transition we can use the old map here because the size of the
803 // object and the number of in-object properties is not going to change.
804 index -= object->map()->inobject_properties();
805
806 if (index < 0) {
807 // Set the property straight into the object.
808 int offset = object->map()->instance_size() + (index * kPointerSize);
809 __ movq(FieldOperand(receiver_reg, offset), rax);
810
811 // Update the write barrier for the array address.
812 // Pass the value being stored in the now unused name_reg.
813 __ movq(name_reg, rax);
814 __ RecordWrite(receiver_reg, offset, name_reg, scratch);
815 } else {
816 // Write to the properties array.
817 int offset = index * kPointerSize + FixedArray::kHeaderSize;
818 // Get the properties array (optimistically).
819 __ movq(scratch, FieldOperand(receiver_reg, JSObject::kPropertiesOffset));
820 __ movq(FieldOperand(scratch, offset), rax);
821
822 // Update the write barrier for the array address.
823 // Pass the value being stored in the now unused name_reg.
824 __ movq(name_reg, rax);
825 __ RecordWrite(scratch, offset, name_reg, receiver_reg);
826 }
827
828 // Return the value (register rax).
829 __ ret(0);
830 }
831
832
839 // Generate code to check that a global property cell is empty. Create 833 // Generate code to check that a global property cell is empty. Create
840 // the property cell at compilation time if no cell exists for the 834 // the property cell at compilation time if no cell exists for the
841 // property. 835 // property.
842 MUST_USE_RESULT static MaybeObject* GenerateCheckPropertyCell( 836 MUST_USE_RESULT static MaybeObject* GenerateCheckPropertyCell(
843 MacroAssembler* masm, 837 MacroAssembler* masm,
844 GlobalObject* global, 838 GlobalObject* global,
845 String* name, 839 String* name,
846 Register scratch, 840 Register scratch,
847 Label* miss) { 841 Label* miss) {
848 Object* probe; 842 Object* probe;
849 { MaybeObject* maybe_probe = global->EnsurePropertyCell(name); 843 { MaybeObject* maybe_probe = global->EnsurePropertyCell(name);
850 if (!maybe_probe->ToObject(&probe)) return maybe_probe; 844 if (!maybe_probe->ToObject(&probe)) return maybe_probe;
851 } 845 }
852 JSGlobalPropertyCell* cell = JSGlobalPropertyCell::cast(probe); 846 JSGlobalPropertyCell* cell = JSGlobalPropertyCell::cast(probe);
853 ASSERT(cell->value()->IsTheHole()); 847 ASSERT(cell->value()->IsTheHole());
854 __ Move(scratch, Handle<Object>(cell)); 848 __ Move(scratch, Handle<Object>(cell));
855 __ Cmp(FieldOperand(scratch, JSGlobalPropertyCell::kValueOffset), 849 __ Cmp(FieldOperand(scratch, JSGlobalPropertyCell::kValueOffset),
856 FACTORY->the_hole_value()); 850 FACTORY->the_hole_value());
857 __ j(not_equal, miss); 851 __ j(not_equal, miss);
858 return cell; 852 return cell;
859 } 853 }
860 854
861 855
862 #undef __ 856 #undef __
863
864 #define __ ACCESS_MASM((masm())) 857 #define __ ACCESS_MASM((masm()))
865 858
866 859
860 Register StubCompiler::CheckPrototypes(JSObject* object,
861 Register object_reg,
862 JSObject* holder,
863 Register holder_reg,
864 Register scratch1,
865 Register scratch2,
866 String* name,
867 int save_at_depth,
868 Label* miss) {
869 // Make sure there's no overlap between holder and object registers.
870 ASSERT(!scratch1.is(object_reg) && !scratch1.is(holder_reg));
871 ASSERT(!scratch2.is(object_reg) && !scratch2.is(holder_reg)
872 && !scratch2.is(scratch1));
873
874 // Keep track of the current object in register reg. On the first
875 // iteration, reg is an alias for object_reg, on later iterations,
876 // it is an alias for holder_reg.
877 Register reg = object_reg;
878 int depth = 0;
879
880 if (save_at_depth == depth) {
881 __ movq(Operand(rsp, kPointerSize), object_reg);
882 }
883
884 // Check the maps in the prototype chain.
885 // Traverse the prototype chain from the object and do map checks.
886 JSObject* current = object;
887 while (current != holder) {
888 depth++;
889
890 // Only global objects and objects that do not require access
891 // checks are allowed in stubs.
892 ASSERT(current->IsJSGlobalProxy() || !current->IsAccessCheckNeeded());
893
894 JSObject* prototype = JSObject::cast(current->GetPrototype());
895 if (!current->HasFastProperties() &&
896 !current->IsJSGlobalObject() &&
897 !current->IsJSGlobalProxy()) {
898 if (!name->IsSymbol()) {
899 MaybeObject* lookup_result = HEAP->LookupSymbol(name);
900 if (lookup_result->IsFailure()) {
901 set_failure(Failure::cast(lookup_result));
902 return reg;
903 } else {
904 name = String::cast(lookup_result->ToObjectUnchecked());
905 }
906 }
907 ASSERT(current->property_dictionary()->FindEntry(name) ==
908 StringDictionary::kNotFound);
909
910 GenerateDictionaryNegativeLookup(masm(),
911 miss,
912 reg,
913 name,
914 scratch1,
915 scratch2);
916 __ movq(scratch1, FieldOperand(reg, HeapObject::kMapOffset));
917 reg = holder_reg; // from now the object is in holder_reg
918 __ movq(reg, FieldOperand(scratch1, Map::kPrototypeOffset));
919 } else if (HEAP->InNewSpace(prototype)) {
920 // Get the map of the current object.
921 __ movq(scratch1, FieldOperand(reg, HeapObject::kMapOffset));
922 __ Cmp(scratch1, Handle<Map>(current->map()));
923 // Branch on the result of the map check.
924 __ j(not_equal, miss);
925 // Check access rights to the global object. This has to happen
926 // after the map check so that we know that the object is
927 // actually a global object.
928 if (current->IsJSGlobalProxy()) {
929 __ CheckAccessGlobalProxy(reg, scratch1, miss);
930
931 // Restore scratch register to be the map of the object.
932 // We load the prototype from the map in the scratch register.
933 __ movq(scratch1, FieldOperand(reg, HeapObject::kMapOffset));
934 }
935 // The prototype is in new space; we cannot store a reference
936 // to it in the code. Load it from the map.
937 reg = holder_reg; // from now the object is in holder_reg
938 __ movq(reg, FieldOperand(scratch1, Map::kPrototypeOffset));
939
940 } else {
941 // Check the map of the current object.
942 __ Cmp(FieldOperand(reg, HeapObject::kMapOffset),
943 Handle<Map>(current->map()));
944 // Branch on the result of the map check.
945 __ j(not_equal, miss);
946 // Check access rights to the global object. This has to happen
947 // after the map check so that we know that the object is
948 // actually a global object.
949 if (current->IsJSGlobalProxy()) {
950 __ CheckAccessGlobalProxy(reg, scratch1, miss);
951 }
952 // The prototype is in old space; load it directly.
953 reg = holder_reg; // from now the object is in holder_reg
954 __ Move(reg, Handle<JSObject>(prototype));
955 }
956
957 if (save_at_depth == depth) {
958 __ movq(Operand(rsp, kPointerSize), reg);
959 }
960
961 // Go to the next object in the prototype chain.
962 current = prototype;
963 }
964
965 // Check the holder map.
966 __ Cmp(FieldOperand(reg, HeapObject::kMapOffset), Handle<Map>(holder->map()));
967 __ j(not_equal, miss);
968
969 // Log the check depth.
970 LOG(IntEvent("check-maps-depth", depth + 1));
971
972 // Perform security check for access to the global object and return
973 // the holder register.
974 ASSERT(current == holder);
975 ASSERT(current->IsJSGlobalProxy() || !current->IsAccessCheckNeeded());
976 if (current->IsJSGlobalProxy()) {
977 __ CheckAccessGlobalProxy(reg, scratch1, miss);
978 }
979
980 // If we've skipped any global objects, it's not enough to verify
981 // that their maps haven't changed. We also need to check that the
982 // property cell for the property is still empty.
983 current = object;
984 while (current != holder) {
985 if (current->IsGlobalObject()) {
986 MaybeObject* cell = GenerateCheckPropertyCell(masm(),
987 GlobalObject::cast(current),
988 name,
989 scratch1,
990 miss);
991 if (cell->IsFailure()) {
992 set_failure(Failure::cast(cell));
993 return reg;
994 }
995 }
996 current = JSObject::cast(current->GetPrototype());
997 }
998
999 // Return the register containing the holder.
1000 return reg;
1001 }
1002
1003
1004 void StubCompiler::GenerateLoadField(JSObject* object,
1005 JSObject* holder,
1006 Register receiver,
1007 Register scratch1,
1008 Register scratch2,
1009 Register scratch3,
1010 int index,
1011 String* name,
1012 Label* miss) {
1013 // Check that the receiver isn't a smi.
1014 __ JumpIfSmi(receiver, miss);
1015
1016 // Check the prototype chain.
1017 Register reg =
1018 CheckPrototypes(object, receiver, holder,
1019 scratch1, scratch2, scratch3, name, miss);
1020
1021 // Get the value from the properties.
1022 GenerateFastPropertyLoad(masm(), rax, reg, holder, index);
1023 __ ret(0);
1024 }
1025
1026
// Emits code that loads a property backed by an AccessorInfo callback:
// validates the prototype chain, builds the v8::AccessorInfo argument
// block on the stack, and calls the C++ getter through the API.
// Returns false (with *failure set) if emitting the API call stub
// triggered an allocation failure; true otherwise.
bool StubCompiler::GenerateLoadCallback(JSObject* object,
                                        JSObject* holder,
                                        Register receiver,
                                        Register name_reg,
                                        Register scratch1,
                                        Register scratch2,
                                        Register scratch3,
                                        AccessorInfo* callback,
                                        String* name,
                                        Label* miss,
                                        Failure** failure) {
  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, miss);

  // Check that the maps haven't changed.
  Register reg =
      CheckPrototypes(object, receiver, holder, scratch1,
                      scratch2, scratch3, name, miss);

  Handle<AccessorInfo> callback_handle(callback);

  // Insert additional parameters into the stack frame above return address.
  ASSERT(!scratch2.is(reg));
  __ pop(scratch2);  // Get return address to place it below.

  __ push(receiver);  // receiver
  __ push(reg);  // holder
  if (HEAP->InNewSpace(callback_handle->data())) {
    // The data object may move during GC, so it cannot be embedded in
    // the code directly; reload it from the AccessorInfo object instead.
    __ Move(scratch1, callback_handle);
    __ push(FieldOperand(scratch1, AccessorInfo::kDataOffset));  // data
  } else {
    __ Push(Handle<Object>(callback_handle->data()));
  }
  __ push(name_reg);  // name
  // Save a pointer to where we pushed the arguments pointer.
  // This will be passed as the const AccessorInfo& to the C++ callback.

#ifdef _WIN64
  // Win64 uses first register--rcx--for returned value.
  Register accessor_info_arg = r8;
  Register name_arg = rdx;
#else
  Register accessor_info_arg = rsi;
  Register name_arg = rdi;
#endif

  ASSERT(!name_arg.is(scratch2));
  __ movq(name_arg, rsp);
  __ push(scratch2);  // Restore return address.

  // Do call through the api.
  Address getter_address = v8::ToCData<Address>(callback->getter());
  ApiFunction fun(getter_address);

  // 3 elements array for v8::Arguments::values_ and handler for name.
  const int kStackSpace = 4;

  // Allocate v8::AccessorInfo in non-GCed stack space.
  const int kArgStackSpace = 1;

  __ PrepareCallApiFunction(kArgStackSpace);
  __ lea(rax, Operand(name_arg, 3 * kPointerSize));

  // v8::AccessorInfo::args_.
  __ movq(StackSpaceOperand(0), rax);

  // The context register (rsi) has been saved in PrepareCallApiFunction and
  // could be used to pass arguments.
  __ lea(accessor_info_arg, StackSpaceOperand(0));

  // Emitting a stub call may try to allocate (if the code is not
  // already generated). Do not allow the assembler to perform a
  // garbage collection but instead return the allocation failure
  // object.
  MaybeObject* result = masm()->TryCallApiFunctionAndReturn(&fun, kStackSpace);
  if (result->IsFailure()) {
    *failure = Failure::cast(result);
    return false;
  }
  return true;
}
1108
1109
1110 void StubCompiler::GenerateLoadConstant(JSObject* object,
1111 JSObject* holder,
1112 Register receiver,
1113 Register scratch1,
1114 Register scratch2,
1115 Register scratch3,
1116 Object* value,
1117 String* name,
1118 Label* miss) {
1119 // Check that the receiver isn't a smi.
1120 __ JumpIfSmi(receiver, miss);
1121
1122 // Check that the maps haven't changed.
1123 Register reg =
1124 CheckPrototypes(object, receiver, holder,
1125 scratch1, scratch2, scratch3, name, miss);
1126
1127 // Return the constant value.
1128 __ Move(rax, Handle<Object>(value));
1129 __ ret(0);
1130 }
1131
1132
// Emits code that loads a property through a named interceptor.  When the
// post-interceptor lookup result is a simple FIELD or AccessorInfo
// CALLBACKS property, the fallback path is inlined after the interceptor
// call; otherwise the whole load is delegated to the runtime.
// NOTE(review): the inlined path builds an internal frame so the pushed
// registers are visible to the GC during the interceptor call.
void StubCompiler::GenerateLoadInterceptor(JSObject* object,
                                           JSObject* interceptor_holder,
                                           LookupResult* lookup,
                                           Register receiver,
                                           Register name_reg,
                                           Register scratch1,
                                           Register scratch2,
                                           Register scratch3,
                                           String* name,
                                           Label* miss) {
  ASSERT(interceptor_holder->HasNamedInterceptor());
  ASSERT(!interceptor_holder->GetNamedInterceptor()->getter()->IsUndefined());

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, miss);

  // So far the most popular follow ups for interceptor loads are FIELD
  // and CALLBACKS, so inline only them, other cases may be added
  // later.
  bool compile_followup_inline = false;
  if (lookup->IsProperty() && lookup->IsCacheable()) {
    if (lookup->type() == FIELD) {
      compile_followup_inline = true;
    } else if (lookup->type() == CALLBACKS &&
        lookup->GetCallbackObject()->IsAccessorInfo() &&
        AccessorInfo::cast(lookup->GetCallbackObject())->getter() != NULL) {
      compile_followup_inline = true;
    }
  }

  if (compile_followup_inline) {
    // Compile the interceptor call, followed by inline code to load the
    // property from further up the prototype chain if the call fails.
    // Check that the maps haven't changed.
    Register holder_reg = CheckPrototypes(object, receiver, interceptor_holder,
                                          scratch1, scratch2, scratch3,
                                          name, miss);
    ASSERT(holder_reg.is(receiver) || holder_reg.is(scratch1));

    // Save necessary data before invoking an interceptor.
    // Requires a frame to make GC aware of pushed pointers.
    __ EnterInternalFrame();

    if (lookup->type() == CALLBACKS && !receiver.is(holder_reg)) {
      // CALLBACKS case needs a receiver to be passed into C++ callback.
      __ push(receiver);
    }
    __ push(holder_reg);
    __ push(name_reg);

    // Invoke an interceptor. Note: map checks from receiver to
    // interceptor's holder has been compiled before (see a caller
    // of this method.)
    CompileCallLoadPropertyWithInterceptor(masm(),
                                           receiver,
                                           holder_reg,
                                           name_reg,
                                           interceptor_holder);

    // Check if interceptor provided a value for property. If it's
    // the case, return immediately.
    Label interceptor_failed;
    __ CompareRoot(rax, Heap::kNoInterceptorResultSentinelRootIndex);
    __ j(equal, &interceptor_failed);
    __ LeaveInternalFrame();
    __ ret(0);

    // The interceptor declined; restore the registers saved above
    // (in reverse push order) and fall back to the real property.
    __ bind(&interceptor_failed);
    __ pop(name_reg);
    __ pop(holder_reg);
    if (lookup->type() == CALLBACKS && !receiver.is(holder_reg)) {
      __ pop(receiver);
    }

    __ LeaveInternalFrame();

    // Check that the maps from interceptor's holder to lookup's holder
    // haven't changed. And load lookup's holder into |holder| register.
    if (interceptor_holder != lookup->holder()) {
      holder_reg = CheckPrototypes(interceptor_holder,
                                   holder_reg,
                                   lookup->holder(),
                                   scratch1,
                                   scratch2,
                                   scratch3,
                                   name,
                                   miss);
    }

    if (lookup->type() == FIELD) {
      // We found FIELD property in prototype chain of interceptor's holder.
      // Retrieve a field from field's holder.
      GenerateFastPropertyLoad(masm(), rax, holder_reg,
                               lookup->holder(), lookup->GetFieldIndex());
      __ ret(0);
    } else {
      // We found CALLBACKS property in prototype chain of interceptor's
      // holder.
      ASSERT(lookup->type() == CALLBACKS);
      ASSERT(lookup->GetCallbackObject()->IsAccessorInfo());
      AccessorInfo* callback = AccessorInfo::cast(lookup->GetCallbackObject());
      ASSERT(callback != NULL);
      ASSERT(callback->getter() != NULL);

      // Tail call to runtime.
      // Important invariant in CALLBACKS case: the code above must be
      // structured to never clobber |receiver| register.
      __ pop(scratch2);  // return address
      __ push(receiver);
      __ push(holder_reg);
      __ Move(holder_reg, Handle<AccessorInfo>(callback));
      __ push(FieldOperand(holder_reg, AccessorInfo::kDataOffset));
      __ push(holder_reg);
      __ push(name_reg);
      __ push(scratch2);  // restore return address

      ExternalReference ref =
          ExternalReference(IC_Utility(IC::kLoadCallbackProperty));
      __ TailCallExternalReference(ref, 5, 1);
    }
  } else {  // !compile_followup_inline
    // Call the runtime system to load the interceptor.
    // Check that the maps haven't changed.
    Register holder_reg = CheckPrototypes(object, receiver, interceptor_holder,
                                          scratch1, scratch2, scratch3,
                                          name, miss);
    __ pop(scratch2);  // save old return address
    PushInterceptorArguments(masm(), receiver, holder_reg,
                             name_reg, interceptor_holder);
    __ push(scratch2);  // restore old return address

    ExternalReference ref = ExternalReference(
        IC_Utility(IC::kLoadPropertyWithInterceptorForLoad));
    __ TailCallExternalReference(ref, 5, 1);
  }
}
1269
1270
867 void CallStubCompiler::GenerateNameCheck(String* name, Label* miss) { 1271 void CallStubCompiler::GenerateNameCheck(String* name, Label* miss) {
868 if (kind_ == Code::KEYED_CALL_IC) { 1272 if (kind_ == Code::KEYED_CALL_IC) {
869 __ Cmp(rcx, Handle<String>(name)); 1273 __ Cmp(rcx, Handle<String>(name));
870 __ j(not_equal, miss); 1274 __ j(not_equal, miss);
871 } 1275 }
872 } 1276 }
873 1277
874 1278
875 void CallStubCompiler::GenerateGlobalReceiverCheck(JSObject* object, 1279 void CallStubCompiler::GenerateGlobalReceiverCheck(JSObject* object,
876 JSObject* holder, 1280 JSObject* holder,
(...skipping 51 matching lines...) Expand 10 before | Expand all | Expand 10 after
928 MaybeObject* CallStubCompiler::GenerateMissBranch() { 1332 MaybeObject* CallStubCompiler::GenerateMissBranch() {
929 MaybeObject* maybe_obj = Isolate::Current()->stub_cache()-> 1333 MaybeObject* maybe_obj = Isolate::Current()->stub_cache()->
930 ComputeCallMiss(arguments().immediate(), kind_); 1334 ComputeCallMiss(arguments().immediate(), kind_);
931 Object* obj; 1335 Object* obj;
932 if (!maybe_obj->ToObject(&obj)) return maybe_obj; 1336 if (!maybe_obj->ToObject(&obj)) return maybe_obj;
933 __ Jump(Handle<Code>(Code::cast(obj)), RelocInfo::CODE_TARGET); 1337 __ Jump(Handle<Code>(Code::cast(obj)), RelocInfo::CODE_TARGET);
934 return obj; 1338 return obj;
935 } 1339 }
936 1340
937 1341
// Compiles a monomorphic call stub for a constant target |function|.
// Custom built-in targets are routed to CompileCustomCall first; a
// returned undefined means "no special stub, compile the generic form".
// The emitted code validates the receiver according to |check| (map,
// string, number, or boolean), then either performs a fast API call
// (when the target is a simple API call at a known prototype depth) or
// invokes the function directly.
MaybeObject* CallStubCompiler::CompileCallConstant(Object* object,
                                                   JSObject* holder,
                                                   JSFunction* function,
                                                   String* name,
                                                   CheckType check) {
  // ----------- S t a t e -------------
  // rcx                 : function name
  // rsp[0]              : return address
  // rsp[8]              : argument argc
  // rsp[16]             : argument argc - 1
  // ...
  // rsp[argc * 8]       : argument 1
  // rsp[(argc + 1) * 8] : argument 0 = receiver
  // -----------------------------------

  SharedFunctionInfo* function_info = function->shared();
  if (function_info->HasBuiltinFunctionId()) {
    BuiltinFunctionId id = function_info->builtin_function_id();
    MaybeObject* maybe_result = CompileCustomCall(
        id, object, holder, NULL, function, name);
    Object* result;
    if (!maybe_result->ToObject(&result)) return maybe_result;
    // undefined means bail out to regular compiler.
    if (!result->IsUndefined()) return result;
  }

  // Separate miss label used before the fast-API stack adjustment is
  // made, so the miss path below knows whether rsp must be restored.
  Label miss_in_smi_check;

  GenerateNameCheck(name, &miss_in_smi_check);

  // Get the receiver from the stack.
  const int argc = arguments().immediate();
  __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));

  // Check that the receiver isn't a smi.
  if (check != NUMBER_CHECK) {
    __ JumpIfSmi(rdx, &miss_in_smi_check);
  }

  // Make sure that it's okay not to patch the on stack receiver
  // unless we're doing a receiver map check.
  ASSERT(!object->IsGlobalObject() || check == RECEIVER_MAP_CHECK);

  CallOptimization optimization(function);
  int depth = kInvalidProtoDepth;
  Label miss;

  switch (check) {
    case RECEIVER_MAP_CHECK:
      __ IncrementCounter(COUNTERS->call_const(), 1);

      if (optimization.is_simple_api_call() && !object->IsGlobalObject()) {
        depth = optimization.GetPrototypeDepthOfExpectedType(
            JSObject::cast(object), holder);
      }

      if (depth != kInvalidProtoDepth) {
        __ IncrementCounter(COUNTERS->call_const_fast_api(), 1);
        // Allocate space for v8::Arguments implicit values. Must be initialized
        // before to call any runtime function.
        __ subq(rsp, Immediate(kFastApiCallArguments * kPointerSize));
      }

      // Check that the maps haven't changed.
      CheckPrototypes(JSObject::cast(object), rdx, holder,
                      rbx, rax, rdi, name, depth, &miss);

      // Patch the receiver on the stack with the global proxy if
      // necessary.
      if (object->IsGlobalObject()) {
        ASSERT(depth == kInvalidProtoDepth);
        __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalReceiverOffset));
        __ movq(Operand(rsp, (argc + 1) * kPointerSize), rdx);
      }
      break;

    case STRING_CHECK:
      if (!function->IsBuiltin()) {
        // Calling non-builtins with a value as receiver requires boxing.
        __ jmp(&miss);
      } else {
        // Check that the object is a two-byte string or a symbol.
        __ CmpObjectType(rdx, FIRST_NONSTRING_TYPE, rax);
        __ j(above_equal, &miss);
        // Check that the maps starting from the prototype haven't changed.
        GenerateDirectLoadGlobalFunctionPrototype(
            masm(), Context::STRING_FUNCTION_INDEX, rax, &miss);
        CheckPrototypes(JSObject::cast(object->GetPrototype()), rax, holder,
                        rbx, rdx, rdi, name, &miss);
      }
      break;

    case NUMBER_CHECK: {
      if (!function->IsBuiltin()) {
        // Calling non-builtins with a value as receiver requires boxing.
        __ jmp(&miss);
      } else {
        Label fast;
        // Check that the object is a smi or a heap number.
        __ JumpIfSmi(rdx, &fast);
        __ CmpObjectType(rdx, HEAP_NUMBER_TYPE, rax);
        __ j(not_equal, &miss);
        __ bind(&fast);
        // Check that the maps starting from the prototype haven't changed.
        GenerateDirectLoadGlobalFunctionPrototype(
            masm(), Context::NUMBER_FUNCTION_INDEX, rax, &miss);
        CheckPrototypes(JSObject::cast(object->GetPrototype()), rax, holder,
                        rbx, rdx, rdi, name, &miss);
      }
      break;
    }

    case BOOLEAN_CHECK: {
      if (!function->IsBuiltin()) {
        // Calling non-builtins with a value as receiver requires boxing.
        __ jmp(&miss);
      } else {
        Label fast;
        // Check that the object is a boolean.
        __ CompareRoot(rdx, Heap::kTrueValueRootIndex);
        __ j(equal, &fast);
        __ CompareRoot(rdx, Heap::kFalseValueRootIndex);
        __ j(not_equal, &miss);
        __ bind(&fast);
        // Check that the maps starting from the prototype haven't changed.
        GenerateDirectLoadGlobalFunctionPrototype(
            masm(), Context::BOOLEAN_FUNCTION_INDEX, rax, &miss);
        CheckPrototypes(JSObject::cast(object->GetPrototype()), rax, holder,
                        rbx, rdx, rdi, name, &miss);
      }
      break;
    }

    default:
      UNREACHABLE();
  }

  if (depth != kInvalidProtoDepth) {
    Failure* failure;
    // Move the return address on top of the stack.
    __ movq(rax, Operand(rsp, 3 * kPointerSize));
    __ movq(Operand(rsp, 0 * kPointerSize), rax);

    // rsp[2 * kPointerSize] is uninitialized, rsp[3 * kPointerSize] contains
    // duplicate of return address and will be overwritten.
    bool success = GenerateFastApiCall(masm(), optimization, argc, &failure);
    if (!success) {
      return failure;
    }
  } else {
    __ InvokeFunction(function, arguments(), JUMP_FUNCTION);
  }

  // Handle call cache miss.
  __ bind(&miss);
  if (depth != kInvalidProtoDepth) {
    // Undo the stack space reserved for the fast API call arguments.
    __ addq(rsp, Immediate(kFastApiCallArguments * kPointerSize));
  }

  // Handle call cache miss.
  __ bind(&miss_in_smi_check);
  Object* obj;
  { MaybeObject* maybe_obj = GenerateMissBranch();
    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  }

  // Return the generated code.
  return GetCode(function);
}
1107
1108
1109 MaybeObject* CallStubCompiler::CompileCallField(JSObject* object, 1342 MaybeObject* CallStubCompiler::CompileCallField(JSObject* object,
1110 JSObject* holder, 1343 JSObject* holder,
1111 int index, 1344 int index,
1112 String* name) { 1345 String* name) {
1113 // ----------- S t a t e ------------- 1346 // ----------- S t a t e -------------
1114 // rcx : function name 1347 // rcx : function name
1115 // rsp[0] : return address 1348 // rsp[0] : return address
1116 // rsp[8] : argument argc 1349 // rsp[8] : argument argc
1117 // rsp[16] : argument argc - 1 1350 // rsp[16] : argument argc - 1
1118 // ... 1351 // ...
(...skipping 125 matching lines...) Expand 10 before | Expand all | Expand 10 after
1244 1477
1245 __ JumpIfNotSmi(rcx, &with_write_barrier); 1478 __ JumpIfNotSmi(rcx, &with_write_barrier);
1246 1479
1247 __ bind(&exit); 1480 __ bind(&exit);
1248 __ ret((argc + 1) * kPointerSize); 1481 __ ret((argc + 1) * kPointerSize);
1249 1482
1250 __ bind(&with_write_barrier); 1483 __ bind(&with_write_barrier);
1251 1484
1252 __ InNewSpace(rbx, rcx, equal, &exit); 1485 __ InNewSpace(rbx, rcx, equal, &exit);
1253 1486
1254 RecordWriteStub stub(rbx, rdx, rcx); 1487 __ RecordWriteHelper(rbx, rdx, rcx);
1255 __ CallStub(&stub);
1256 1488
1257 __ ret((argc + 1) * kPointerSize); 1489 __ ret((argc + 1) * kPointerSize);
1258 1490
1259 __ bind(&attempt_to_grow_elements); 1491 __ bind(&attempt_to_grow_elements);
1260 if (!FLAG_inline_new) { 1492 if (!FLAG_inline_new) {
1261 __ jmp(&call_builtin); 1493 __ jmp(&call_builtin);
1262 } 1494 }
1263 1495
1264 ExternalReference new_space_allocation_top = 1496 ExternalReference new_space_allocation_top =
1265 ExternalReference::new_space_allocation_top_address(); 1497 ExternalReference::new_space_allocation_top_address();
(...skipping 138 matching lines...) Expand 10 before | Expand all | Expand 10 after
1404 Object* obj; 1636 Object* obj;
1405 { MaybeObject* maybe_obj = GenerateMissBranch(); 1637 { MaybeObject* maybe_obj = GenerateMissBranch();
1406 if (!maybe_obj->ToObject(&obj)) return maybe_obj; 1638 if (!maybe_obj->ToObject(&obj)) return maybe_obj;
1407 } 1639 }
1408 1640
1409 // Return the generated code. 1641 // Return the generated code.
1410 return GetCode(function); 1642 return GetCode(function);
1411 } 1643 }
1412 1644
1413 1645
1414 MaybeObject* CallStubCompiler::CompileStringCharAtCall( 1646 MaybeObject* CallStubCompiler::CompileStringCharCodeAtCall(
1415 Object* object, 1647 Object* object,
1416 JSObject* holder, 1648 JSObject* holder,
1417 JSGlobalPropertyCell* cell, 1649 JSGlobalPropertyCell* cell,
1418 JSFunction* function, 1650 JSFunction* function,
1419 String* name) { 1651 String* name) {
1420 // ----------- S t a t e ------------- 1652 // ----------- S t a t e -------------
1421 // -- rcx : function name 1653 // -- rcx : function name
1422 // -- rsp[0] : return address 1654 // -- rsp[0] : return address
1423 // -- rsp[(argc - n) * 8] : arg[n] (zero-based) 1655 // -- rsp[(argc - n) * 8] : arg[n] (zero-based)
1424 // -- ... 1656 // -- ...
(...skipping 12 matching lines...) Expand all
1437 1669
1438 // Check that the maps starting from the prototype haven't changed. 1670 // Check that the maps starting from the prototype haven't changed.
1439 GenerateDirectLoadGlobalFunctionPrototype(masm(), 1671 GenerateDirectLoadGlobalFunctionPrototype(masm(),
1440 Context::STRING_FUNCTION_INDEX, 1672 Context::STRING_FUNCTION_INDEX,
1441 rax, 1673 rax,
1442 &miss); 1674 &miss);
1443 ASSERT(object != holder); 1675 ASSERT(object != holder);
1444 CheckPrototypes(JSObject::cast(object->GetPrototype()), rax, holder, 1676 CheckPrototypes(JSObject::cast(object->GetPrototype()), rax, holder,
1445 rbx, rdx, rdi, name, &miss); 1677 rbx, rdx, rdi, name, &miss);
1446 1678
1447 Register receiver = rax; 1679 Register receiver = rbx;
1448 Register index = rdi; 1680 Register index = rdi;
1449 Register scratch1 = rbx; 1681 Register scratch = rdx;
1450 Register scratch2 = rdx;
1451 Register result = rax; 1682 Register result = rax;
1452 __ movq(receiver, Operand(rsp, (argc + 1) * kPointerSize)); 1683 __ movq(receiver, Operand(rsp, (argc + 1) * kPointerSize));
1453 if (argc > 0) { 1684 if (argc > 0) {
1454 __ movq(index, Operand(rsp, (argc - 0) * kPointerSize)); 1685 __ movq(index, Operand(rsp, (argc - 0) * kPointerSize));
1455 } else { 1686 } else {
1456 __ LoadRoot(index, Heap::kUndefinedValueRootIndex); 1687 __ LoadRoot(index, Heap::kUndefinedValueRootIndex);
1457 } 1688 }
1458 1689
1459 StringCharAtGenerator char_at_generator(receiver, 1690 StringCharCodeAtGenerator char_code_at_generator(receiver,
1460 index, 1691 index,
1461 scratch1, 1692 scratch,
1462 scratch2, 1693 result,
1463 result, 1694 &miss, // When not a string.
1464 &miss, // When not a string. 1695 &miss, // When not a number.
1465 &miss, // When not a number. 1696 &index_out_of_range,
1466 &index_out_of_range, 1697 STRING_INDEX_IS_NUMBER);
1467 STRING_INDEX_IS_NUMBER); 1698 char_code_at_generator.GenerateFast(masm());
1468 char_at_generator.GenerateFast(masm());
1469 __ ret((argc + 1) * kPointerSize); 1699 __ ret((argc + 1) * kPointerSize);
1470 1700
1471 StubRuntimeCallHelper call_helper; 1701 StubRuntimeCallHelper call_helper;
1472 char_at_generator.GenerateSlow(masm(), call_helper); 1702 char_code_at_generator.GenerateSlow(masm(), call_helper);
1473 1703
1474 __ bind(&index_out_of_range); 1704 __ bind(&index_out_of_range);
1475 __ LoadRoot(rax, Heap::kEmptyStringRootIndex); 1705 __ LoadRoot(rax, Heap::kNanValueRootIndex);
1476 __ ret((argc + 1) * kPointerSize); 1706 __ ret((argc + 1) * kPointerSize);
1477 1707
1478 __ bind(&miss); 1708 __ bind(&miss);
1479 Object* obj; 1709 Object* obj;
1480 { MaybeObject* maybe_obj = GenerateMissBranch(); 1710 { MaybeObject* maybe_obj = GenerateMissBranch();
1481 if (!maybe_obj->ToObject(&obj)) return maybe_obj; 1711 if (!maybe_obj->ToObject(&obj)) return maybe_obj;
1482 } 1712 }
1483 1713
1484 // Return the generated code. 1714 // Return the generated code.
1485 return GetCode(function); 1715 return GetCode(function);
1486 } 1716 }
1487 1717
1488 1718
1489 MaybeObject* CallStubCompiler::CompileStringCharCodeAtCall( 1719 MaybeObject* CallStubCompiler::CompileStringCharAtCall(
1490 Object* object, 1720 Object* object,
1491 JSObject* holder, 1721 JSObject* holder,
1492 JSGlobalPropertyCell* cell, 1722 JSGlobalPropertyCell* cell,
1493 JSFunction* function, 1723 JSFunction* function,
1494 String* name) { 1724 String* name) {
1495 // ----------- S t a t e ------------- 1725 // ----------- S t a t e -------------
1496 // -- rcx : function name 1726 // -- rcx : function name
1497 // -- rsp[0] : return address 1727 // -- rsp[0] : return address
1498 // -- rsp[(argc - n) * 8] : arg[n] (zero-based) 1728 // -- rsp[(argc - n) * 8] : arg[n] (zero-based)
1499 // -- ... 1729 // -- ...
1500 // -- rsp[(argc + 1) * 8] : receiver 1730 // -- rsp[(argc + 1) * 8] : receiver
1501 // ----------------------------------- 1731 // -----------------------------------
1502 1732
1503 // If object is not a string, bail out to regular call. 1733 // If object is not a string, bail out to regular call.
1504 if (!object->IsString() || cell != NULL) return HEAP->undefined_value(); 1734 if (!object->IsString() || cell != NULL) return HEAP->undefined_value();
1505 1735
1506 const int argc = arguments().immediate(); 1736 const int argc = arguments().immediate();
1507 1737
1508 Label miss; 1738 Label miss;
1509 Label index_out_of_range; 1739 Label index_out_of_range;
1740
1510 GenerateNameCheck(name, &miss); 1741 GenerateNameCheck(name, &miss);
1511 1742
1512 // Check that the maps starting from the prototype haven't changed. 1743 // Check that the maps starting from the prototype haven't changed.
1513 GenerateDirectLoadGlobalFunctionPrototype(masm(), 1744 GenerateDirectLoadGlobalFunctionPrototype(masm(),
1514 Context::STRING_FUNCTION_INDEX, 1745 Context::STRING_FUNCTION_INDEX,
1515 rax, 1746 rax,
1516 &miss); 1747 &miss);
1517 ASSERT(object != holder); 1748 ASSERT(object != holder);
1518 CheckPrototypes(JSObject::cast(object->GetPrototype()), rax, holder, 1749 CheckPrototypes(JSObject::cast(object->GetPrototype()), rax, holder,
1519 rbx, rdx, rdi, name, &miss); 1750 rbx, rdx, rdi, name, &miss);
1520 1751
1521 Register receiver = rbx; 1752 Register receiver = rax;
1522 Register index = rdi; 1753 Register index = rdi;
1523 Register scratch = rdx; 1754 Register scratch1 = rbx;
1755 Register scratch2 = rdx;
1524 Register result = rax; 1756 Register result = rax;
1525 __ movq(receiver, Operand(rsp, (argc + 1) * kPointerSize)); 1757 __ movq(receiver, Operand(rsp, (argc + 1) * kPointerSize));
1526 if (argc > 0) { 1758 if (argc > 0) {
1527 __ movq(index, Operand(rsp, (argc - 0) * kPointerSize)); 1759 __ movq(index, Operand(rsp, (argc - 0) * kPointerSize));
1528 } else { 1760 } else {
1529 __ LoadRoot(index, Heap::kUndefinedValueRootIndex); 1761 __ LoadRoot(index, Heap::kUndefinedValueRootIndex);
1530 } 1762 }
1531 1763
1532 StringCharCodeAtGenerator char_code_at_generator(receiver, 1764 StringCharAtGenerator char_at_generator(receiver,
1533 index, 1765 index,
1534 scratch, 1766 scratch1,
1535 result, 1767 scratch2,
1536 &miss, // When not a string. 1768 result,
1537 &miss, // When not a number. 1769 &miss, // When not a string.
1538 &index_out_of_range, 1770 &miss, // When not a number.
1539 STRING_INDEX_IS_NUMBER); 1771 &index_out_of_range,
1540 char_code_at_generator.GenerateFast(masm()); 1772 STRING_INDEX_IS_NUMBER);
1773 char_at_generator.GenerateFast(masm());
1541 __ ret((argc + 1) * kPointerSize); 1774 __ ret((argc + 1) * kPointerSize);
1542 1775
1543 StubRuntimeCallHelper call_helper; 1776 StubRuntimeCallHelper call_helper;
1544 char_code_at_generator.GenerateSlow(masm(), call_helper); 1777 char_at_generator.GenerateSlow(masm(), call_helper);
1545 1778
1546 __ bind(&index_out_of_range); 1779 __ bind(&index_out_of_range);
1547 __ LoadRoot(rax, Heap::kNanValueRootIndex); 1780 __ LoadRoot(rax, Heap::kEmptyStringRootIndex);
1548 __ ret((argc + 1) * kPointerSize); 1781 __ ret((argc + 1) * kPointerSize);
1549 1782
1550 __ bind(&miss); 1783 __ bind(&miss);
1551 Object* obj; 1784 Object* obj;
1552 { MaybeObject* maybe_obj = GenerateMissBranch(); 1785 { MaybeObject* maybe_obj = GenerateMissBranch();
1553 if (!maybe_obj->ToObject(&obj)) return maybe_obj; 1786 if (!maybe_obj->ToObject(&obj)) return maybe_obj;
1554 } 1787 }
1555 1788
1556 // Return the generated code. 1789 // Return the generated code.
1557 return GetCode(function); 1790 return GetCode(function);
(...skipping 179 matching lines...) Expand 10 before | Expand all | Expand 10 after
1737 Object* obj; 1970 Object* obj;
1738 { MaybeObject* maybe_obj = GenerateMissBranch(); 1971 { MaybeObject* maybe_obj = GenerateMissBranch();
1739 if (!maybe_obj->ToObject(&obj)) return maybe_obj; 1972 if (!maybe_obj->ToObject(&obj)) return maybe_obj;
1740 } 1973 }
1741 1974
1742 // Return the generated code. 1975 // Return the generated code.
1743 return (cell == NULL) ? GetCode(function) : GetCode(NORMAL, name); 1976 return (cell == NULL) ? GetCode(function) : GetCode(NORMAL, name);
1744 } 1977 }
1745 1978
1746 1979
1980 MaybeObject* CallStubCompiler::CompileCallConstant(Object* object,
1981 JSObject* holder,
1982 JSFunction* function,
1983 String* name,
1984 CheckType check) {
1985 // ----------- S t a t e -------------
1986 // rcx : function name
1987 // rsp[0] : return address
1988 // rsp[8] : argument argc
1989 // rsp[16] : argument argc - 1
1990 // ...
1991 // rsp[argc * 8] : argument 1
1992 // rsp[(argc + 1) * 8] : argument 0 = receiver
1993 // -----------------------------------
1994
1995 SharedFunctionInfo* function_info = function->shared();
1996 if (function_info->HasBuiltinFunctionId()) {
1997 BuiltinFunctionId id = function_info->builtin_function_id();
1998 MaybeObject* maybe_result = CompileCustomCall(
1999 id, object, holder, NULL, function, name);
2000 Object* result;
2001 if (!maybe_result->ToObject(&result)) return maybe_result;
2002 // undefined means bail out to regular compiler.
2003 if (!result->IsUndefined()) return result;
2004 }
2005
2006 Label miss_in_smi_check;
2007
2008 GenerateNameCheck(name, &miss_in_smi_check);
2009
2010 // Get the receiver from the stack.
2011 const int argc = arguments().immediate();
2012 __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
2013
2014 // Check that the receiver isn't a smi.
2015 if (check != NUMBER_CHECK) {
2016 __ JumpIfSmi(rdx, &miss_in_smi_check);
2017 }
2018
2019 // Make sure that it's okay not to patch the on stack receiver
2020 // unless we're doing a receiver map check.
2021 ASSERT(!object->IsGlobalObject() || check == RECEIVER_MAP_CHECK);
2022
2023 CallOptimization optimization(function);
2024 int depth = kInvalidProtoDepth;
2025 Label miss;
2026
2027 switch (check) {
2028 case RECEIVER_MAP_CHECK:
2029 __ IncrementCounter(COUNTERS->call_const(), 1);
2030
2031 if (optimization.is_simple_api_call() && !object->IsGlobalObject()) {
2032 depth = optimization.GetPrototypeDepthOfExpectedType(
2033 JSObject::cast(object), holder);
2034 }
2035
2036 if (depth != kInvalidProtoDepth) {
2037 __ IncrementCounter(COUNTERS->call_const_fast_api(), 1);
2038
2039 // Allocate space for v8::Arguments implicit values. Must be initialized
2040 // before to call any runtime function.
2041 __ subq(rsp, Immediate(kFastApiCallArguments * kPointerSize));
2042 }
2043
2044 // Check that the maps haven't changed.
2045 CheckPrototypes(JSObject::cast(object), rdx, holder,
2046 rbx, rax, rdi, name, depth, &miss);
2047
2048 // Patch the receiver on the stack with the global proxy if
2049 // necessary.
2050 if (object->IsGlobalObject()) {
2051 ASSERT(depth == kInvalidProtoDepth);
2052 __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalReceiverOffset));
2053 __ movq(Operand(rsp, (argc + 1) * kPointerSize), rdx);
2054 }
2055 break;
2056
2057 case STRING_CHECK:
2058 if (!function->IsBuiltin()) {
2059 // Calling non-builtins with a value as receiver requires boxing.
2060 __ jmp(&miss);
2061 } else {
2062 // Check that the object is a two-byte string or a symbol.
2063 __ CmpObjectType(rdx, FIRST_NONSTRING_TYPE, rax);
2064 __ j(above_equal, &miss);
2065 // Check that the maps starting from the prototype haven't changed.
2066 GenerateDirectLoadGlobalFunctionPrototype(
2067 masm(), Context::STRING_FUNCTION_INDEX, rax, &miss);
2068 CheckPrototypes(JSObject::cast(object->GetPrototype()), rax, holder,
2069 rbx, rdx, rdi, name, &miss);
2070 }
2071 break;
2072
2073 case NUMBER_CHECK: {
2074 if (!function->IsBuiltin()) {
2075 // Calling non-builtins with a value as receiver requires boxing.
2076 __ jmp(&miss);
2077 } else {
2078 Label fast;
2079 // Check that the object is a smi or a heap number.
2080 __ JumpIfSmi(rdx, &fast);
2081 __ CmpObjectType(rdx, HEAP_NUMBER_TYPE, rax);
2082 __ j(not_equal, &miss);
2083 __ bind(&fast);
2084 // Check that the maps starting from the prototype haven't changed.
2085 GenerateDirectLoadGlobalFunctionPrototype(
2086 masm(), Context::NUMBER_FUNCTION_INDEX, rax, &miss);
2087 CheckPrototypes(JSObject::cast(object->GetPrototype()), rax, holder,
2088 rbx, rdx, rdi, name, &miss);
2089 }
2090 break;
2091 }
2092
2093 case BOOLEAN_CHECK: {
2094 if (!function->IsBuiltin()) {
2095 // Calling non-builtins with a value as receiver requires boxing.
2096 __ jmp(&miss);
2097 } else {
2098 Label fast;
2099 // Check that the object is a boolean.
2100 __ CompareRoot(rdx, Heap::kTrueValueRootIndex);
2101 __ j(equal, &fast);
2102 __ CompareRoot(rdx, Heap::kFalseValueRootIndex);
2103 __ j(not_equal, &miss);
2104 __ bind(&fast);
2105 // Check that the maps starting from the prototype haven't changed.
2106 GenerateDirectLoadGlobalFunctionPrototype(
2107 masm(), Context::BOOLEAN_FUNCTION_INDEX, rax, &miss);
2108 CheckPrototypes(JSObject::cast(object->GetPrototype()), rax, holder,
2109 rbx, rdx, rdi, name, &miss);
2110 }
2111 break;
2112 }
2113
2114 default:
2115 UNREACHABLE();
2116 }
2117
2118 if (depth != kInvalidProtoDepth) {
2119 Failure* failure;
2120 // Move the return address on top of the stack.
2121 __ movq(rax, Operand(rsp, 3 * kPointerSize));
2122 __ movq(Operand(rsp, 0 * kPointerSize), rax);
2123
2124 // rsp[2 * kPointerSize] is uninitialized, rsp[3 * kPointerSize] contains
2125 // duplicate of return address and will be overwritten.
2126 bool success = GenerateFastApiCall(masm(), optimization, argc, &failure);
2127 if (!success) {
2128 return failure;
2129 }
2130 } else {
2131 __ InvokeFunction(function, arguments(), JUMP_FUNCTION);
2132 }
2133
2134 // Handle call cache miss.
2135 __ bind(&miss);
2136 if (depth != kInvalidProtoDepth) {
2137 __ addq(rsp, Immediate(kFastApiCallArguments * kPointerSize));
2138 }
2139
2140 // Handle call cache miss.
2141 __ bind(&miss_in_smi_check);
2142 Object* obj;
2143 { MaybeObject* maybe_obj = GenerateMissBranch();
2144 if (!maybe_obj->ToObject(&obj)) return maybe_obj;
2145 }
2146
2147 // Return the generated code.
2148 return GetCode(function);
2149 }
2150
2151
1747 MaybeObject* CallStubCompiler::CompileCallInterceptor(JSObject* object, 2152 MaybeObject* CallStubCompiler::CompileCallInterceptor(JSObject* object,
1748 JSObject* holder, 2153 JSObject* holder,
1749 String* name) { 2154 String* name) {
1750 // ----------- S t a t e ------------- 2155 // ----------- S t a t e -------------
1751 // rcx : function name 2156 // rcx : function name
1752 // rsp[0] : return address 2157 // rsp[0] : return address
1753 // rsp[8] : argument argc 2158 // rsp[8] : argument argc
1754 // rsp[16] : argument argc - 1 2159 // rsp[16] : argument argc - 1
1755 // ... 2160 // ...
1756 // rsp[argc * 8] : argument 1 2161 // rsp[argc * 8] : argument 1
(...skipping 120 matching lines...) Expand 10 before | Expand all | Expand 10 after
1877 Object* obj; 2282 Object* obj;
1878 { MaybeObject* maybe_obj = GenerateMissBranch(); 2283 { MaybeObject* maybe_obj = GenerateMissBranch();
1879 if (!maybe_obj->ToObject(&obj)) return maybe_obj; 2284 if (!maybe_obj->ToObject(&obj)) return maybe_obj;
1880 } 2285 }
1881 2286
1882 // Return the generated code. 2287 // Return the generated code.
1883 return GetCode(NORMAL, name); 2288 return GetCode(NORMAL, name);
1884 } 2289 }
1885 2290
1886 2291
1887 MaybeObject* LoadStubCompiler::CompileLoadCallback(String* name, 2292 MaybeObject* StoreStubCompiler::CompileStoreField(JSObject* object,
1888 JSObject* object, 2293 int index,
1889 JSObject* holder, 2294 Map* transition,
1890 AccessorInfo* callback) { 2295 String* name) {
1891 // ----------- S t a t e ------------- 2296 // ----------- S t a t e -------------
1892 // -- rax : receiver 2297 // -- rax : value
1893 // -- rcx : name 2298 // -- rcx : name
1894 // -- rsp[0] : return address 2299 // -- rdx : receiver
1895 // ----------------------------------- 2300 // -- rsp[0] : return address
1896 Label miss; 2301 // -----------------------------------
1897 2302 Label miss;
1898 Failure* failure = Failure::InternalError(); 2303
1899 bool success = GenerateLoadCallback(object, holder, rax, rcx, rdx, rbx, rdi, 2304 // Generate store field code. Preserves receiver and name on jump to miss.
1900 callback, name, &miss, &failure); 2305 GenerateStoreField(masm(),
1901 if (!success) { 2306 object,
1902 miss.Unuse(); 2307 index,
1903 return failure; 2308 transition,
2309 rdx, rcx, rbx,
2310 &miss);
2311
2312 // Handle store cache miss.
2313 __ bind(&miss);
2314 Handle<Code> ic(Isolate::Current()->builtins()->builtin(
2315 Builtins::StoreIC_Miss));
2316 __ Jump(ic, RelocInfo::CODE_TARGET);
2317
2318 // Return the generated code.
2319 return GetCode(transition == NULL ? FIELD : MAP_TRANSITION, name);
2320 }
2321
2322
2323 MaybeObject* StoreStubCompiler::CompileStoreCallback(JSObject* object,
2324 AccessorInfo* callback,
2325 String* name) {
2326 // ----------- S t a t e -------------
2327 // -- rax : value
2328 // -- rcx : name
2329 // -- rdx : receiver
2330 // -- rsp[0] : return address
2331 // -----------------------------------
2332 Label miss;
2333
2334 // Check that the object isn't a smi.
2335 __ JumpIfSmi(rdx, &miss);
2336
2337 // Check that the map of the object hasn't changed.
2338 __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset),
2339 Handle<Map>(object->map()));
2340 __ j(not_equal, &miss);
2341
2342 // Perform global security token check if needed.
2343 if (object->IsJSGlobalProxy()) {
2344 __ CheckAccessGlobalProxy(rdx, rbx, &miss);
1904 } 2345 }
1905 2346
1906 __ bind(&miss); 2347 // Stub never generated for non-global objects that require access
1907 GenerateLoadMiss(masm(), Code::LOAD_IC); 2348 // checks.
2349 ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
2350
2351 __ pop(rbx); // remove the return address
2352 __ push(rdx); // receiver
2353 __ Push(Handle<AccessorInfo>(callback)); // callback info
2354 __ push(rcx); // name
2355 __ push(rax); // value
2356 __ push(rbx); // restore return address
2357
2358 // Do tail-call to the runtime system.
2359 ExternalReference store_callback_property =
2360 ExternalReference(IC_Utility(IC::kStoreCallbackProperty));
2361 __ TailCallExternalReference(store_callback_property, 4, 1);
2362
2363 // Handle store cache miss.
2364 __ bind(&miss);
2365 Handle<Code> ic(Isolate::Current()->builtins()->builtin(
2366 Builtins::StoreIC_Miss));
2367 __ Jump(ic, RelocInfo::CODE_TARGET);
1908 2368
1909 // Return the generated code. 2369 // Return the generated code.
1910 return GetCode(CALLBACKS, name); 2370 return GetCode(CALLBACKS, name);
1911 } 2371 }
1912 2372
1913 2373
1914 MaybeObject* LoadStubCompiler::CompileLoadConstant(JSObject* object, 2374 MaybeObject* StoreStubCompiler::CompileStoreInterceptor(JSObject* receiver,
1915 JSObject* holder, 2375 String* name) {
1916 Object* value, 2376 // ----------- S t a t e -------------
2377 // -- rax : value
2378 // -- rcx : name
2379 // -- rdx : receiver
2380 // -- rsp[0] : return address
2381 // -----------------------------------
2382 Label miss;
2383
2384 // Check that the object isn't a smi.
2385 __ JumpIfSmi(rdx, &miss);
2386
2387 // Check that the map of the object hasn't changed.
2388 __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset),
2389 Handle<Map>(receiver->map()));
2390 __ j(not_equal, &miss);
2391
2392 // Perform global security token check if needed.
2393 if (receiver->IsJSGlobalProxy()) {
2394 __ CheckAccessGlobalProxy(rdx, rbx, &miss);
2395 }
2396
2397 // Stub never generated for non-global objects that require access
2398 // checks.
2399 ASSERT(receiver->IsJSGlobalProxy() || !receiver->IsAccessCheckNeeded());
2400
2401 __ pop(rbx); // remove the return address
2402 __ push(rdx); // receiver
2403 __ push(rcx); // name
2404 __ push(rax); // value
2405 __ push(rbx); // restore return address
2406
2407 // Do tail-call to the runtime system.
2408 ExternalReference store_ic_property =
2409 ExternalReference(IC_Utility(IC::kStoreInterceptorProperty));
2410 __ TailCallExternalReference(store_ic_property, 3, 1);
2411
2412 // Handle store cache miss.
2413 __ bind(&miss);
2414 Handle<Code> ic(Isolate::Current()->builtins()->builtin(
2415 Builtins::StoreIC_Miss));
2416 __ Jump(ic, RelocInfo::CODE_TARGET);
2417
2418 // Return the generated code.
2419 return GetCode(INTERCEPTOR, name);
2420 }
2421
2422
2423 MaybeObject* StoreStubCompiler::CompileStoreGlobal(GlobalObject* object,
2424 JSGlobalPropertyCell* cell,
1917 String* name) { 2425 String* name) {
1918 // ----------- S t a t e ------------- 2426 // ----------- S t a t e -------------
1919 // -- rax : receiver 2427 // -- rax : value
1920 // -- rcx : name 2428 // -- rcx : name
1921 // -- rsp[0] : return address 2429 // -- rdx : receiver
1922 // ----------------------------------- 2430 // -- rsp[0] : return address
1923 Label miss; 2431 // -----------------------------------
1924 2432 Label miss;
1925 GenerateLoadConstant(object, holder, rax, rbx, rdx, rdi, value, name, &miss); 2433
1926 __ bind(&miss); 2434 // Check that the map of the global has not changed.
1927 GenerateLoadMiss(masm(), Code::LOAD_IC); 2435 __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset),
1928 2436 Handle<Map>(object->map()));
1929 // Return the generated code. 2437 __ j(not_equal, &miss);
1930 return GetCode(CONSTANT_FUNCTION, name); 2438
2439 // Store the value in the cell.
2440 __ Move(rcx, Handle<JSGlobalPropertyCell>(cell));
2441 __ movq(FieldOperand(rcx, JSGlobalPropertyCell::kValueOffset), rax);
2442
2443 // Return the value (register rax).
2444 __ IncrementCounter(COUNTERS->named_store_global_inline(), 1);
2445 __ ret(0);
2446
2447 // Handle store cache miss.
2448 __ bind(&miss);
2449 __ IncrementCounter(COUNTERS->named_store_global_inline_miss(), 1);
2450 Handle<Code> ic(Isolate::Current()->builtins()->builtin(
2451 Builtins::StoreIC_Miss));
2452 __ Jump(ic, RelocInfo::CODE_TARGET);
2453
2454 // Return the generated code.
2455 return GetCode(NORMAL, name);
2456 }
2457
2458
2459 MaybeObject* KeyedStoreStubCompiler::CompileStoreField(JSObject* object,
2460 int index,
2461 Map* transition,
2462 String* name) {
2463 // ----------- S t a t e -------------
2464 // -- rax : value
2465 // -- rcx : key
2466 // -- rdx : receiver
2467 // -- rsp[0] : return address
2468 // -----------------------------------
2469 Label miss;
2470
2471 __ IncrementCounter(COUNTERS->keyed_store_field(), 1);
2472
2473 // Check that the name has not changed.
2474 __ Cmp(rcx, Handle<String>(name));
2475 __ j(not_equal, &miss);
2476
2477 // Generate store field code. Preserves receiver and name on jump to miss.
2478 GenerateStoreField(masm(),
2479 object,
2480 index,
2481 transition,
2482 rdx, rcx, rbx,
2483 &miss);
2484
2485 // Handle store cache miss.
2486 __ bind(&miss);
2487 __ DecrementCounter(COUNTERS->keyed_store_field(), 1);
2488 Handle<Code> ic(Isolate::Current()->builtins()->builtin(
2489 Builtins::KeyedStoreIC_Miss));
2490 __ Jump(ic, RelocInfo::CODE_TARGET);
2491
2492 // Return the generated code.
2493 return GetCode(transition == NULL ? FIELD : MAP_TRANSITION, name);
2494 }
2495
2496
2497 MaybeObject* KeyedStoreStubCompiler::CompileStoreSpecialized(
2498 JSObject* receiver) {
2499 // ----------- S t a t e -------------
2500 // -- rax : value
2501 // -- rcx : key
2502 // -- rdx : receiver
2503 // -- rsp[0] : return address
2504 // -----------------------------------
2505 Label miss;
2506
2507 // Check that the receiver isn't a smi.
2508 __ JumpIfSmi(rdx, &miss);
2509
2510 // Check that the map matches.
2511 __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset),
2512 Handle<Map>(receiver->map()));
2513 __ j(not_equal, &miss);
2514
2515 // Check that the key is a smi.
2516 __ JumpIfNotSmi(rcx, &miss);
2517
2518 // Get the elements array and make sure it is a fast element array, not 'cow'.
2519 __ movq(rdi, FieldOperand(rdx, JSObject::kElementsOffset));
2520 __ Cmp(FieldOperand(rdi, HeapObject::kMapOffset),
2521 FACTORY->fixed_array_map());
2522 __ j(not_equal, &miss);
2523
2524 // Check that the key is within bounds.
2525 if (receiver->IsJSArray()) {
2526 __ SmiCompare(rcx, FieldOperand(rdx, JSArray::kLengthOffset));
2527 __ j(above_equal, &miss);
2528 } else {
2529 __ SmiCompare(rcx, FieldOperand(rdi, FixedArray::kLengthOffset));
2530 __ j(above_equal, &miss);
2531 }
2532
2533 // Do the store and update the write barrier. Make sure to preserve
2534 // the value in register eax.
2535 __ movq(rdx, rax);
2536 __ SmiToInteger32(rcx, rcx);
2537 __ movq(FieldOperand(rdi, rcx, times_pointer_size, FixedArray::kHeaderSize),
2538 rax);
2539 __ RecordWrite(rdi, 0, rdx, rcx);
2540
2541 // Done.
2542 __ ret(0);
2543
2544 // Handle store cache miss.
2545 __ bind(&miss);
2546 Handle<Code> ic(Isolate::Current()->builtins()->builtin(
2547 Builtins::KeyedStoreIC_Miss));
2548 __ jmp(ic, RelocInfo::CODE_TARGET);
2549
2550 // Return the generated code.
2551 return GetCode(NORMAL, NULL);
1931 } 2552 }
1932 2553
1933 2554
1934 MaybeObject* LoadStubCompiler::CompileLoadNonexistent(String* name, 2555 MaybeObject* LoadStubCompiler::CompileLoadNonexistent(String* name,
1935 JSObject* object, 2556 JSObject* object,
1936 JSObject* last) { 2557 JSObject* last) {
1937 // ----------- S t a t e ------------- 2558 // ----------- S t a t e -------------
1938 // -- rax : receiver 2559 // -- rax : receiver
1939 // -- rcx : name 2560 // -- rcx : name
1940 // -- rsp[0] : return address 2561 // -- rsp[0] : return address
(...skipping 48 matching lines...) Expand 10 before | Expand all | Expand 10 after
1989 2610
1990 GenerateLoadField(object, holder, rax, rbx, rdx, rdi, index, name, &miss); 2611 GenerateLoadField(object, holder, rax, rbx, rdx, rdi, index, name, &miss);
1991 __ bind(&miss); 2612 __ bind(&miss);
1992 GenerateLoadMiss(masm(), Code::LOAD_IC); 2613 GenerateLoadMiss(masm(), Code::LOAD_IC);
1993 2614
1994 // Return the generated code. 2615 // Return the generated code.
1995 return GetCode(FIELD, name); 2616 return GetCode(FIELD, name);
1996 } 2617 }
1997 2618
1998 2619
2620 MaybeObject* LoadStubCompiler::CompileLoadCallback(String* name,
2621 JSObject* object,
2622 JSObject* holder,
2623 AccessorInfo* callback) {
2624 // ----------- S t a t e -------------
2625 // -- rax : receiver
2626 // -- rcx : name
2627 // -- rsp[0] : return address
2628 // -----------------------------------
2629 Label miss;
2630
2631 Failure* failure = Failure::InternalError();
2632 bool success = GenerateLoadCallback(object, holder, rax, rcx, rdx, rbx, rdi,
2633 callback, name, &miss, &failure);
2634 if (!success) {
2635 miss.Unuse();
2636 return failure;
2637 }
2638
2639 __ bind(&miss);
2640 GenerateLoadMiss(masm(), Code::LOAD_IC);
2641
2642 // Return the generated code.
2643 return GetCode(CALLBACKS, name);
2644 }
2645
2646
2647 MaybeObject* LoadStubCompiler::CompileLoadConstant(JSObject* object,
2648 JSObject* holder,
2649 Object* value,
2650 String* name) {
2651 // ----------- S t a t e -------------
2652 // -- rax : receiver
2653 // -- rcx : name
2654 // -- rsp[0] : return address
2655 // -----------------------------------
2656 Label miss;
2657
2658 GenerateLoadConstant(object, holder, rax, rbx, rdx, rdi, value, name, &miss);
2659 __ bind(&miss);
2660 GenerateLoadMiss(masm(), Code::LOAD_IC);
2661
2662 // Return the generated code.
2663 return GetCode(CONSTANT_FUNCTION, name);
2664 }
2665
2666
1999 MaybeObject* LoadStubCompiler::CompileLoadInterceptor(JSObject* receiver, 2667 MaybeObject* LoadStubCompiler::CompileLoadInterceptor(JSObject* receiver,
2000 JSObject* holder, 2668 JSObject* holder,
2001 String* name) { 2669 String* name) {
2002 // ----------- S t a t e ------------- 2670 // ----------- S t a t e -------------
2003 // -- rax : receiver 2671 // -- rax : receiver
2004 // -- rcx : name 2672 // -- rcx : name
2005 // -- rsp[0] : return address 2673 // -- rsp[0] : return address
2006 // ----------------------------------- 2674 // -----------------------------------
2007 Label miss; 2675 Label miss;
2008 2676
(...skipping 62 matching lines...) Expand 10 before | Expand all | Expand 10 after
2071 2739
2072 __ bind(&miss); 2740 __ bind(&miss);
2073 __ IncrementCounter(COUNTERS->named_load_global_stub_miss(), 1); 2741 __ IncrementCounter(COUNTERS->named_load_global_stub_miss(), 1);
2074 GenerateLoadMiss(masm(), Code::LOAD_IC); 2742 GenerateLoadMiss(masm(), Code::LOAD_IC);
2075 2743
2076 // Return the generated code. 2744 // Return the generated code.
2077 return GetCode(NORMAL, name); 2745 return GetCode(NORMAL, name);
2078 } 2746 }
2079 2747
2080 2748
2081 MaybeObject* KeyedLoadStubCompiler::CompileLoadCallback( 2749 MaybeObject* KeyedLoadStubCompiler::CompileLoadField(String* name,
2082 String* name, 2750 JSObject* receiver,
2083 JSObject* receiver, 2751 JSObject* holder,
2084 JSObject* holder, 2752 int index) {
2085 AccessorInfo* callback) {
2086 // ----------- S t a t e ------------- 2753 // ----------- S t a t e -------------
2087 // -- rax : key 2754 // -- rax : key
2088 // -- rdx : receiver 2755 // -- rdx : receiver
2089 // -- rsp[0] : return address 2756 // -- rsp[0] : return address
2090 // ----------------------------------- 2757 // -----------------------------------
2091 Label miss; 2758 Label miss;
2092 2759
2760 __ IncrementCounter(COUNTERS->keyed_load_field(), 1);
2761
2762 // Check that the name has not changed.
2763 __ Cmp(rax, Handle<String>(name));
2764 __ j(not_equal, &miss);
2765
2766 GenerateLoadField(receiver, holder, rdx, rbx, rcx, rdi, index, name, &miss);
2767
2768 __ bind(&miss);
2769 __ DecrementCounter(COUNTERS->keyed_load_field(), 1);
2770 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
2771
2772 // Return the generated code.
2773 return GetCode(FIELD, name);
2774 }
2775
2776
2777 MaybeObject* KeyedLoadStubCompiler::CompileLoadCallback(
2778 String* name,
2779 JSObject* receiver,
2780 JSObject* holder,
2781 AccessorInfo* callback) {
2782 // ----------- S t a t e -------------
2783 // -- rax : key
2784 // -- rdx : receiver
2785 // -- rsp[0] : return address
2786 // -----------------------------------
2787 Label miss;
2788
2093 __ IncrementCounter(COUNTERS->keyed_load_callback(), 1); 2789 __ IncrementCounter(COUNTERS->keyed_load_callback(), 1);
2094 2790
2095 // Check that the name has not changed. 2791 // Check that the name has not changed.
2096 __ Cmp(rax, Handle<String>(name)); 2792 __ Cmp(rax, Handle<String>(name));
2097 __ j(not_equal, &miss); 2793 __ j(not_equal, &miss);
2098 2794
2099 Failure* failure = Failure::InternalError(); 2795 Failure* failure = Failure::InternalError();
2100 bool success = GenerateLoadCallback(receiver, holder, rdx, rax, rbx, rcx, rdi, 2796 bool success = GenerateLoadCallback(receiver, holder, rdx, rax, rbx, rcx, rdi,
2101 callback, name, &miss, &failure); 2797 callback, name, &miss, &failure);
2102 if (!success) { 2798 if (!success) {
2103 miss.Unuse(); 2799 miss.Unuse();
2104 return failure; 2800 return failure;
2105 } 2801 }
2106 2802
2107 __ bind(&miss); 2803 __ bind(&miss);
2804
2108 __ DecrementCounter(COUNTERS->keyed_load_callback(), 1); 2805 __ DecrementCounter(COUNTERS->keyed_load_callback(), 1);
2109 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC); 2806 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
2110 2807
2111 // Return the generated code. 2808 // Return the generated code.
2112 return GetCode(CALLBACKS, name);
2113 }
2114
2115
2116 MaybeObject* KeyedLoadStubCompiler::CompileLoadArrayLength(String* name) {
2117 // ----------- S t a t e -------------
2118 // -- rax : key
2119 // -- rdx : receiver
2120 // -- rsp[0] : return address
2121 // -----------------------------------
2122 Label miss;
2123
2124 __ IncrementCounter(COUNTERS->keyed_load_array_length(), 1);
2125
2126 // Check that the name has not changed.
2127 __ Cmp(rax, Handle<String>(name));
2128 __ j(not_equal, &miss);
2129
2130 GenerateLoadArrayLength(masm(), rdx, rcx, &miss);
2131 __ bind(&miss);
2132 __ DecrementCounter(COUNTERS->keyed_load_array_length(), 1);
2133 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
2134
2135 // Return the generated code.
2136 return GetCode(CALLBACKS, name); 2809 return GetCode(CALLBACKS, name);
2137 } 2810 }
2138 2811
2139 2812
2140 MaybeObject* KeyedLoadStubCompiler::CompileLoadConstant(String* name, 2813 MaybeObject* KeyedLoadStubCompiler::CompileLoadConstant(String* name,
2141 JSObject* receiver, 2814 JSObject* receiver,
2142 JSObject* holder, 2815 JSObject* holder,
2143 Object* value) { 2816 Object* value) {
2144 // ----------- S t a t e ------------- 2817 // ----------- S t a t e -------------
2145 // -- rax : key 2818 // -- rax : key
(...skipping 12 matching lines...) Expand all
2158 value, name, &miss); 2831 value, name, &miss);
2159 __ bind(&miss); 2832 __ bind(&miss);
2160 __ DecrementCounter(COUNTERS->keyed_load_constant_function(), 1); 2833 __ DecrementCounter(COUNTERS->keyed_load_constant_function(), 1);
2161 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC); 2834 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
2162 2835
2163 // Return the generated code. 2836 // Return the generated code.
2164 return GetCode(CONSTANT_FUNCTION, name); 2837 return GetCode(CONSTANT_FUNCTION, name);
2165 } 2838 }
2166 2839
2167 2840
2168 MaybeObject* KeyedLoadStubCompiler::CompileLoadFunctionPrototype(String* name) {
2169 // ----------- S t a t e -------------
2170 // -- rax : key
2171 // -- rdx : receiver
2172 // -- rsp[0] : return address
2173 // -----------------------------------
2174 Label miss;
2175
2176 __ IncrementCounter(COUNTERS->keyed_load_function_prototype(), 1);
2177
2178 // Check that the name has not changed.
2179 __ Cmp(rax, Handle<String>(name));
2180 __ j(not_equal, &miss);
2181
2182 GenerateLoadFunctionPrototype(masm(), rdx, rcx, rbx, &miss);
2183 __ bind(&miss);
2184 __ DecrementCounter(COUNTERS->keyed_load_function_prototype(), 1);
2185 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
2186
2187 // Return the generated code.
2188 return GetCode(CALLBACKS, name);
2189 }
2190
2191
2192 MaybeObject* KeyedLoadStubCompiler::CompileLoadInterceptor(JSObject* receiver, 2841 MaybeObject* KeyedLoadStubCompiler::CompileLoadInterceptor(JSObject* receiver,
2193 JSObject* holder, 2842 JSObject* holder,
2194 String* name) { 2843 String* name) {
2195 // ----------- S t a t e ------------- 2844 // ----------- S t a t e -------------
2196 // -- rax : key 2845 // -- rax : key
2197 // -- rdx : receiver 2846 // -- rdx : receiver
2198 // -- rsp[0] : return address 2847 // -- rsp[0] : return address
2199 // ----------------------------------- 2848 // -----------------------------------
2200 Label miss; 2849 Label miss;
2201 2850
(...skipping 17 matching lines...) Expand all
2219 &miss); 2868 &miss);
2220 __ bind(&miss); 2869 __ bind(&miss);
2221 __ DecrementCounter(COUNTERS->keyed_load_interceptor(), 1); 2870 __ DecrementCounter(COUNTERS->keyed_load_interceptor(), 1);
2222 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC); 2871 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
2223 2872
2224 // Return the generated code. 2873 // Return the generated code.
2225 return GetCode(INTERCEPTOR, name); 2874 return GetCode(INTERCEPTOR, name);
2226 } 2875 }
2227 2876
2228 2877
2229 MaybeObject* KeyedLoadStubCompiler::CompileLoadStringLength(String* name) { 2878 MaybeObject* KeyedLoadStubCompiler::CompileLoadArrayLength(String* name) {
2230 // ----------- S t a t e ------------- 2879 // ----------- S t a t e -------------
2231 // -- rax : key 2880 // -- rax : key
2232 // -- rdx : receiver 2881 // -- rdx : receiver
2233 // -- rsp[0] : return address 2882 // -- rsp[0] : return address
2234 // ----------------------------------- 2883 // -----------------------------------
2235 Label miss; 2884 Label miss;
2236 2885
2237 __ IncrementCounter(COUNTERS->keyed_load_string_length(), 1); 2886 __ IncrementCounter(COUNTERS->keyed_load_array_length(), 1);
2238 2887
2239 // Check that the name has not changed. 2888 // Check that the name has not changed.
2240 __ Cmp(rax, Handle<String>(name)); 2889 __ Cmp(rax, Handle<String>(name));
2241 __ j(not_equal, &miss); 2890 __ j(not_equal, &miss);
2242 2891
2243 GenerateLoadStringLength(masm(), rdx, rcx, rbx, &miss); 2892 GenerateLoadArrayLength(masm(), rdx, rcx, &miss);
2244 __ bind(&miss); 2893 __ bind(&miss);
2245 __ DecrementCounter(COUNTERS->keyed_load_string_length(), 1); 2894 __ DecrementCounter(COUNTERS->keyed_load_array_length(), 1);
2246 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC); 2895 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
2247 2896
2248 // Return the generated code. 2897 // Return the generated code.
2249 return GetCode(CALLBACKS, name); 2898 return GetCode(CALLBACKS, name);
2250 } 2899 }
2251 2900
2252 2901
2902 MaybeObject* KeyedLoadStubCompiler::CompileLoadStringLength(String* name) {
2903 // ----------- S t a t e -------------
2904 // -- rax : key
2905 // -- rdx : receiver
2906 // -- rsp[0] : return address
2907 // -----------------------------------
2908 Label miss;
2909
2910 __ IncrementCounter(COUNTERS->keyed_load_string_length(), 1);
2911
2912 // Check that the name has not changed.
2913 __ Cmp(rax, Handle<String>(name));
2914 __ j(not_equal, &miss);
2915
2916 GenerateLoadStringLength(masm(), rdx, rcx, rbx, &miss);
2917 __ bind(&miss);
2918 __ DecrementCounter(COUNTERS->keyed_load_string_length(), 1);
2919 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
2920
2921 // Return the generated code.
2922 return GetCode(CALLBACKS, name);
2923 }
2924
2925
2926 MaybeObject* KeyedLoadStubCompiler::CompileLoadFunctionPrototype(String* name) {
2927 // ----------- S t a t e -------------
2928 // -- rax : key
2929 // -- rdx : receiver
2930 // -- rsp[0] : return address
2931 // -----------------------------------
2932 Label miss;
2933
2934 __ IncrementCounter(COUNTERS->keyed_load_function_prototype(), 1);
2935
2936 // Check that the name has not changed.
2937 __ Cmp(rax, Handle<String>(name));
2938 __ j(not_equal, &miss);
2939
2940 GenerateLoadFunctionPrototype(masm(), rdx, rcx, rbx, &miss);
2941 __ bind(&miss);
2942 __ DecrementCounter(COUNTERS->keyed_load_function_prototype(), 1);
2943 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
2944
2945 // Return the generated code.
2946 return GetCode(CALLBACKS, name);
2947 }
2948
2949
2253 MaybeObject* KeyedLoadStubCompiler::CompileLoadSpecialized(JSObject* receiver) { 2950 MaybeObject* KeyedLoadStubCompiler::CompileLoadSpecialized(JSObject* receiver) {
2254 // ----------- S t a t e ------------- 2951 // ----------- S t a t e -------------
2255 // -- rax : key 2952 // -- rax : key
2256 // -- rdx : receiver 2953 // -- rdx : receiver
2257 // -- esp[0] : return address 2954 // -- esp[0] : return address
2258 // ----------------------------------- 2955 // -----------------------------------
2259 Label miss; 2956 Label miss;
2260 2957
2261 // Check that the receiver isn't a smi.
2262 __ JumpIfSmi(rdx, &miss);
2263
2264 // Check that the map matches.
2265 __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset),
2266 Handle<Map>(receiver->map()));
2267 __ j(not_equal, &miss);
2268
2269 // Check that the key is a smi.
2270 __ JumpIfNotSmi(rax, &miss);
2271
2272 // Get the elements array.
2273 __ movq(rcx, FieldOperand(rdx, JSObject::kElementsOffset));
2274 __ AssertFastElements(rcx);
2275
2276 // Check that the key is within bounds.
2277 __ SmiCompare(rax, FieldOperand(rcx, FixedArray::kLengthOffset));
2278 __ j(above_equal, &miss);
2279
2280 // Load the result and make sure it's not the hole.
2281 SmiIndex index = masm()->SmiToIndex(rbx, rax, kPointerSizeLog2);
2282 __ movq(rbx, FieldOperand(rcx,
2283 index.reg,
2284 index.scale,
2285 FixedArray::kHeaderSize));
2286 __ CompareRoot(rbx, Heap::kTheHoleValueRootIndex);
2287 __ j(equal, &miss);
2288 __ movq(rax, rbx);
2289 __ ret(0);
2290
2291 __ bind(&miss);
2292 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
2293
2294 // Return the generated code.
2295 return GetCode(NORMAL, NULL);
2296 }
2297
2298
2299 MaybeObject* StoreStubCompiler::CompileStoreCallback(JSObject* object,
2300 AccessorInfo* callback,
2301 String* name) {
2302 // ----------- S t a t e -------------
2303 // -- rax : value
2304 // -- rcx : name
2305 // -- rdx : receiver
2306 // -- rsp[0] : return address
2307 // -----------------------------------
2308 Label miss;
2309
2310 // Check that the object isn't a smi.
2311 __ JumpIfSmi(rdx, &miss);
2312
2313 // Check that the map of the object hasn't changed.
2314 __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset),
2315 Handle<Map>(object->map()));
2316 __ j(not_equal, &miss);
2317
2318 // Perform global security token check if needed.
2319 if (object->IsJSGlobalProxy()) {
2320 __ CheckAccessGlobalProxy(rdx, rbx, &miss);
2321 }
2322
2323 // Stub never generated for non-global objects that require access
2324 // checks.
2325 ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
2326
2327 __ pop(rbx); // remove the return address
2328 __ push(rdx); // receiver
2329 __ Push(Handle<AccessorInfo>(callback)); // callback info
2330 __ push(rcx); // name
2331 __ push(rax); // value
2332 __ push(rbx); // restore return address
2333
2334 // Do tail-call to the runtime system.
2335 ExternalReference store_callback_property =
2336 ExternalReference(IC_Utility(IC::kStoreCallbackProperty));
2337 __ TailCallExternalReference(store_callback_property, 4, 1);
2338
2339 // Handle store cache miss.
2340 __ bind(&miss);
2341 Handle<Code> ic(Isolate::Current()->builtins()->builtin(
2342 Builtins::StoreIC_Miss));
2343 __ Jump(ic, RelocInfo::CODE_TARGET);
2344
2345 // Return the generated code.
2346 return GetCode(CALLBACKS, name);
2347 }
2348
2349
2350 MaybeObject* StoreStubCompiler::CompileStoreField(JSObject* object,
2351 int index,
2352 Map* transition,
2353 String* name) {
2354 // ----------- S t a t e -------------
2355 // -- rax : value
2356 // -- rcx : name
2357 // -- rdx : receiver
2358 // -- rsp[0] : return address
2359 // -----------------------------------
2360 Label miss;
2361
2362 // Generate store field code. Preserves receiver and name on jump to miss.
2363 GenerateStoreField(masm(),
2364 object,
2365 index,
2366 transition,
2367 rdx, rcx, rbx,
2368 &miss);
2369
2370 // Handle store cache miss.
2371 __ bind(&miss);
2372 Handle<Code> ic(Isolate::Current()->builtins()->builtin(
2373 Builtins::StoreIC_Miss));
2374 __ Jump(ic, RelocInfo::CODE_TARGET);
2375
2376 // Return the generated code.
2377 return GetCode(transition == NULL ? FIELD : MAP_TRANSITION, name);
2378 }
2379
2380
// Compiles a StoreIC stub for a named store that must be routed through
// the receiver's named interceptor.  The stub re-validates the receiver
// (non-smi, unchanged map, and a global-proxy access check when needed)
// and then tail-calls the kStoreInterceptorProperty runtime entry with
// (receiver, name, value).  Any failed guard falls through to the
// generic StoreIC miss builtin.
2381 MaybeObject* StoreStubCompiler::CompileStoreInterceptor(JSObject* receiver,
2382 String* name) {
2383 // ----------- S t a t e -------------
2384 // -- rax : value
2385 // -- rcx : name
2386 // -- rdx : receiver
2387 // -- rsp[0] : return address
2388 // -----------------------------------
2389 Label miss;
2390
2391 // Check that the object isn't a smi.
2392 __ JumpIfSmi(rdx, &miss);
2393
2394 // Check that the map of the object hasn't changed.
// This specializes the stub to the receiver's hidden class at compile
// time; a map change invalidates the interceptor assumption.
2395 __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset),
2396 Handle<Map>(receiver->map()));
2397 __ j(not_equal, &miss);
2398
2399 // Perform global security token check if needed.
2400 if (receiver->IsJSGlobalProxy()) {
2401 __ CheckAccessGlobalProxy(rdx, rbx, &miss);
2402 }
2403
2404 // Stub never generated for non-global objects that require access
2405 // checks.
2406 ASSERT(receiver->IsJSGlobalProxy() || !receiver->IsAccessCheckNeeded());
2407
// Re-arrange the stack so the runtime call sees (receiver, name, value)
// below the return address.
2408 __ pop(rbx); // remove the return address
2409 __ push(rdx); // receiver
2410 __ push(rcx); // name
2411 __ push(rax); // value
2412 __ push(rbx); // restore return address
2413
2414 // Do tail-call to the runtime system.
// Three stack arguments were pushed above; the trailing 1 is the result
// size passed to TailCallExternalReference.
2415 ExternalReference store_ic_property =
2416 ExternalReference(IC_Utility(IC::kStoreInterceptorProperty));
2417 __ TailCallExternalReference(store_ic_property, 3, 1);
2418
2419 // Handle store cache miss.
2420 __ bind(&miss);
2421 Handle<Code> ic(Isolate::Current()->builtins()->builtin(
2422 Builtins::StoreIC_Miss));
2423 __ Jump(ic, RelocInfo::CODE_TARGET);
2424
2425 // Return the generated code.
2426 return GetCode(INTERCEPTOR, name);
2427 }
2428
2429
// Compiles a StoreIC stub that writes directly into a global property
// cell.  After verifying that the global object's map is unchanged, the
// value is stored into the cell and returned in rax.  Counters track
// inline hits and misses; a miss falls through to the generic StoreIC
// miss builtin.
2430 MaybeObject* StoreStubCompiler::CompileStoreGlobal(GlobalObject* object,
2431 JSGlobalPropertyCell* cell,
2432 String* name) {
2433 // ----------- S t a t e -------------
2434 // -- rax : value
2435 // -- rcx : name
2436 // -- rdx : receiver
2437 // -- rsp[0] : return address
2438 // -----------------------------------
2439 Label miss;
2440
2441 // Check that the map of the global has not changed.
2442 __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset),
2443 Handle<Map>(object->map()));
2444 __ j(not_equal, &miss);
2445
2446 // Store the value in the cell.
// NOTE(review): there is no check that the cell still holds a live
// property (e.g. a hole marker after deletion), and no write barrier
// after the cell store — presumably the map check plus cell-scanning
// rules make both safe here; confirm against the GC/IC invariants.
2447 __ Move(rcx, Handle<JSGlobalPropertyCell>(cell));
2448 __ movq(FieldOperand(rcx, JSGlobalPropertyCell::kValueOffset), rax);
2449
2450 // Return the value (register rax).
2451 __ IncrementCounter(COUNTERS->named_store_global_inline(), 1);
2452 __ ret(0);
2453
2454 // Handle store cache miss.
2455 __ bind(&miss);
2456 __ IncrementCounter(COUNTERS->named_store_global_inline_miss(), 1);
2457 Handle<Code> ic(Isolate::Current()->builtins()->builtin(
2458 Builtins::StoreIC_Miss));
2459 __ Jump(ic, RelocInfo::CODE_TARGET);
2460
2461 // Return the generated code.
2462 return GetCode(NORMAL, name);
2463 }
2464
2465
// Compiles a KeyedLoadIC stub specialized for one property name: if the
// key in rax equals |name|, the field load is performed via
// GenerateLoadField (which also checks the prototype chain); otherwise
// control falls through to the generic keyed-load miss handler.
2466 MaybeObject* KeyedLoadStubCompiler::CompileLoadField(String* name,
2467 JSObject* receiver,
2468 JSObject* holder,
2469 int index) {
2470 // ----------- S t a t e -------------
2471 // -- rax : key
2472 // -- rdx : receiver
2473 // -- rsp[0] : return address
2474 // -----------------------------------
2475 Label miss;
2476
// The counter is bumped optimistically here and decremented again on
// the miss path below, so it only counts successful fast loads.
2477 __ IncrementCounter(COUNTERS->keyed_load_field(), 1);
2478
2479 // Check that the name has not changed.
2480 __ Cmp(rax, Handle<String>(name));
2481 __ j(not_equal, &miss);
2482
// rbx, rcx and rdi serve as scratch registers for the prototype walk.
2483 GenerateLoadField(receiver, holder, rdx, rbx, rcx, rdi, index, name, &miss);
2484
2485 __ bind(&miss);
2486 __ DecrementCounter(COUNTERS->keyed_load_field(), 1);
2487 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
2488
2489 // Return the generated code.
2490 return GetCode(FIELD, name);
2491 }
2492
2493
// Compiles a KeyedStoreIC stub specialized for one property name: if
// the key in rcx equals |name|, the field store (possibly with a map
// transition) is emitted via GenerateStoreField; otherwise control
// falls through to the generic keyed-store miss handler.
2494 MaybeObject* KeyedStoreStubCompiler::CompileStoreField(JSObject* object,
2495 int index,
2496 Map* transition,
2497 String* name) {
2498 // ----------- S t a t e -------------
2499 // -- rax : value
2500 // -- rcx : key
2501 // -- rdx : receiver
2502 // -- rsp[0] : return address
2503 // -----------------------------------
2504 Label miss;
2505
// Bumped optimistically; undone on the miss path so only fast stores
// are counted.
2506 __ IncrementCounter(COUNTERS->keyed_store_field(), 1);
2507
2508 // Check that the name has not changed.
2509 __ Cmp(rcx, Handle<String>(name));
2510 __ j(not_equal, &miss);
2511
2512 // Generate store field code. Preserves receiver and name on jump to miss.
2513 GenerateStoreField(masm(),
2514 object,
2515 index,
2516 transition,
2517 rdx, rcx, rbx,
2518 &miss);
2519
2520 // Handle store cache miss.
2521 __ bind(&miss);
2522 __ DecrementCounter(COUNTERS->keyed_store_field(), 1);
2523 Handle<Code> ic(Isolate::Current()->builtins()->builtin(
2524 Builtins::KeyedStoreIC_Miss));
2525 __ Jump(ic, RelocInfo::CODE_TARGET);
2526
2527 // Return the generated code.
// A NULL transition means a plain in-place field store; otherwise the
// stub performs the map transition and is cached as such.
2528 return GetCode(transition == NULL ? FIELD : MAP_TRANSITION, name);
2529 }
2530
2531
2532 MaybeObject* KeyedStoreStubCompiler::CompileStoreSpecialized(
2533 JSObject* receiver) {
2534 // ----------- S t a t e -------------
2535 // -- rax : value
2536 // -- rcx : key
2537 // -- rdx : receiver
2538 // -- rsp[0] : return address
2539 // -----------------------------------
2540 Label miss;
2541
2542 // Check that the receiver isn't a smi. 2958 // Check that the receiver isn't a smi.
2543 __ JumpIfSmi(rdx, &miss); 2959 __ JumpIfSmi(rdx, &miss);
2544 2960
2545 // Check that the map matches. 2961 // Check that the map matches.
2546 __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset), 2962 __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset),
2547 Handle<Map>(receiver->map())); 2963 Handle<Map>(receiver->map()));
2548 __ j(not_equal, &miss); 2964 __ j(not_equal, &miss);
2549 2965
2550 // Check that the key is a smi. 2966 // Check that the key is a smi.
2551 __ JumpIfNotSmi(rcx, &miss); 2967 __ JumpIfNotSmi(rax, &miss);
2552 2968
2553 // Get the elements array and make sure it is a fast element array, not 'cow'. 2969 // Get the elements array.
2554 __ movq(rdi, FieldOperand(rdx, JSObject::kElementsOffset)); 2970 __ movq(rcx, FieldOperand(rdx, JSObject::kElementsOffset));
2555 __ Cmp(FieldOperand(rdi, HeapObject::kMapOffset), 2971 __ AssertFastElements(rcx);
2556 FACTORY->fixed_array_map());
2557 __ j(not_equal, &miss);
2558 2972
2559 // Check that the key is within bounds. 2973 // Check that the key is within bounds.
2560 if (receiver->IsJSArray()) { 2974 __ SmiCompare(rax, FieldOperand(rcx, FixedArray::kLengthOffset));
2561 __ SmiCompare(rcx, FieldOperand(rdx, JSArray::kLengthOffset)); 2975 __ j(above_equal, &miss);
2562 __ j(above_equal, &miss);
2563 } else {
2564 __ SmiCompare(rcx, FieldOperand(rdi, FixedArray::kLengthOffset));
2565 __ j(above_equal, &miss);
2566 }
2567 2976
2568 // Do the store and update the write barrier. Make sure to preserve 2977 // Load the result and make sure it's not the hole.
2569 // the value in register eax. 2978 SmiIndex index = masm()->SmiToIndex(rbx, rax, kPointerSizeLog2);
2570 __ movq(rdx, rax); 2979 __ movq(rbx, FieldOperand(rcx,
2571 __ SmiToInteger32(rcx, rcx); 2980 index.reg,
2572 __ movq(FieldOperand(rdi, rcx, times_pointer_size, FixedArray::kHeaderSize), 2981 index.scale,
2573 rax); 2982 FixedArray::kHeaderSize));
2574 __ RecordWrite(rdi, 0, rdx, rcx); 2983 __ CompareRoot(rbx, Heap::kTheHoleValueRootIndex);
2575 2984 __ j(equal, &miss);
2576 // Done. 2985 __ movq(rax, rbx);
2577 __ ret(0); 2986 __ ret(0);
2578 2987
2579 // Handle store cache miss.
2580 __ bind(&miss); 2988 __ bind(&miss);
2581 Handle<Code> ic( 2989 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
2582 Isolate::Current()->builtins()->builtin(Builtins::KeyedStoreIC_Miss));
2583 __ jmp(ic, RelocInfo::CODE_TARGET);
2584 2990
2585 // Return the generated code. 2991 // Return the generated code.
2586 return GetCode(NORMAL, NULL); 2992 return GetCode(NORMAL, NULL);
2587 } 2993 }
2588 2994
2589 2995
// Emits code that loads a property guarded by a named interceptor on
// |interceptor_holder|.  When the post-interceptor lookup (|lookup|) is
// a cacheable FIELD or CALLBACKS property, the interceptor call and the
// follow-up load are compiled inline; otherwise the whole load is
// delegated to the runtime.  Any failed map or access check jumps to
// |miss|.  An inline result is returned in rax.
2590 void StubCompiler::GenerateLoadInterceptor(JSObject* object,
2591 JSObject* interceptor_holder,
2592 LookupResult* lookup,
2593 Register receiver,
2594 Register name_reg,
2595 Register scratch1,
2596 Register scratch2,
2597 Register scratch3,
2598 String* name,
2599 Label* miss) {
2600 ASSERT(interceptor_holder->HasNamedInterceptor());
2601 ASSERT(!interceptor_holder->GetNamedInterceptor()->getter()->IsUndefined());
2602
2603 // Check that the receiver isn't a smi.
2604 __ JumpIfSmi(receiver, miss);
2605
2606 // So far the most popular follow ups for interceptor loads are FIELD
2607 // and CALLBACKS, so inline only them, other cases may be added
2608 // later.
2609 bool compile_followup_inline = false;
2610 if (lookup->IsProperty() && lookup->IsCacheable()) {
2611 if (lookup->type() == FIELD) {
2612 compile_followup_inline = true;
2613 } else if (lookup->type() == CALLBACKS &&
2614 lookup->GetCallbackObject()->IsAccessorInfo() &&
2615 AccessorInfo::cast(lookup->GetCallbackObject())->getter() != NULL) {
2616 compile_followup_inline = true;
2617 }
2618 }
2619
2620 if (compile_followup_inline) {
2621 // Compile the interceptor call, followed by inline code to load the
2622 // property from further up the prototype chain if the call fails.
2623 // Check that the maps haven't changed.
2624 Register holder_reg = CheckPrototypes(object, receiver, interceptor_holder,
2625 scratch1, scratch2, scratch3,
2626 name, miss);
2627 ASSERT(holder_reg.is(receiver) || holder_reg.is(scratch1));
2628
2629 // Save necessary data before invoking an interceptor.
2630 // Requires a frame to make GC aware of pushed pointers.
2631 __ EnterInternalFrame();
2632
2633 if (lookup->type() == CALLBACKS && !receiver.is(holder_reg)) {
2634 // CALLBACKS case needs a receiver to be passed into C++ callback.
2635 __ push(receiver);
2636 }
2637 __ push(holder_reg);
2638 __ push(name_reg);
2639
2640 // Invoke an interceptor. Note: map checks from receiver to
2641 // interceptor's holder has been compiled before (see a caller
2642 // of this method.)
2643 CompileCallLoadPropertyWithInterceptor(masm(),
2644 receiver,
2645 holder_reg,
2646 name_reg,
2647 interceptor_holder);
2648
2649 // Check if interceptor provided a value for property. If it's
2650 // the case, return immediately.
// The sentinel root marks "interceptor declined"; anything else in rax
// is the actual property value.
2651 Label interceptor_failed;
2652 __ CompareRoot(rax, Heap::kNoInterceptorResultSentinelRootIndex);
2653 __ j(equal, &interceptor_failed);
2654 __ LeaveInternalFrame();
2655 __ ret(0);
2656
// Pops mirror the pushes above, in reverse order, before leaving the
// internal frame.
2657 __ bind(&interceptor_failed);
2658 __ pop(name_reg);
2659 __ pop(holder_reg);
2660 if (lookup->type() == CALLBACKS && !receiver.is(holder_reg)) {
2661 __ pop(receiver);
2662 }
2663
2664 __ LeaveInternalFrame();
2665
2666 // Check that the maps from interceptor's holder to lookup's holder
2667 // haven't changed. And load lookup's holder into |holder| register.
2668 if (interceptor_holder != lookup->holder()) {
2669 holder_reg = CheckPrototypes(interceptor_holder,
2670 holder_reg,
2671 lookup->holder(),
2672 scratch1,
2673 scratch2,
2674 scratch3,
2675 name,
2676 miss);
2677 }
2678
2679 if (lookup->type() == FIELD) {
2680 // We found FIELD property in prototype chain of interceptor's holder.
2681 // Retrieve a field from field's holder.
2682 GenerateFastPropertyLoad(masm(), rax, holder_reg,
2683 lookup->holder(), lookup->GetFieldIndex());
2684 __ ret(0);
2685 } else {
2686 // We found CALLBACKS property in prototype chain of interceptor's
2687 // holder.
2688 ASSERT(lookup->type() == CALLBACKS);
2689 ASSERT(lookup->GetCallbackObject()->IsAccessorInfo());
2690 AccessorInfo* callback = AccessorInfo::cast(lookup->GetCallbackObject());
2691 ASSERT(callback != NULL);
2692 ASSERT(callback->getter() != NULL);
2693
2694 // Tail call to runtime.
2695 // Important invariant in CALLBACKS case: the code above must be
2696 // structured to never clobber |receiver| register.
// Five words are pushed below the return address: receiver, holder,
// callback data, the AccessorInfo object and the name — matching the
// argument count 5 in the tail call.
2697 __ pop(scratch2); // return address
2698 __ push(receiver);
2699 __ push(holder_reg);
2700 __ Move(holder_reg, Handle<AccessorInfo>(callback));
2701 __ push(FieldOperand(holder_reg, AccessorInfo::kDataOffset));
2702 __ push(holder_reg);
2703 __ push(name_reg);
2704 __ push(scratch2); // restore return address
2705
2706 ExternalReference ref =
2707 ExternalReference(IC_Utility(IC::kLoadCallbackProperty));
2708 __ TailCallExternalReference(ref, 5, 1);
2709 }
2710 } else { // !compile_followup_inline
2711 // Call the runtime system to load the interceptor.
2712 // Check that the maps haven't changed.
2713 Register holder_reg = CheckPrototypes(object, receiver, interceptor_holder,
2714 scratch1, scratch2, scratch3,
2715 name, miss);
2716 __ pop(scratch2); // save old return address
2717 PushInterceptorArguments(masm(), receiver, holder_reg,
2718 name_reg, interceptor_holder);
2719 __ push(scratch2); // restore old return address
2720
2721 ExternalReference ref = ExternalReference(
2722 IC_Utility(IC::kLoadPropertyWithInterceptorForLoad));
2723 __ TailCallExternalReference(ref, 5, 1);
2724 }
2725 }
2726
2727
// Emits code that loads a property through a C++ AccessorInfo getter.
// After smi and prototype-chain map checks, the stub builds a
// v8::AccessorInfo argument block on the stack and calls the getter via
// TryCallApiFunctionAndReturn.  Returns false (and sets |*failure|)
// when emitting the API call itself fails with an allocation failure;
// runtime guard failures jump to |miss|.
2728 bool StubCompiler::GenerateLoadCallback(JSObject* object,
2729 JSObject* holder,
2730 Register receiver,
2731 Register name_reg,
2732 Register scratch1,
2733 Register scratch2,
2734 Register scratch3,
2735 AccessorInfo* callback,
2736 String* name,
2737 Label* miss,
2738 Failure** failure) {
2739 // Check that the receiver isn't a smi.
2740 __ JumpIfSmi(receiver, miss);
2741
2742 // Check that the maps haven't changed.
2743 Register reg =
2744 CheckPrototypes(object, receiver, holder, scratch1,
2745 scratch2, scratch3, name, miss);
2746
2747 Handle<AccessorInfo> callback_handle(callback);
2748
2749 // Insert additional parameters into the stack frame above return address.
2750 ASSERT(!scratch2.is(reg));
2751 __ pop(scratch2); // Get return address to place it below.
2752
2753 __ push(receiver); // receiver
2754 __ push(reg); // holder
// Data living in new space may move, so it must be reloaded from the
// AccessorInfo object at runtime instead of being embedded directly.
2755 if (object->GetHeap()->InNewSpace(callback_handle->data())) {
2756 __ Move(scratch1, callback_handle);
2757 __ push(FieldOperand(scratch1, AccessorInfo::kDataOffset)); // data
2758 } else {
2759 __ Push(Handle<Object>(callback_handle->data()));
2760 }
2761 __ push(name_reg); // name
2762 // Save a pointer to where we pushed the arguments pointer.
2763 // This will be passed as the const AccessorInfo& to the C++ callback.
2764
2765 #ifdef _WIN64
2766 // Win64 uses first register--rcx--for returned value.
2767 Register accessor_info_arg = r8;
2768 Register name_arg = rdx;
2769 #else
2770 Register accessor_info_arg = rsi;
2771 Register name_arg = rdi;
2772 #endif
2773
2774 ASSERT(!name_arg.is(scratch2));
2775 __ movq(name_arg, rsp);
2776 __ push(scratch2); // Restore return address.
2777
2778 // Do call through the api.
2779 Address getter_address = v8::ToCData<Address>(callback->getter());
2780 ApiFunction fun(getter_address);
2781
2782 // 3 elements array for v8::Arguments::values_ and handler for name.
2783 const int kStackSpace = 4;
2784
2785 // Allocate v8::AccessorInfo in non-GCed stack space.
2786 const int kArgStackSpace = 1;
2787
2788 __ PrepareCallApiFunction(kArgStackSpace);
2789 __ lea(rax, Operand(name_arg, 3 * kPointerSize));
2790
2791 // v8::AccessorInfo::args_.
2792 __ movq(StackSpaceOperand(0), rax);
2793
2794 // The context register (rsi) has been saved in PrepareCallApiFunction and
2795 // could be used to pass arguments.
2796 __ lea(accessor_info_arg, StackSpaceOperand(0));
2797
2798 // Emitting a stub call may try to allocate (if the code is not
2799 // already generated). Do not allow the assembler to perform a
2800 // garbage collection but instead return the allocation failure
2801 // object.
2802 MaybeObject* result = masm()->TryCallApiFunctionAndReturn(&fun, kStackSpace);
2803 if (result->IsFailure()) {
2804 *failure = Failure::cast(result);
2805 return false;
2806 }
2807 return true;
2808 }
2809
2810
// Walks the prototype chain from |object| (whose value is in
// |object_reg|) to |holder|, emitting per-object guards: a map check,
// or a negative dictionary lookup for slow-mode objects, plus
// global-proxy access checks and (afterwards) empty-property-cell
// checks for any global objects on the chain.  Returns the register
// that holds the holder at the end of the walk (|object_reg| for an
// empty chain, otherwise |holder_reg|); jumps to |miss| on any failed
// guard.  |save_at_depth| selects one chain depth whose object is
// spilled to the stack slot rsp[kPointerSize].
2811 Register StubCompiler::CheckPrototypes(JSObject* object,
2812 Register object_reg,
2813 JSObject* holder,
2814 Register holder_reg,
2815 Register scratch1,
2816 Register scratch2,
2817 String* name,
2818 int save_at_depth,
2819 Label* miss) {
2820 // Make sure there's no overlap between holder and object registers.
2821 ASSERT(!scratch1.is(object_reg) && !scratch1.is(holder_reg));
2822 ASSERT(!scratch2.is(object_reg) && !scratch2.is(holder_reg)
2823 && !scratch2.is(scratch1));
2824
2825 // Keep track of the current object in register reg. On the first
2826 // iteration, reg is an alias for object_reg, on later iterations,
2827 // it is an alias for holder_reg.
2828 Register reg = object_reg;
2829 int depth = 0;
2830
2831 if (save_at_depth == depth) {
2832 __ movq(Operand(rsp, kPointerSize), object_reg);
2833 }
2834
2835 // Check the maps in the prototype chain.
2836 // Traverse the prototype chain from the object and do map checks.
2837 JSObject* current = object;
2838 while (current != holder) {
2839 depth++;
2840
2841 // Only global objects and objects that do not require access
2842 // checks are allowed in stubs.
2843 ASSERT(current->IsJSGlobalProxy() || !current->IsAccessCheckNeeded());
2844
2845 JSObject* prototype = JSObject::cast(current->GetPrototype());
// Slow-mode (dictionary) objects have no stable map to check, so the
// guard instead proves the property is absent from the dictionary.
2846 if (!current->HasFastProperties() &&
2847 !current->IsJSGlobalObject() &&
2848 !current->IsJSGlobalProxy()) {
// The negative dictionary lookup requires a symbol; intern the name
// first and bail out of compilation on allocation failure.
2849 if (!name->IsSymbol()) {
2850 MaybeObject* lookup_result = HEAP->LookupSymbol(name);
2851 if (lookup_result->IsFailure()) {
2852 set_failure(Failure::cast(lookup_result));
2853 return reg;
2854 } else {
2855 name = String::cast(lookup_result->ToObjectUnchecked());
2856 }
2857 }
2858 ASSERT(current->property_dictionary()->FindEntry(name) ==
2859 StringDictionary::kNotFound);
2860
2861 GenerateDictionaryNegativeLookup(masm(),
2862 miss,
2863 reg,
2864 name,
2865 scratch1,
2866 scratch2);
2867 __ movq(scratch1, FieldOperand(reg, HeapObject::kMapOffset));
2868 reg = holder_reg; // from now the object is in holder_reg
2869 __ movq(reg, FieldOperand(scratch1, Map::kPrototypeOffset));
2870 } else if (HEAP->InNewSpace(prototype)) {
2871 // Get the map of the current object.
2872 __ movq(scratch1, FieldOperand(reg, HeapObject::kMapOffset));
2873 __ Cmp(scratch1, Handle<Map>(current->map()));
2874 // Branch on the result of the map check.
2875 __ j(not_equal, miss);
2876 // Check access rights to the global object. This has to happen
2877 // after the map check so that we know that the object is
2878 // actually a global object.
2879 if (current->IsJSGlobalProxy()) {
2880 __ CheckAccessGlobalProxy(reg, scratch1, miss);
2881
2882 // Restore scratch register to be the map of the object.
2883 // We load the prototype from the map in the scratch register.
2884 __ movq(scratch1, FieldOperand(reg, HeapObject::kMapOffset));
2885 }
2886 // The prototype is in new space; we cannot store a reference
2887 // to it in the code. Load it from the map.
2888 reg = holder_reg; // from now the object is in holder_reg
2889 __ movq(reg, FieldOperand(scratch1, Map::kPrototypeOffset));
2890
2891 } else {
2892 // Check the map of the current object.
2893 __ Cmp(FieldOperand(reg, HeapObject::kMapOffset),
2894 Handle<Map>(current->map()));
2895 // Branch on the result of the map check.
2896 __ j(not_equal, miss);
2897 // Check access rights to the global object. This has to happen
2898 // after the map check so that we know that the object is
2899 // actually a global object.
2900 if (current->IsJSGlobalProxy()) {
2901 __ CheckAccessGlobalProxy(reg, scratch1, miss);
2902 }
2903 // The prototype is in old space; load it directly.
2904 reg = holder_reg; // from now the object is in holder_reg
2905 __ Move(reg, Handle<JSObject>(prototype));
2906 }
2907
2908 if (save_at_depth == depth) {
2909 __ movq(Operand(rsp, kPointerSize), reg);
2910 }
2911
2912 // Go to the next object in the prototype chain.
2913 current = prototype;
2914 }
2915
2916 // Check the holder map.
2917 __ Cmp(FieldOperand(reg, HeapObject::kMapOffset), Handle<Map>(holder->map()));
2918 __ j(not_equal, miss);
2919
2920 // Log the check depth.
2921 LOG(IntEvent("check-maps-depth", depth + 1));
2922
2923 // Perform security check for access to the global object and return
2924 // the holder register.
2925 ASSERT(current == holder);
2926 ASSERT(current->IsJSGlobalProxy() || !current->IsAccessCheckNeeded());
2927 if (current->IsJSGlobalProxy()) {
2928 __ CheckAccessGlobalProxy(reg, scratch1, miss);
2929 }
2930
2931 // If we've skipped any global objects, it's not enough to verify
2932 // that their maps haven't changed. We also need to check that the
2933 // property cell for the property is still empty.
2934 current = object;
2935 while (current != holder) {
2936 if (current->IsGlobalObject()) {
2937 MaybeObject* cell = GenerateCheckPropertyCell(masm(),
2938 GlobalObject::cast(current),
2939 name,
2940 scratch1,
2941 miss);
// Cell-check emission can itself fail to allocate; abort compilation.
2942 if (cell->IsFailure()) {
2943 set_failure(Failure::cast(cell));
2944 return reg;
2945 }
2946 }
2947 current = JSObject::cast(current->GetPrototype());
2948 }
2949
2950 // Return the register containing the holder.
2951 return reg;
2952 }
2953
2954
2955 void StubCompiler::GenerateLoadField(JSObject* object,
2956 JSObject* holder,
2957 Register receiver,
2958 Register scratch1,
2959 Register scratch2,
2960 Register scratch3,
2961 int index,
2962 String* name,
2963 Label* miss) {
2964 // Check that the receiver isn't a smi.
2965 __ JumpIfSmi(receiver, miss);
2966
2967 // Check the prototype chain.
2968 Register reg =
2969 CheckPrototypes(object, receiver, holder,
2970 scratch1, scratch2, scratch3, name, miss);
2971
2972 // Get the value from the properties.
2973 GenerateFastPropertyLoad(masm(), rax, reg, holder, index);
2974 __ ret(0);
2975 }
2976
2977
2978 void StubCompiler::GenerateLoadConstant(JSObject* object,
2979 JSObject* holder,
2980 Register receiver,
2981 Register scratch1,
2982 Register scratch2,
2983 Register scratch3,
2984 Object* value,
2985 String* name,
2986 Label* miss) {
2987 // Check that the receiver isn't a smi.
2988 __ JumpIfSmi(receiver, miss);
2989
2990 // Check that the maps haven't changed.
2991 Register reg =
2992 CheckPrototypes(object, receiver, holder,
2993 scratch1, scratch2, scratch3, name, miss);
2994
2995 // Return the constant value.
2996 __ Move(rax, Handle<Object>(value));
2997 __ ret(0);
2998 }
2999
3000
3001 // Specialized stub for constructing objects from functions which only have only 2996 // Specialized stub for constructing objects from functions which only have only
3002 // simple assignments of the form this.x = ...; in their body. 2997 // simple assignments of the form this.x = ...; in their body.
3003 MaybeObject* ConstructStubCompiler::CompileConstructStub(JSFunction* function) { 2998 MaybeObject* ConstructStubCompiler::CompileConstructStub(JSFunction* function) {
3004 // ----------- S t a t e ------------- 2999 // ----------- S t a t e -------------
3005 // -- rax : argc 3000 // -- rax : argc
3006 // -- rdi : constructor 3001 // -- rdi : constructor
3007 // -- rsp[0] : return address 3002 // -- rsp[0] : return address
3008 // -- rsp[4] : last argument 3003 // -- rsp[4] : last argument
3009 // ----------------------------------- 3004 // -----------------------------------
3010 Label generic_stub_call; 3005 Label generic_stub_call;
(...skipping 117 matching lines...) Expand 10 before | Expand all | Expand 10 after
3128 // Return the generated code. 3123 // Return the generated code.
3129 return GetCode(); 3124 return GetCode();
3130 } 3125 }
3131 3126
3132 3127
3133 #undef __ 3128 #undef __
3134 3129
3135 } } // namespace v8::internal 3130 } } // namespace v8::internal
3136 3131
3137 #endif // V8_TARGET_ARCH_X64 3132 #endif // V8_TARGET_ARCH_X64
OLDNEW
« no previous file with comments | « src/x64/regexp-macro-assembler-x64.cc ('k') | src/zone.h » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698