Chromium Code Reviews

Side by Side Diff: src/x64/stub-cache-x64.cc

Issue 18014003: Add X32 port into V8 (Closed) Base URL: http://v8.googlecode.com/svn/branches/bleeding_edge/
Patch Set: Created 7 years, 5 months ago
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 19 matching lines...)
 #if V8_TARGET_ARCH_X64
 
 #include "ic-inl.h"
 #include "codegen.h"
 #include "stub-cache.h"
 
 namespace v8 {
 namespace internal {
 
 #define __ ACCESS_MASM(masm)
+#define __k __
+#define __a __
 
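The two new defines are plain aliases on x64: __k and __a both expand to the ordinary __ emitter, so every annotated site below generates exactly the same code as before. Presumably the x32 build supplies different expansions, using __k where a full 64-bit (quadword) operation must be kept and __a where a stack-argument offset must account for the wider return-address slot; that reading is inferred from the hunks below rather than stated in this file. A minimal, self-contained sketch of the aliasing, with a hypothetical FakeMasm standing in for V8's MacroAssembler:

    #include <iostream>

    struct FakeMasm {  // hypothetical stand-in, not V8's MacroAssembler
      void movq(const char* operands) { std::cout << "movq " << operands << "\n"; }
    };

    #define __  masm->
    #define __k __   // on x64 both prefixes forward to the ordinary emitter
    #define __a __

    int main() {
      FakeMasm m;
      FakeMasm* masm = &m;
      __  movq("rax, rbx");        // plain emission
      __k movq("scratch, [rsp]");  // identical on x64; x32 presumably keeps this 64-bit
      __a movq("[rsp + 8], rdi");  // identical on x64; x32 presumably adjusts the offset
      return 0;
    }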
 
 
 static void ProbeTable(Isolate* isolate,
                        MacroAssembler* masm,
                        Code::Flags flags,
                        StubCache::Table table,
                        Register receiver,
                        Register name,
                        // The offset is scaled by 4, based on
                        // kHeapObjectTagSize, which is two bits
                        Register offset) {
   // We need to scale up the pointer by 2 because the offset is scaled by less
   // than the pointer size.
+#ifndef V8_TARGET_ARCH_X32
   ASSERT(kPointerSizeLog2 == kHeapObjectTagSize + 1);
   ScaleFactor scale_factor = times_2;
+#else
+  ASSERT(kPointerSizeLog2 == kHeapObjectTagSize);
+  ScaleFactor scale_factor = times_1;
+#endif
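The scale-factor choice is plain offset arithmetic: the incoming offset register already holds the entry offset times four (two tag bits of pre-scaling), the lea further down multiplies it by three because each entry has three pointer-sized fields, so the remaining hardware scale must supply pointer-size / 4, i.e. times_2 on x64 and times_1 on x32. A small self-contained check of that arithmetic, assuming the probe then indexes the table as base + offset * scale_factor:

    #include <cassert>

    int main() {
      const int pointer_sizes[] = {8, 4};  // x64, then the x32 ABI (assumed sizes)
      for (int kPointerSize : pointer_sizes) {
        int scale_factor = (kPointerSize == 8) ? 2 : 1;   // times_2 vs. times_1
        int index = 7;                                    // arbitrary example entry
        int offset = index * 4;        // register value: entry offset pre-scaled by 4
        offset = offset + offset * 2;                     // lea(offset, offset, times_2, 0)
        int byte_offset = offset * scale_factor;          // scaling applied by the probe
        assert(byte_offset == index * 3 * kPointerSize);  // lands on the right 3-field entry
      }
      return 0;
    }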
 
   ASSERT_EQ(3 * kPointerSize, sizeof(StubCache::Entry));
   // The offset register holds the entry offset times four (due to masking
   // and shifting optimizations).
   ExternalReference key_offset(isolate->stub_cache()->key_reference(table));
   ExternalReference value_offset(isolate->stub_cache()->value_reference(table));
   Label miss;
 
   // Multiply by 3 because there are 3 fields per entry (name, code, map).
   __ lea(offset, Operand(offset, offset, times_2, 0));
(...skipping 338 matching lines...)
 
 // Reserves space for the extra arguments to API function in the
 // caller's frame.
 //
 // These arguments are set by CheckPrototypes and GenerateFastApiCall.
 static void ReserveSpaceForFastApiCall(MacroAssembler* masm, Register scratch) {
   // ----------- S t a t e -------------
   //  -- rsp[0] : return address
   //  -- rsp[8] : last argument in the internal frame of the caller
   // -----------------------------------
-  __ movq(scratch, Operand(rsp, 0));
+  __k movq(scratch, Operand(rsp, 0));
   __ subq(rsp, Immediate(kFastApiCallArguments * kPointerSize));
-  __ movq(Operand(rsp, 0), scratch);
+  __k movq(Operand(rsp, 0), scratch);
   __ Move(scratch, Smi::FromInt(0));
   for (int i = 1; i <= kFastApiCallArguments; i++) {
-    __ movq(Operand(rsp, i * kPointerSize), scratch);
+    __a movq(Operand(rsp, i * kPointerSize), scratch);
   }
 }
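ReserveSpaceForFastApiCall slides the return address down by kFastApiCallArguments slots and zero-fills the freed slots; FreeSpaceForFastApiCall below undoes it. A small model of that shuffle using a plain vector as the stack, with index 0 standing in for rsp (the values are placeholders, not V8 data):

    #include <cassert>
    #include <cstdint>
    #include <vector>

    int main() {
      const int kFastApiCallArguments = 6;      // value asserted later in this file
      const int64_t kReturnAddress = 0x1234;    // placeholder
      const int64_t kLastCallerArgument = 42;   // placeholder
      std::vector<int64_t> stack = {kReturnAddress, kLastCallerArgument};

      // movq(scratch, Operand(rsp, 0)), then subq(rsp, 6 * kPointerSize):
      int64_t scratch = stack[0];
      stack.insert(stack.begin(), kFastApiCallArguments, 0);

      // movq(Operand(rsp, 0), scratch): the return address stays on top.
      stack[0] = scratch;
      // The loop stores Smi::FromInt(0) into slots 1..kFastApiCallArguments.
      for (int i = 1; i <= kFastApiCallArguments; ++i) stack[i] = 0;

      assert(stack[0] == kReturnAddress);
      assert(stack[kFastApiCallArguments + 1] == kLastCallerArgument);
      return 0;
    }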
 
 
 // Undoes the effects of ReserveSpaceForFastApiCall.
 static void FreeSpaceForFastApiCall(MacroAssembler* masm, Register scratch) {
   // ----------- S t a t e -------------
   //  -- rsp[0] : return address.
   //  -- rsp[8] : last fast api call extra argument.
   //  -- ...
   //  -- rsp[kFastApiCallArguments * 8] : first fast api call extra
   //                                      argument.
   //  -- rsp[kFastApiCallArguments * 8 + 8] : last argument in the internal
   //                                          frame.
   // -----------------------------------
-  __ movq(scratch, Operand(rsp, 0));
-  __ movq(Operand(rsp, kFastApiCallArguments * kPointerSize), scratch);
+  __k movq(scratch, Operand(rsp, 0));
+  __k movq(Operand(rsp, kFastApiCallArguments * kPointerSize), scratch);
   __ addq(rsp, Immediate(kPointerSize * kFastApiCallArguments));
 }
 
 
 // Generates call to API function.
 static void GenerateFastApiCall(MacroAssembler* masm,
                                 const CallOptimization& optimization,
                                 int argc) {
   // ----------- S t a t e -------------
   //  -- rsp[0] : return address
(...skipping 11 matching lines...)
   //  -- ...
   //  -- rsp[(argc + 6) * 8] : first argument
   //  -- rsp[(argc + 7) * 8] : receiver
   // -----------------------------------
   // Get the function and setup the context.
   Handle<JSFunction> function = optimization.constant_function();
   __ LoadHeapObject(rdi, function);
   __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
 
   // Pass the additional arguments.
-  __ movq(Operand(rsp, 2 * kPointerSize), rdi);
+  __a movq(Operand(rsp, 2 * kPointerSize), rdi);
   Handle<CallHandlerInfo> api_call_info = optimization.api_call_info();
   Handle<Object> call_data(api_call_info->data(), masm->isolate());
   if (masm->isolate()->heap()->InNewSpace(*call_data)) {
     __ Move(rcx, api_call_info);
     __ movq(rbx, FieldOperand(rcx, CallHandlerInfo::kDataOffset));
-    __ movq(Operand(rsp, 3 * kPointerSize), rbx);
+    __a movq(Operand(rsp, 3 * kPointerSize), rbx);
   } else {
-    __ Move(Operand(rsp, 3 * kPointerSize), call_data);
+    __a Move(Operand(rsp, 3 * kPointerSize), call_data);
   }
   __ movq(kScratchRegister,
           ExternalReference::isolate_address(masm->isolate()));
-  __ movq(Operand(rsp, 4 * kPointerSize), kScratchRegister);
+  __a movq(Operand(rsp, 4 * kPointerSize), kScratchRegister);
   __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex);
-  __ movq(Operand(rsp, 5 * kPointerSize), kScratchRegister);
-  __ movq(Operand(rsp, 6 * kPointerSize), kScratchRegister);
+  __a movq(Operand(rsp, 5 * kPointerSize), kScratchRegister);
+  __a movq(Operand(rsp, 6 * kPointerSize), kScratchRegister);
 
   // Prepare arguments.
   STATIC_ASSERT(kFastApiCallArguments == 6);
+#ifndef V8_TARGET_ARCH_X32
   __ lea(rbx, Operand(rsp, kFastApiCallArguments * kPointerSize));
+#else
+  __ leal(rbx, Operand(rsp, 1 * kHWRegSize +
+                       (kFastApiCallArguments - 1) * kPointerSize));
+#endif
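The #else branch presumably reflects the x32 ABI: a hardware call still pushes an 8-byte return address (kHWRegSize) even though pointers and stack arguments occupy 4-byte slots (kPointerSize), so the n-th argument slot above the return address sits at kHWRegSize + (n - 1) * kPointerSize rather than n * kPointerSize. On x64 the two formulas coincide, which is why only the x32 side needs spelling out. A small arithmetic check; the slot sizes below are assumptions about the two ABIs:

    #include <cassert>

    int main() {
      const int kFastApiCallArguments = 6;
      {  // x64: 8-byte pointers, 8-byte return-address slot.
        const int kPointerSize = 8, kHWRegSize = 8;
        assert(kFastApiCallArguments * kPointerSize ==
               kHWRegSize + (kFastApiCallArguments - 1) * kPointerSize);
      }
      {  // x32: 4-byte pointers, but still an 8-byte return-address slot.
        const int kPointerSize = 4, kHWRegSize = 8;
        assert(kHWRegSize + (kFastApiCallArguments - 1) * kPointerSize == 28);
        assert(kFastApiCallArguments * kPointerSize == 24);  // the naive formula differs
      }
      return 0;
    }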
 
   // Function address is a foreign pointer outside V8's heap.
   Address function_address = v8::ToCData<Address>(api_call_info->callback());
   bool returns_handle =
       !CallbackTable::ReturnsVoid(masm->isolate(), function_address);
 
 #if defined(__MINGW64__)
   Register arguments_arg = rcx;
   Register callback_arg = rdx;
 #elif defined(_WIN64)
(...skipping 360 matching lines...)
 
   // Stub never generated for non-global objects that require access
   // checks.
   ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
 
   // Perform map transition for the receiver if necessary.
   if (details.type() == FIELD &&
       object->map()->unused_property_fields() == 0) {
     // The properties must be extended before we can store the value.
     // We jump to a runtime call that extends the properties array.
-    __ pop(scratch1);  // Return address.
+    __k pop(scratch1);  // Return address.
     __ push(receiver_reg);
     __ Push(transition);
     __ push(value_reg);
-    __ push(scratch1);
+    __k push(scratch1);
     __ TailCallExternalReference(
         ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage),
                           masm->isolate()),
         3,
         1);
     return;
   }
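The pop/push dance above rebuilds the stack for the tail call: the return address comes off first, the three runtime arguments go on, and the return address goes back on top so TailCallExternalReference can jump with a well-formed frame. A small vector model of that reordering with placeholder strings:

    #include <cassert>
    #include <string>
    #include <vector>

    int main() {
      // Stack grows toward index 0; start with just the return address on top.
      std::vector<std::string> stack = {"return address"};

      // __k pop(scratch1): take the return address off the stack.
      std::string scratch1 = stack.front();
      stack.erase(stack.begin());

      // push(receiver_reg), Push(transition), push(value_reg), then __k push(scratch1):
      for (const char* value : {"receiver", "transition", "value"})
        stack.insert(stack.begin(), value);
      stack.insert(stack.begin(), scratch1);

      // The runtime call now finds its three arguments under the return address.
      assert(stack[0] == "return address");
      assert(stack[1] == "value" && stack[2] == "transition" && stack[3] == "receiver");
      return 0;
    }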
 
   // Update the map of the object.
   __ Move(scratch1, transition);
(...skipping 231 matching lines...)
   ASSERT(!scratch2.is(object_reg) && !scratch2.is(holder_reg)
          && !scratch2.is(scratch1));
 
   // Keep track of the current object in register reg. On the first
   // iteration, reg is an alias for object_reg, on later iterations,
   // it is an alias for holder_reg.
   Register reg = object_reg;
   int depth = 0;
 
   if (save_at_depth == depth) {
-    __ movq(Operand(rsp, kPointerSize), object_reg);
+    __a movq(Operand(rsp, 1 * kPointerSize), object_reg);
   }
 
   // Check the maps in the prototype chain.
   // Traverse the prototype chain from the object and do map checks.
   Handle<JSObject> current = object;
   while (!current.is_identical_to(holder)) {
     ++depth;
 
     // Only global objects and objects that do not require access
     // checks are allowed in stubs.
(...skipping 39 matching lines...)
         // The prototype is in new space; we cannot store a reference to it
         // in the code. Load it from the map.
         __ movq(reg, FieldOperand(scratch1, Map::kPrototypeOffset));
       } else {
         // The prototype is in old space; load it directly.
         __ Move(reg, prototype);
       }
     }
 
     if (save_at_depth == depth) {
-      __ movq(Operand(rsp, kPointerSize), reg);
+      __a movq(Operand(rsp, 1 * kPointerSize), reg);
     }
 
     // Go to the next object in the prototype chain.
     current = prototype;
   }
   ASSERT(current.is_identical_to(holder));
 
   // Log the check depth.
   LOG(isolate(), IntEvent("check-maps-depth", depth + 1));
 
(...skipping 116 matching lines...)
     GenerateTailCall(masm(), stub.GetCode(isolate()));
   }
 }
 
 
 void BaseLoadStubCompiler::GenerateLoadCallback(
     Register reg,
     Handle<ExecutableAccessorInfo> callback) {
   // Insert additional parameters into the stack frame above return address.
   ASSERT(!scratch4().is(reg));
-  __ pop(scratch4());  // Get return address to place it below.
+  __k pop(scratch4());  // Get return address to place it below.
 
   __ push(receiver());  // receiver
   __ push(reg);  // holder
   if (heap()->InNewSpace(callback->data())) {
     __ Move(scratch1(), callback);
     __ push(FieldOperand(scratch1(),
                          ExecutableAccessorInfo::kDataOffset));  // data
   } else {
     __ Push(Handle<Object>(callback->data(), isolate()));
   }
(...skipping 19 matching lines...)
   Register accessor_info_arg = returns_handle ? r8 : rdx;
   Register name_arg = returns_handle ? rdx : rcx;
 #else
   Register getter_arg = rdx;
   Register accessor_info_arg = rsi;
   Register name_arg = rdi;
 #endif
 
   ASSERT(!name_arg.is(scratch4()));
   __ movq(name_arg, rsp);
-  __ push(scratch4());  // Restore return address.
+  __k push(scratch4());  // Restore return address.
 
   // v8::Arguments::values_ and handler for name.
   const int kStackSpace = PropertyCallbackArguments::kArgsLength + 1;
 
   // Allocate v8::AccessorInfo in non-GCed stack space.
   const int kArgStackSpace = 1;
 
   __ PrepareCallApiFunction(kArgStackSpace, returns_handle);
   STATIC_ASSERT(PropertyCallbackArguments::kArgsLength == 6);
   __ lea(rax, Operand(name_arg, 6 * kPointerSize));
(...skipping 99 matching lines...)
         __ pop(receiver());
       }
 
       // Leave the internal frame.
     }
 
     GenerateLoadPostInterceptor(holder_reg, interceptor_holder, name, lookup);
   } else {  // !compile_followup_inline
     // Call the runtime system to load the interceptor.
     // Check that the maps haven't changed.
-    __ pop(scratch2());  // save old return address
+    __k pop(scratch2());  // save old return address
     PushInterceptorArguments(masm(), receiver(), holder_reg,
                              this->name(), interceptor_holder);
-    __ push(scratch2());  // restore old return address
+    __k push(scratch2());  // restore old return address
 
     ExternalReference ref = ExternalReference(
         IC_Utility(IC::kLoadPropertyWithInterceptorForLoad), isolate());
     __ TailCallExternalReference(ref, 6, 1);
   }
 }
 
 
 void CallStubCompiler::GenerateNameCheck(Handle<Name> name, Label* miss) {
   if (kind_ == Code::KEYED_CALL_IC) {
     __ Cmp(rcx, name);
     __ j(not_equal, miss);
   }
 }
 
 
 void CallStubCompiler::GenerateGlobalReceiverCheck(Handle<JSObject> object,
                                                    Handle<JSObject> holder,
                                                    Handle<Name> name,
                                                    Label* miss) {
   ASSERT(holder->IsGlobalObject());
 
   // Get the number of arguments.
   const int argc = arguments().immediate();
 
   // Get the receiver from the stack.
-  __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
+  __a movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
 
 
   // Check that the maps haven't changed.
   __ JumpIfSmi(rdx, miss);
   CheckPrototypes(object, rdx, holder, rbx, rax, rdi, name, miss);
 }
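Throughout these call stubs the receiver is fetched with Operand(rsp, (argc + 1) * kPointerSize): one slot for the return address plus argc argument slots, the receiver having been pushed first and therefore sitting deepest. A tiny check of that offset under the plain x64 layout; the __a form above presumably substitutes the wider return-address slot on x32, as sketched earlier:

    #include <cassert>

    int main() {
      const int kPointerSize = 8;  // x64 slot size assumed here
      const int argc = 2;          // example: a call with two explicit arguments
      // Stack top-down: [return address][arg 2][arg 1][receiver], one slot each.
      assert((argc + 1) * kPointerSize == 24);  // skips return address plus both args
      return 0;
    }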
 
 
 void CallStubCompiler::GenerateLoadFunctionFromCell(
     Handle<Cell> cell,
(...skipping 45 matching lines...)
   // ...
   // rsp[argc * 8] : argument 1
   // rsp[(argc + 1) * 8] : argument 0 = receiver
   // -----------------------------------
   Label miss;
 
   GenerateNameCheck(name, &miss);
 
   // Get the receiver from the stack.
   const int argc = arguments().immediate();
-  __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
+  __a movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
 
   // Check that the receiver isn't a smi.
   __ JumpIfSmi(rdx, &miss);
 
   // Do the right check and compute the holder register.
   Register reg = CheckPrototypes(object, rdx, holder, rbx, rax, rdi,
                                  name, &miss);
 
   GenerateFastPropertyLoad(masm(), rdi, reg, index.is_inobject(holder),
                            index.translate(holder), Representation::Tagged());
 
   // Check that the function really is a function.
   __ JumpIfSmi(rdi, &miss);
   __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rbx);
   __ j(not_equal, &miss);
 
   // Patch the receiver on the stack with the global proxy if
   // necessary.
   if (object->IsGlobalObject()) {
     __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalReceiverOffset));
-    __ movq(Operand(rsp, (argc + 1) * kPointerSize), rdx);
+    __a movq(Operand(rsp, (argc + 1) * kPointerSize), rdx);
   }
 
   // Invoke the function.
   CallKind call_kind = CallICBase::Contextual::decode(extra_state_)
       ? CALL_AS_FUNCTION
       : CALL_AS_METHOD;
   __ InvokeFunction(rdi, arguments(), JUMP_FUNCTION,
                     NullCallWrapper(), call_kind);
 
   // Handle call cache miss.
(...skipping 13 matching lines...)
     Handle<String> name,
     Code::StubType type) {
   Label miss;
 
   // Check that function is still array
   const int argc = arguments().immediate();
   GenerateNameCheck(name, &miss);
 
   if (cell.is_null()) {
     // Get the receiver from the stack.
-    __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
+    __a movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
 
     // Check that the receiver isn't a smi.
     __ JumpIfSmi(rdx, &miss);
     CheckPrototypes(Handle<JSObject>::cast(object), rdx, holder, rbx, rax, rdi,
                     name, &miss);
   } else {
     ASSERT(cell->value() == *function);
     GenerateGlobalReceiverCheck(Handle<JSObject>::cast(object), holder, name,
                                 &miss);
     GenerateLoadFunctionFromCell(cell, function, &miss);
(...skipping 33 matching lines...)
   // -----------------------------------
 
   // If object is not an array, bail out to regular call.
   if (!object->IsJSArray() || !cell.is_null()) return Handle<Code>::null();
 
   Label miss;
   GenerateNameCheck(name, &miss);
 
   // Get the receiver from the stack.
   const int argc = arguments().immediate();
-  __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
+  __a movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
 
   // Check that the receiver isn't a smi.
   __ JumpIfSmi(rdx, &miss);
 
   CheckPrototypes(Handle<JSObject>::cast(object), rdx, holder, rbx, rax, rdi,
                   name, &miss);
 
   if (argc == 0) {
     // Noop, return the length.
     __ movq(rax, FieldOperand(rdx, JSArray::kLengthOffset));
(...skipping 18 matching lines...)
     __ addl(rax, Immediate(argc));
 
     // Get the elements' length into rcx.
     __ SmiToInteger32(rcx, FieldOperand(rdi, FixedArray::kLengthOffset));
 
     // Check if we could survive without allocation.
     __ cmpl(rax, rcx);
     __ j(greater, &attempt_to_grow_elements);
 
     // Check if value is a smi.
-    __ movq(rcx, Operand(rsp, argc * kPointerSize));
+    __a movq(rcx, Operand(rsp, argc * kPointerSize));
     __ JumpIfNotSmi(rcx, &with_write_barrier);
 
     // Save new length.
     __ Integer32ToSmiField(FieldOperand(rdx, JSArray::kLengthOffset), rax);
 
     // Store the value.
     __ movq(FieldOperand(rdi,
                          rax,
                          times_pointer_size,
                          FixedArray::kHeaderSize - argc * kPointerSize),
(...skipping 14 matching lines...)
     STATIC_ASSERT(FixedArray::kMaxLength < Smi::kMaxValue);
     __ addl(rax, Immediate(argc));
 
     // Get the elements' length into rcx.
     __ SmiToInteger32(rcx, FieldOperand(rdi, FixedArray::kLengthOffset));
 
     // Check if we could survive without allocation.
     __ cmpl(rax, rcx);
     __ j(greater, &call_builtin);
 
-    __ movq(rcx, Operand(rsp, argc * kPointerSize));
+    __a movq(rcx, Operand(rsp, argc * kPointerSize));
     __ StoreNumberToDoubleElements(
         rcx, rdi, rax, xmm0, &call_builtin, argc * kDoubleSize);
 
     // Save new length.
     __ Integer32ToSmiField(FieldOperand(rdx, JSArray::kLengthOffset), rax);
     __ Integer32ToSmi(rax, rax);  // Return new length as smi.
     __ ret((argc + 1) * kPointerSize);
 
     __ bind(&with_write_barrier);
 
(...skipping 56 matching lines...)
                         OMIT_SMI_CHECK);
 
     __ Integer32ToSmi(rax, rax);  // Return new length as smi.
     __ ret((argc + 1) * kPointerSize);
 
     __ bind(&attempt_to_grow_elements);
     if (!FLAG_inline_new) {
       __ jmp(&call_builtin);
     }
 
-    __ movq(rbx, Operand(rsp, argc * kPointerSize));
+    __a movq(rbx, Operand(rsp, argc * kPointerSize));
     // Growing elements that are SMI-only requires special handling in case
     // the new element is non-Smi. For now, delegate to the builtin.
     Label no_fast_elements_check;
     __ JumpIfSmi(rbx, &no_fast_elements_check);
     __ movq(rcx, FieldOperand(rdx, HeapObject::kMapOffset));
     __ CheckFastObjectElements(rcx, &call_builtin, Label::kFar);
     __ bind(&no_fast_elements_check);
 
     ExternalReference new_space_allocation_top =
         ExternalReference::new_space_allocation_top_address(isolate());
(...skipping 28 matching lines...)
     }
 
     // We know the elements array is in new space so we don't need the
     // remembered set, but we just pushed a value onto it so we may have to
     // tell the incremental marker to rescan the object that we just grew. We
     // don't need to worry about the holes because they are in old space and
     // already marked black.
     __ RecordWrite(rdi, rdx, rbx, kDontSaveFPRegs, OMIT_REMEMBERED_SET);
 
     // Restore receiver to rdx as finish sequence assumes it's here.
-    __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
+    __a movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
 
     // Increment element's and array's sizes.
     __ SmiAddConstant(FieldOperand(rdi, FixedArray::kLengthOffset),
                       Smi::FromInt(kAllocationDelta));
 
     // Make new length a smi before returning it.
     __ Integer32ToSmi(rax, rax);
     __ movq(FieldOperand(rdx, JSArray::kLengthOffset), rax);
 
     __ ret((argc + 1) * kPointerSize);
(...skipping 30 matching lines...)
   // -----------------------------------
 
   // If object is not an array, bail out to regular call.
   if (!object->IsJSArray() || !cell.is_null()) return Handle<Code>::null();
 
   Label miss, return_undefined, call_builtin;
   GenerateNameCheck(name, &miss);
 
   // Get the receiver from the stack.
   const int argc = arguments().immediate();
-  __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
+  __a movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
 
   // Check that the receiver isn't a smi.
   __ JumpIfSmi(rdx, &miss);
 
   CheckPrototypes(Handle<JSObject>::cast(object), rdx, holder, rbx, rax, rdi,
                   name, &miss);
 
   // Get the elements array of the object.
   __ movq(rbx, FieldOperand(rdx, JSArray::kElementsOffset));
 
(...skipping 82 matching lines...)
                                             rax,
                                             &miss);
   ASSERT(!object.is_identical_to(holder));
   CheckPrototypes(
       Handle<JSObject>(JSObject::cast(object->GetPrototype(isolate()))),
       rax, holder, rbx, rdx, rdi, name, &miss);
 
   Register receiver = rbx;
   Register index = rdi;
   Register result = rax;
-  __ movq(receiver, Operand(rsp, (argc + 1) * kPointerSize));
+  __a movq(receiver, Operand(rsp, (argc + 1) * kPointerSize));
   if (argc > 0) {
-    __ movq(index, Operand(rsp, (argc - 0) * kPointerSize));
+    __a movq(index, Operand(rsp, (argc - 0) * kPointerSize));
   } else {
     __ LoadRoot(index, Heap::kUndefinedValueRootIndex);
   }
 
   StringCharCodeAtGenerator generator(receiver,
                                       index,
                                       result,
                                       &miss,  // When not a string.
                                       &miss,  // When not a number.
                                       index_out_of_range_label,
(...skipping 58 matching lines...)
                                             &miss);
   ASSERT(!object.is_identical_to(holder));
   CheckPrototypes(
       Handle<JSObject>(JSObject::cast(object->GetPrototype(isolate()))),
       rax, holder, rbx, rdx, rdi, name, &miss);
 
   Register receiver = rax;
   Register index = rdi;
   Register scratch = rdx;
   Register result = rax;
-  __ movq(receiver, Operand(rsp, (argc + 1) * kPointerSize));
+  __a movq(receiver, Operand(rsp, (argc + 1) * kPointerSize));
   if (argc > 0) {
-    __ movq(index, Operand(rsp, (argc - 0) * kPointerSize));
+    __a movq(index, Operand(rsp, (argc - 0) * kPointerSize));
   } else {
     __ LoadRoot(index, Heap::kUndefinedValueRootIndex);
   }
 
   StringCharAtGenerator generator(receiver,
                                   index,
                                   scratch,
                                   result,
                                   &miss,  // When not a string.
                                   &miss,  // When not a number.
(...skipping 38 matching lines...)
 
   // If the object is not a JSObject or we got an unexpected number of
   // arguments, bail out to the regular call.
   const int argc = arguments().immediate();
   if (!object->IsJSObject() || argc != 1) return Handle<Code>::null();
 
   Label miss;
   GenerateNameCheck(name, &miss);
 
   if (cell.is_null()) {
-    __ movq(rdx, Operand(rsp, 2 * kPointerSize));
+    __a movq(rdx, Operand(rsp, 2 * kPointerSize));
     __ JumpIfSmi(rdx, &miss);
     CheckPrototypes(Handle<JSObject>::cast(object), rdx, holder, rbx, rax, rdi,
                     name, &miss);
   } else {
     ASSERT(cell->value() == *function);
     GenerateGlobalReceiverCheck(Handle<JSObject>::cast(object), holder, name,
                                 &miss);
     GenerateLoadFunctionFromCell(cell, function, &miss);
   }
 
   // Load the char code argument.
   Register code = rbx;
-  __ movq(code, Operand(rsp, 1 * kPointerSize));
+  __a movq(code, Operand(rsp, 1 * kPointerSize));
 
   // Check the code is a smi.
   Label slow;
   __ JumpIfNotSmi(code, &slow);
 
   // Convert the smi code to uint16.
   __ SmiAndConstant(code, code, Smi::FromInt(0xffff));
 
   StringCharFromCodeGenerator generator(code, rax);
   generator.GenerateFast(masm());
(...skipping 50 matching lines...)
 
   // If the object is not a JSObject or we got an unexpected number of
   // arguments, bail out to the regular call.
   const int argc = arguments().immediate();
   if (!object->IsJSObject() || argc != 1) return Handle<Code>::null();
 
   Label miss;
   GenerateNameCheck(name, &miss);
 
   if (cell.is_null()) {
-    __ movq(rdx, Operand(rsp, 2 * kPointerSize));
+    __a movq(rdx, Operand(rsp, 2 * kPointerSize));
     __ JumpIfSmi(rdx, &miss);
     CheckPrototypes(Handle<JSObject>::cast(object), rdx, holder, rbx, rax, rdi,
                     name, &miss);
   } else {
     ASSERT(cell->value() == *function);
     GenerateGlobalReceiverCheck(Handle<JSObject>::cast(object), holder, name,
                                 &miss);
     GenerateLoadFunctionFromCell(cell, function, &miss);
   }
   // Load the (only) argument into rax.
-  __ movq(rax, Operand(rsp, 1 * kPointerSize));
+  __a movq(rax, Operand(rsp, 1 * kPointerSize));
 
   // Check if the argument is a smi.
   Label not_smi;
   STATIC_ASSERT(kSmiTag == 0);
   __ JumpIfNotSmi(rax, &not_smi);
   __ SmiToInteger32(rax, rax);
 
   // Set ebx to 1...1 (== -1) if the argument is negative, or to 0...0
   // otherwise.
   __ movl(rbx, rax);
(...skipping 10 matching lines...)
   Label slow;
   __ j(negative, &slow);
 
   // Smi case done.
   __ Integer32ToSmi(rax, rax);
   __ ret(2 * kPointerSize);
 
   // Check if the argument is a heap number and load its value.
   __ bind(&not_smi);
   __ CheckMap(rax, factory()->heap_number_map(), &slow, DONT_DO_SMI_CHECK);
-  __ movq(rbx, FieldOperand(rax, HeapNumber::kValueOffset));
+  __k movq(rbx, FieldOperand(rax, HeapNumber::kValueOffset));
 
   // Check the sign of the argument. If the argument is positive,
   // just return it.
   Label negative_sign;
   const int sign_mask_shift =
       (HeapNumber::kExponentOffset - HeapNumber::kValueOffset) * kBitsPerByte;
-  __ movq(rdi, static_cast<int64_t>(HeapNumber::kSignMask) << sign_mask_shift,
+  __k movq(rdi, static_cast<int64_t>(HeapNumber::kSignMask) << sign_mask_shift,
           RelocInfo::NONE64);
-  __ testq(rbx, rdi);
+  __k testq(rbx, rdi);
   __ j(not_zero, &negative_sign);
   __ ret(2 * kPointerSize);
 
   // If the argument is negative, clear the sign, and return a new
   // number. We still have the sign mask in rdi.
   __ bind(&negative_sign);
-  __ xor_(rbx, rdi);
+  __k xor_(rbx, rdi);
   __ AllocateHeapNumber(rax, rdx, &slow);
-  __ movq(FieldOperand(rax, HeapNumber::kValueOffset), rbx);
+  __k movq(FieldOperand(rax, HeapNumber::kValueOffset), rbx);
   __ ret(2 * kPointerSize);
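The heap-number path computes the absolute value by loading the raw 64-bit IEEE-754 bits, testing the sign bit (HeapNumber::kSignMask shifted up to the top of the loaded word) and clearing it with an xor when set; those operations work on the full quadword, hence the __k markers. A short host-side sketch of the same bit trick in plain C++, not the generated code:

    #include <cassert>
    #include <cstdint>
    #include <cstring>

    // Clears the IEEE-754 sign bit of a double, mirroring the xor against the
    // shifted sign mask in the generated stub.
    double AbsViaSignBit(double value) {
      uint64_t bits;
      std::memcpy(&bits, &value, sizeof(bits));     // movq rbx, [HeapNumber value]
      const uint64_t kSignBit = 1ULL << 63;         // top bit of the loaded word
      if (bits & kSignBit) bits ^= kSignBit;        // testq + xor_ on the negative path
      double result;
      std::memcpy(&result, &bits, sizeof(result));  // movq [new HeapNumber], rbx
      return result;
    }

    int main() {
      assert(AbsViaSignBit(-2.5) == 2.5);
      assert(AbsViaSignBit(3.0) == 3.0);
      assert(AbsViaSignBit(-0.0) == 0.0);
      return 0;
    }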
 
   // Tail call the full function. We do not have to patch the receiver
   // because the function makes no use of it.
   __ bind(&slow);
   CallKind call_kind = CallICBase::Contextual::decode(extra_state_)
       ? CALL_AS_FUNCTION
       : CALL_AS_METHOD;
   ParameterCount expected(function);
   __ InvokeFunction(function, expected, arguments(),
(...skipping 23 matching lines...)
   if (!object->IsJSObject()) return Handle<Code>::null();
   int depth = optimization.GetPrototypeDepthOfExpectedType(
       Handle<JSObject>::cast(object), holder);
   if (depth == kInvalidProtoDepth) return Handle<Code>::null();
 
   Label miss, miss_before_stack_reserved;
   GenerateNameCheck(name, &miss_before_stack_reserved);
 
   // Get the receiver from the stack.
   const int argc = arguments().immediate();
-  __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
+  __a movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
 
   // Check that the receiver isn't a smi.
   __ JumpIfSmi(rdx, &miss_before_stack_reserved);
 
   Counters* counters = isolate()->counters();
   __ IncrementCounter(counters->call_const(), 1);
   __ IncrementCounter(counters->call_const_fast_api(), 1);
 
   // Allocate space for v8::Arguments implicit values. Must be initialized
   // before calling any runtime function.
   __ subq(rsp, Immediate(kFastApiCallArguments * kPointerSize));
 
   // Check that the maps haven't changed and find a Holder as a side effect.
   CheckPrototypes(Handle<JSObject>::cast(object), rdx, holder, rbx, rax, rdi,
                   name, depth, &miss);
 
   // Move the return address on top of the stack.
-  __ movq(rax, Operand(rsp, kFastApiCallArguments * kPointerSize));
-  __ movq(Operand(rsp, 0 * kPointerSize), rax);
+  __k movq(rax, Operand(rsp, kFastApiCallArguments * kPointerSize));
+  __k movq(Operand(rsp, 0 * kPointerSize), rax);
 
   GenerateFastApiCall(masm(), optimization, argc);
 
   __ bind(&miss);
   __ addq(rsp, Immediate(kFastApiCallArguments * kPointerSize));
 
   __ bind(&miss_before_stack_reserved);
   GenerateMissBranch();
 
   // Return the generated code.
(...skipping 13 matching lines...)
   // rsp[16] : argument argc - 1
   // ...
   // rsp[argc * 8] : argument 1
   // rsp[(argc + 1) * 8] : argument 0 = receiver
   // -----------------------------------
   Label miss;
   GenerateNameCheck(name, &miss);
 
   // Get the receiver from the stack.
   const int argc = arguments().immediate();
-  __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
+  __a movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
 
   // Check that the receiver isn't a smi.
   if (check != NUMBER_CHECK) {
     __ JumpIfSmi(rdx, &miss);
   }
 
   // Make sure that it's okay not to patch the on stack receiver
   // unless we're doing a receiver map check.
   ASSERT(!object->IsGlobalObject() || check == RECEIVER_MAP_CHECK);
 
   Counters* counters = isolate()->counters();
   switch (check) {
     case RECEIVER_MAP_CHECK:
       __ IncrementCounter(counters->call_const(), 1);
 
       // Check that the maps haven't changed.
       CheckPrototypes(Handle<JSObject>::cast(object), rdx, holder, rbx, rax,
                       rdi, name, &miss);
 
       // Patch the receiver on the stack with the global proxy if
       // necessary.
       if (object->IsGlobalObject()) {
         __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalReceiverOffset));
-        __ movq(Operand(rsp, (argc + 1) * kPointerSize), rdx);
+        __a movq(Operand(rsp, (argc + 1) * kPointerSize), rdx);
       }
       break;
 
     case STRING_CHECK:
       // Check that the object is a string.
       __ CmpObjectType(rdx, FIRST_NONSTRING_TYPE, rax);
       __ j(above_equal, &miss);
       // Check that the maps starting from the prototype haven't changed.
       GenerateDirectLoadGlobalFunctionPrototype(
           masm(), Context::STRING_FUNCTION_INDEX, rax, &miss);
(...skipping 106 matching lines...)
   Label miss;
   GenerateNameCheck(name, &miss);
 
   // Get the number of arguments.
   const int argc = arguments().immediate();
 
   LookupResult lookup(isolate());
   LookupPostInterceptor(holder, name, &lookup);
 
   // Get the receiver from the stack.
-  __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
+  __a movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
 
   CallInterceptorCompiler compiler(this, arguments(), rcx, extra_state_);
   compiler.Compile(masm(), object, holder, name, &lookup, rdx, rbx, rdi, rax,
                    &miss);
 
   // Restore receiver.
-  __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
+  __a movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
 
   // Check that the function really is a function.
   __ JumpIfSmi(rax, &miss);
   __ CmpObjectType(rax, JS_FUNCTION_TYPE, rbx);
   __ j(not_equal, &miss);
 
   // Patch the receiver on the stack with the global proxy if
   // necessary.
   if (object->IsGlobalObject()) {
     __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalReceiverOffset));
-    __ movq(Operand(rsp, (argc + 1) * kPointerSize), rdx);
+    __a movq(Operand(rsp, (argc + 1) * kPointerSize), rdx);
   }
 
   // Invoke the function.
   __ movq(rdi, rax);
   CallKind call_kind = CallICBase::Contextual::decode(extra_state_)
       ? CALL_AS_FUNCTION
       : CALL_AS_METHOD;
   __ InvokeFunction(rdi, arguments(), JUMP_FUNCTION,
                     NullCallWrapper(), call_kind);
 
(...skipping 34 matching lines...)
2636 GenerateNameCheck(name, &miss); 2648 GenerateNameCheck(name, &miss);
2637 2649
2638 // Get the number of arguments. 2650 // Get the number of arguments.
2639 const int argc = arguments().immediate(); 2651 const int argc = arguments().immediate();
2640 GenerateGlobalReceiverCheck(object, holder, name, &miss); 2652 GenerateGlobalReceiverCheck(object, holder, name, &miss);
2641 GenerateLoadFunctionFromCell(cell, function, &miss); 2653 GenerateLoadFunctionFromCell(cell, function, &miss);
2642 2654
2643 // Patch the receiver on the stack with the global proxy. 2655 // Patch the receiver on the stack with the global proxy.
2644 if (object->IsGlobalObject()) { 2656 if (object->IsGlobalObject()) {
2645 __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalReceiverOffset)); 2657 __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalReceiverOffset));
2646 __ movq(Operand(rsp, (argc + 1) * kPointerSize), rdx); 2658 __a movq(Operand(rsp, (argc + 1) * kPointerSize), rdx);
2647 } 2659 }
2648 2660
2649 // Set up the context (function already in rdi). 2661 // Set up the context (function already in rdi).
2650 __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset)); 2662 __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
2651 2663
2652 // Jump to the cached code (tail call). 2664 // Jump to the cached code (tail call).
2653 Counters* counters = isolate()->counters(); 2665 Counters* counters = isolate()->counters();
2654 __ IncrementCounter(counters->call_global_inline(), 1); 2666 __ IncrementCounter(counters->call_global_inline(), 1);
2655 ParameterCount expected(function->shared()->formal_parameter_count()); 2667 ParameterCount expected(function->shared()->formal_parameter_count());
2656 CallKind call_kind = CallICBase::Contextual::decode(extra_state_) 2668 CallKind call_kind = CallICBase::Contextual::decode(extra_state_)
(...skipping 23 matching lines...)
2680 Handle<ExecutableAccessorInfo> callback) { 2692 Handle<ExecutableAccessorInfo> callback) {
2681 Label miss; 2693 Label miss;
2682 // Check that the maps haven't changed. 2694 // Check that the maps haven't changed.
2683 __ JumpIfSmi(receiver(), &miss); 2695 __ JumpIfSmi(receiver(), &miss);
2684 CheckPrototypes(object, receiver(), holder, 2696 CheckPrototypes(object, receiver(), holder,
2685 scratch1(), scratch2(), scratch3(), name, &miss); 2697 scratch1(), scratch2(), scratch3(), name, &miss);
2686 2698
2687 // Stub never generated for non-global objects that require access checks. 2699 // Stub never generated for non-global objects that require access checks.
2688 ASSERT(holder->IsJSGlobalProxy() || !holder->IsAccessCheckNeeded()); 2700 ASSERT(holder->IsJSGlobalProxy() || !holder->IsAccessCheckNeeded());
2689 2701
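                  // (__k presumably keeps these return-address pops/pushes as
                  //  full 64-bit stack operations even when building for X32.)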
2690 __ pop(scratch1()); // remove the return address 2702 __k pop(scratch1()); // remove the return address
2691 __ push(receiver()); 2703 __ push(receiver());
2692 __ Push(callback); // callback info 2704 __ Push(callback); // callback info
2693 __ push(this->name()); 2705 __ push(this->name());
2694 __ push(value()); 2706 __ push(value());
2695 __ push(scratch1()); // restore return address 2707 __k push(scratch1()); // restore return address
2696 2708
2697 // Do tail-call to the runtime system. 2709 // Do tail-call to the runtime system.
2698 ExternalReference store_callback_property = 2710 ExternalReference store_callback_property =
2699 ExternalReference(IC_Utility(IC::kStoreCallbackProperty), isolate()); 2711 ExternalReference(IC_Utility(IC::kStoreCallbackProperty), isolate());
2700 __ TailCallExternalReference(store_callback_property, 4, 1); 2712 __ TailCallExternalReference(store_callback_property, 4, 1);
2701 2713
2702 // Handle store cache miss. 2714 // Handle store cache miss.
2703 __ bind(&miss); 2715 __ bind(&miss);
2704 TailCallBuiltin(masm(), MissBuiltin(kind())); 2716 TailCallBuiltin(masm(), MissBuiltin(kind()));
2705 2717
(...skipping 59 matching lines...)
2765 2777
2766 // Perform global security token check if needed. 2778 // Perform global security token check if needed.
2767 if (object->IsJSGlobalProxy()) { 2779 if (object->IsJSGlobalProxy()) {
2768 __ CheckAccessGlobalProxy(receiver(), scratch1(), &miss); 2780 __ CheckAccessGlobalProxy(receiver(), scratch1(), &miss);
2769 } 2781 }
2770 2782
2771 // Stub never generated for non-global objects that require access 2783 // Stub never generated for non-global objects that require access
2772 // checks. 2784 // checks.
2773 ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded()); 2785 ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
2774 2786
2775 __ pop(scratch1()); // remove the return address 2787 __k pop(scratch1()); // remove the return address
2776 __ push(receiver()); 2788 __ push(receiver());
2777 __ push(this->name()); 2789 __ push(this->name());
2778 __ push(value()); 2790 __ push(value());
2779 __ Push(Smi::FromInt(strict_mode())); 2791 __ Push(Smi::FromInt(strict_mode()));
2780 __ push(scratch1()); // restore return address 2792 __k push(scratch1()); // restore return address
2781 2793
2782 // Do tail-call to the runtime system. 2794 // Do tail-call to the runtime system.
2783 ExternalReference store_ic_property = 2795 ExternalReference store_ic_property =
2784 ExternalReference(IC_Utility(IC::kStoreInterceptorProperty), isolate()); 2796 ExternalReference(IC_Utility(IC::kStoreInterceptorProperty), isolate());
2785 __ TailCallExternalReference(store_ic_property, 4, 1); 2797 __ TailCallExternalReference(store_ic_property, 4, 1);
2786 2798
2787 // Handle store cache miss. 2799 // Handle store cache miss.
2788 __ bind(&miss); 2800 __ bind(&miss);
2789 TailCallBuiltin(masm(), MissBuiltin(kind())); 2801 TailCallBuiltin(masm(), MissBuiltin(kind()));
2790 2802
(...skipping 319 matching lines...)
3110 __ CheckMap(key, 3122 __ CheckMap(key,
3111 masm->isolate()->factory()->heap_number_map(), 3123 masm->isolate()->factory()->heap_number_map(),
3112 fail, 3124 fail,
3113 DONT_DO_SMI_CHECK); 3125 DONT_DO_SMI_CHECK);
3114 __ movsd(xmm_scratch0, FieldOperand(key, HeapNumber::kValueOffset)); 3126 __ movsd(xmm_scratch0, FieldOperand(key, HeapNumber::kValueOffset));
3115 __ cvttsd2si(scratch, xmm_scratch0); 3127 __ cvttsd2si(scratch, xmm_scratch0);
3116 __ cvtlsi2sd(xmm_scratch1, scratch); 3128 __ cvtlsi2sd(xmm_scratch1, scratch);
3117 __ ucomisd(xmm_scratch1, xmm_scratch0); 3129 __ ucomisd(xmm_scratch1, xmm_scratch0);
3118 __ j(not_equal, fail); 3130 __ j(not_equal, fail);
3119 __ j(parity_even, fail); // NaN. 3131 __ j(parity_even, fail); // NaN.
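                  // On X32, Smis are only 31 bits wide, so a key outside the
                  // range [-2^30, 2^30) cannot be tagged and must fail here.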
3132 #ifdef V8_TARGET_ARCH_X32
3133 __ cmpl(scratch, Immediate(0xc0000000));
3134 __ j(sign, fail);
3135 #endif
3120 __ Integer32ToSmi(key, scratch); 3136 __ Integer32ToSmi(key, scratch);
3121 __ bind(&key_ok); 3137 __ bind(&key_ok);
3122 } 3138 }
3123 3139
3124 3140
3125 void KeyedStoreStubCompiler::GenerateStoreExternalArray( 3141 void KeyedStoreStubCompiler::GenerateStoreExternalArray(
3126 MacroAssembler* masm, 3142 MacroAssembler* masm,
3127 ElementsKind elements_kind) { 3143 ElementsKind elements_kind) {
3128 // ----------- S t a t e ------------- 3144 // ----------- S t a t e -------------
3129 // -- rax : value 3145 // -- rax : value
(...skipping 108 matching lines...)
3238 } else if (elements_kind == EXTERNAL_DOUBLE_ELEMENTS) { 3254 } else if (elements_kind == EXTERNAL_DOUBLE_ELEMENTS) {
3239 __ movsd(Operand(rbx, rdi, times_8, 0), xmm0); 3255 __ movsd(Operand(rbx, rdi, times_8, 0), xmm0);
3240 __ ret(0); 3256 __ ret(0);
3241 } else { 3257 } else {
3242 // Perform float-to-int conversion with truncation (round-to-zero) 3258 // Perform float-to-int conversion with truncation (round-to-zero)
3243 // behavior. 3259 // behavior.
3244 // Fast path: use machine instruction to convert to int64. If that 3260 // Fast path: use machine instruction to convert to int64. If that
3245 // fails (out-of-range), go into the runtime. 3261 // fails (out-of-range), go into the runtime.
3246 __ cvttsd2siq(r8, xmm0); 3262 __ cvttsd2siq(r8, xmm0);
3247 __ Set(kScratchRegister, V8_UINT64_C(0x8000000000000000)); 3263 __ Set(kScratchRegister, V8_UINT64_C(0x8000000000000000));
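                  // (cvttsd2siq yields 0x8000000000000000 on overflow or NaN;
                  //  __k presumably keeps this sentinel comparison 64-bit wide
                  //  on X32.)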
3248 __ cmpq(r8, kScratchRegister); 3264 __k cmpq(r8, kScratchRegister);
3249 __ j(equal, &slow); 3265 __ j(equal, &slow);
3250 3266
3251 // rdx: value (converted to an untagged integer) 3267 // rdx: value (converted to an untagged integer)
3252 // rdi: untagged index 3268 // rdi: untagged index
3253 // rbx: base pointer of external storage 3269 // rbx: base pointer of external storage
3254 switch (elements_kind) { 3270 switch (elements_kind) {
3255 case EXTERNAL_BYTE_ELEMENTS: 3271 case EXTERNAL_BYTE_ELEMENTS:
3256 case EXTERNAL_UNSIGNED_BYTE_ELEMENTS: 3272 case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
3257 __ movb(Operand(rbx, rdi, times_1, 0), r8); 3273 __ movb(Operand(rbx, rdi, times_1, 0), r8);
3258 break; 3274 break;
(...skipping 274 matching lines...)
3533 __ Move(FieldOperand(rdi, JSObject::kMapOffset), 3549 __ Move(FieldOperand(rdi, JSObject::kMapOffset),
3534 masm->isolate()->factory()->fixed_double_array_map()); 3550 masm->isolate()->factory()->fixed_double_array_map());
3535 __ Move(FieldOperand(rdi, FixedDoubleArray::kLengthOffset), 3551 __ Move(FieldOperand(rdi, FixedDoubleArray::kLengthOffset),
3536 Smi::FromInt(JSArray::kPreallocatedArrayElements)); 3552 Smi::FromInt(JSArray::kPreallocatedArrayElements));
3537 3553
3538 // Increment the length of the array. 3554 // Increment the length of the array.
3539 __ SmiToInteger32(rcx, rcx); 3555 __ SmiToInteger32(rcx, rcx);
3540 __ StoreNumberToDoubleElements(rax, rdi, rcx, xmm0, 3556 __ StoreNumberToDoubleElements(rax, rdi, rcx, xmm0,
3541 &restore_key_transition_elements_kind); 3557 &restore_key_transition_elements_kind);
3542 3558
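                  // (kHoleNanInt64 is a full 64-bit NaN pattern, so these
                  //  hole stores stay quadword-sized (__k) on X32 as well.)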
3543 __ movq(r8, BitCast<int64_t, uint64_t>(kHoleNanInt64), RelocInfo::NONE64); 3559 __k movq(r8, BitCast<int64_t, uint64_t>(kHoleNanInt64), RelocInfo::NONE64);
3544 for (int i = 1; i < JSArray::kPreallocatedArrayElements; i++) { 3560 for (int i = 1; i < JSArray::kPreallocatedArrayElements; i++) {
3545 __ movq(FieldOperand(rdi, FixedDoubleArray::OffsetOfElementAt(i)), r8); 3561 __k movq(FieldOperand(rdi, FixedDoubleArray::OffsetOfElementAt(i)), r8);
3546 } 3562 }
3547 3563
3548 // Install the new backing store in the JSArray. 3564 // Install the new backing store in the JSArray.
3549 __ movq(FieldOperand(rdx, JSObject::kElementsOffset), rdi); 3565 __ movq(FieldOperand(rdx, JSObject::kElementsOffset), rdi);
3550 __ RecordWriteField(rdx, JSObject::kElementsOffset, rdi, rbx, 3566 __ RecordWriteField(rdx, JSObject::kElementsOffset, rdi, rbx,
3551 kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK); 3567 kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
3552 3568
3553 // Increment the length of the array. 3569 // Increment the length of the array.
3554 __ Move(FieldOperand(rdx, JSArray::kLengthOffset), Smi::FromInt(1)); 3570 __ Move(FieldOperand(rdx, JSArray::kLengthOffset), Smi::FromInt(1));
3555 __ movq(rdi, FieldOperand(rdx, JSObject::kElementsOffset)); 3571 __ movq(rdi, FieldOperand(rdx, JSObject::kElementsOffset));
(...skipping 11 matching lines...)
3567 // Grow the array and finish the store. 3583 // Grow the array and finish the store.
3568 __ SmiAddConstant(FieldOperand(rdx, JSArray::kLengthOffset), 3584 __ SmiAddConstant(FieldOperand(rdx, JSArray::kLengthOffset),
3569 Smi::FromInt(1)); 3585 Smi::FromInt(1));
3570 __ jmp(&finish_store); 3586 __ jmp(&finish_store);
3571 3587
3572 __ bind(&slow); 3588 __ bind(&slow);
3573 TailCallBuiltin(masm, Builtins::kKeyedStoreIC_Slow); 3589 TailCallBuiltin(masm, Builtins::kKeyedStoreIC_Slow);
3574 } 3590 }
3575 } 3591 }
3576 3592
3577 3593 #undef __a
3594 #undef __k
3578 #undef __ 3595 #undef __
3579 3596
3580 } } // namespace v8::internal 3597 } } // namespace v8::internal
3581 3598
3582 #endif // V8_TARGET_ARCH_X64 3599 #endif // V8_TARGET_ARCH_X64