Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(1)

Side by Side Diff: src/x64/stub-cache-x64.cc

Issue 196133017: Experimental parser: merge r19949 (Closed) Base URL: https://v8.googlecode.com/svn/branches/experimental/parser
Patch Set: Created 6 years, 9 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« no previous file with comments | « src/x64/macro-assembler-x64.cc ('k') | test/cctest/cctest.h » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show/Hide Comments ('s')
OLDNEW
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 263 matching lines...) Expand 10 before | Expand all | Expand 10 after
274 // Check that the object is a JS array. 274 // Check that the object is a JS array.
275 __ CmpObjectType(receiver, JS_ARRAY_TYPE, scratch); 275 __ CmpObjectType(receiver, JS_ARRAY_TYPE, scratch);
276 __ j(not_equal, miss_label); 276 __ j(not_equal, miss_label);
277 277
278 // Load length directly from the JS array. 278 // Load length directly from the JS array.
279 __ movp(rax, FieldOperand(receiver, JSArray::kLengthOffset)); 279 __ movp(rax, FieldOperand(receiver, JSArray::kLengthOffset));
280 __ ret(0); 280 __ ret(0);
281 } 281 }
282 282
283 283
284 // Generate code to check if an object is a string. If the object is
285 // a string, the map's instance type is left in the scratch register.
286 static void GenerateStringCheck(MacroAssembler* masm,
287 Register receiver,
288 Register scratch,
289 Label* smi,
290 Label* non_string_object) {
291 // Check that the object isn't a smi.
292 __ JumpIfSmi(receiver, smi);
293
294 // Check that the object is a string.
295 __ movp(scratch, FieldOperand(receiver, HeapObject::kMapOffset));
296 __ movzxbq(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
297 STATIC_ASSERT(kNotStringTag != 0);
298 __ testl(scratch, Immediate(kNotStringTag));
299 __ j(not_zero, non_string_object);
300 }
301
302
303 void StubCompiler::GenerateLoadStringLength(MacroAssembler* masm,
304 Register receiver,
305 Register scratch1,
306 Register scratch2,
307 Label* miss) {
308 Label check_wrapper;
309
310 // Check if the object is a string leaving the instance type in the
311 // scratch register.
312 GenerateStringCheck(masm, receiver, scratch1, miss, &check_wrapper);
313
314 // Load length directly from the string.
315 __ movp(rax, FieldOperand(receiver, String::kLengthOffset));
316 __ ret(0);
317
318 // Check if the object is a JSValue wrapper.
319 __ bind(&check_wrapper);
320 __ cmpl(scratch1, Immediate(JS_VALUE_TYPE));
321 __ j(not_equal, miss);
322
323 // Check if the wrapped value is a string and load the length
324 // directly if it is.
325 __ movp(scratch2, FieldOperand(receiver, JSValue::kValueOffset));
326 GenerateStringCheck(masm, scratch2, scratch1, miss, miss);
327 __ movp(rax, FieldOperand(scratch2, String::kLengthOffset));
328 __ ret(0);
329 }
330
331
332 void StubCompiler::GenerateLoadFunctionPrototype(MacroAssembler* masm, 284 void StubCompiler::GenerateLoadFunctionPrototype(MacroAssembler* masm,
333 Register receiver, 285 Register receiver,
334 Register result, 286 Register result,
335 Register scratch, 287 Register scratch,
336 Label* miss_label) { 288 Label* miss_label) {
337 __ TryGetFunctionPrototype(receiver, result, miss_label); 289 __ TryGetFunctionPrototype(receiver, result, miss_label);
338 if (!result.is(rax)) __ movp(rax, result); 290 if (!result.is(rax)) __ movp(rax, result);
339 __ ret(0); 291 __ ret(0);
340 } 292 }
341 293
342 294
343 void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm, 295 void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm,
344 Register dst, 296 Register dst,
345 Register src, 297 Register src,
346 bool inobject, 298 bool inobject,
347 int index, 299 int index,
348 Representation representation) { 300 Representation representation) {
349 ASSERT(!FLAG_track_double_fields || !representation.IsDouble()); 301 ASSERT(!representation.IsDouble());
350 int offset = index * kPointerSize; 302 int offset = index * kPointerSize;
351 if (!inobject) { 303 if (!inobject) {
352 // Calculate the offset into the properties array. 304 // Calculate the offset into the properties array.
353 offset = offset + FixedArray::kHeaderSize; 305 offset = offset + FixedArray::kHeaderSize;
354 __ movp(dst, FieldOperand(src, JSObject::kPropertiesOffset)); 306 __ movp(dst, FieldOperand(src, JSObject::kPropertiesOffset));
355 src = dst; 307 src = dst;
356 } 308 }
357 __ movp(dst, FieldOperand(src, offset)); 309 __ movp(dst, FieldOperand(src, offset));
358 } 310 }
359 311
(...skipping 170 matching lines...) Expand 10 before | Expand all | Expand 10 after
530 int descriptor = transition->LastAdded(); 482 int descriptor = transition->LastAdded();
531 DescriptorArray* descriptors = transition->instance_descriptors(); 483 DescriptorArray* descriptors = transition->instance_descriptors();
532 PropertyDetails details = descriptors->GetDetails(descriptor); 484 PropertyDetails details = descriptors->GetDetails(descriptor);
533 Representation representation = details.representation(); 485 Representation representation = details.representation();
534 ASSERT(!representation.IsNone()); 486 ASSERT(!representation.IsNone());
535 487
536 if (details.type() == CONSTANT) { 488 if (details.type() == CONSTANT) {
537 Handle<Object> constant(descriptors->GetValue(descriptor), masm->isolate()); 489 Handle<Object> constant(descriptors->GetValue(descriptor), masm->isolate());
538 __ Cmp(value_reg, constant); 490 __ Cmp(value_reg, constant);
539 __ j(not_equal, miss_label); 491 __ j(not_equal, miss_label);
540 } else if (FLAG_track_fields && representation.IsSmi()) { 492 } else if (representation.IsSmi()) {
541 __ JumpIfNotSmi(value_reg, miss_label); 493 __ JumpIfNotSmi(value_reg, miss_label);
542 } else if (FLAG_track_heap_object_fields && representation.IsHeapObject()) { 494 } else if (representation.IsHeapObject()) {
543 __ JumpIfSmi(value_reg, miss_label); 495 __ JumpIfSmi(value_reg, miss_label);
544 } else if (FLAG_track_double_fields && representation.IsDouble()) { 496 } else if (representation.IsDouble()) {
545 Label do_store, heap_number; 497 Label do_store, heap_number;
546 __ AllocateHeapNumber(storage_reg, scratch1, slow); 498 __ AllocateHeapNumber(storage_reg, scratch1, slow);
547 499
548 __ JumpIfNotSmi(value_reg, &heap_number); 500 __ JumpIfNotSmi(value_reg, &heap_number);
549 __ SmiToInteger32(scratch1, value_reg); 501 __ SmiToInteger32(scratch1, value_reg);
550 __ Cvtlsi2sd(xmm0, scratch1); 502 __ Cvtlsi2sd(xmm0, scratch1);
551 __ jmp(&do_store); 503 __ jmp(&do_store);
552 504
553 __ bind(&heap_number); 505 __ bind(&heap_number);
554 __ CheckMap(value_reg, masm->isolate()->factory()->heap_number_map(), 506 __ CheckMap(value_reg, masm->isolate()->factory()->heap_number_map(),
(...skipping 52 matching lines...) Expand 10 before | Expand all | Expand 10 after
607 // face of a transition we can use the old map here because the size of the 559 // face of a transition we can use the old map here because the size of the
608 // object and the number of in-object properties is not going to change. 560 // object and the number of in-object properties is not going to change.
609 index -= object->map()->inobject_properties(); 561 index -= object->map()->inobject_properties();
610 562
611 // TODO(verwaest): Share this code as a code stub. 563 // TODO(verwaest): Share this code as a code stub.
612 SmiCheck smi_check = representation.IsTagged() 564 SmiCheck smi_check = representation.IsTagged()
613 ? INLINE_SMI_CHECK : OMIT_SMI_CHECK; 565 ? INLINE_SMI_CHECK : OMIT_SMI_CHECK;
614 if (index < 0) { 566 if (index < 0) {
615 // Set the property straight into the object. 567 // Set the property straight into the object.
616 int offset = object->map()->instance_size() + (index * kPointerSize); 568 int offset = object->map()->instance_size() + (index * kPointerSize);
617 if (FLAG_track_double_fields && representation.IsDouble()) { 569 if (representation.IsDouble()) {
618 __ movp(FieldOperand(receiver_reg, offset), storage_reg); 570 __ movp(FieldOperand(receiver_reg, offset), storage_reg);
619 } else { 571 } else {
620 __ movp(FieldOperand(receiver_reg, offset), value_reg); 572 __ movp(FieldOperand(receiver_reg, offset), value_reg);
621 } 573 }
622 574
623 if (!FLAG_track_fields || !representation.IsSmi()) { 575 if (!representation.IsSmi()) {
624 // Update the write barrier for the array address. 576 // Update the write barrier for the array address.
625 if (!FLAG_track_double_fields || !representation.IsDouble()) { 577 if (!representation.IsDouble()) {
626 __ movp(storage_reg, value_reg); 578 __ movp(storage_reg, value_reg);
627 } 579 }
628 __ RecordWriteField( 580 __ RecordWriteField(
629 receiver_reg, offset, storage_reg, scratch1, kDontSaveFPRegs, 581 receiver_reg, offset, storage_reg, scratch1, kDontSaveFPRegs,
630 EMIT_REMEMBERED_SET, smi_check); 582 EMIT_REMEMBERED_SET, smi_check);
631 } 583 }
632 } else { 584 } else {
633 // Write to the properties array. 585 // Write to the properties array.
634 int offset = index * kPointerSize + FixedArray::kHeaderSize; 586 int offset = index * kPointerSize + FixedArray::kHeaderSize;
635 // Get the properties array (optimistically). 587 // Get the properties array (optimistically).
636 __ movp(scratch1, FieldOperand(receiver_reg, JSObject::kPropertiesOffset)); 588 __ movp(scratch1, FieldOperand(receiver_reg, JSObject::kPropertiesOffset));
637 if (FLAG_track_double_fields && representation.IsDouble()) { 589 if (representation.IsDouble()) {
638 __ movp(FieldOperand(scratch1, offset), storage_reg); 590 __ movp(FieldOperand(scratch1, offset), storage_reg);
639 } else { 591 } else {
640 __ movp(FieldOperand(scratch1, offset), value_reg); 592 __ movp(FieldOperand(scratch1, offset), value_reg);
641 } 593 }
642 594
643 if (!FLAG_track_fields || !representation.IsSmi()) { 595 if (!representation.IsSmi()) {
644 // Update the write barrier for the array address. 596 // Update the write barrier for the array address.
645 if (!FLAG_track_double_fields || !representation.IsDouble()) { 597 if (!representation.IsDouble()) {
646 __ movp(storage_reg, value_reg); 598 __ movp(storage_reg, value_reg);
647 } 599 }
648 __ RecordWriteField( 600 __ RecordWriteField(
649 scratch1, offset, storage_reg, receiver_reg, kDontSaveFPRegs, 601 scratch1, offset, storage_reg, receiver_reg, kDontSaveFPRegs,
650 EMIT_REMEMBERED_SET, smi_check); 602 EMIT_REMEMBERED_SET, smi_check);
651 } 603 }
652 } 604 }
653 605
654 // Return the value (register rax). 606 // Return the value (register rax).
655 ASSERT(value_reg.is(rax)); 607 ASSERT(value_reg.is(rax));
(...skipping 18 matching lines...) Expand all
674 626
675 int index = lookup->GetFieldIndex().field_index(); 627 int index = lookup->GetFieldIndex().field_index();
676 628
677 // Adjust for the number of properties stored in the object. Even in the 629 // Adjust for the number of properties stored in the object. Even in the
678 // face of a transition we can use the old map here because the size of the 630 // face of a transition we can use the old map here because the size of the
679 // object and the number of in-object properties is not going to change. 631 // object and the number of in-object properties is not going to change.
680 index -= object->map()->inobject_properties(); 632 index -= object->map()->inobject_properties();
681 633
682 Representation representation = lookup->representation(); 634 Representation representation = lookup->representation();
683 ASSERT(!representation.IsNone()); 635 ASSERT(!representation.IsNone());
684 if (FLAG_track_fields && representation.IsSmi()) { 636 if (representation.IsSmi()) {
685 __ JumpIfNotSmi(value_reg, miss_label); 637 __ JumpIfNotSmi(value_reg, miss_label);
686 } else if (FLAG_track_heap_object_fields && representation.IsHeapObject()) { 638 } else if (representation.IsHeapObject()) {
687 __ JumpIfSmi(value_reg, miss_label); 639 __ JumpIfSmi(value_reg, miss_label);
688 } else if (FLAG_track_double_fields && representation.IsDouble()) { 640 } else if (representation.IsDouble()) {
689 // Load the double storage. 641 // Load the double storage.
690 if (index < 0) { 642 if (index < 0) {
691 int offset = object->map()->instance_size() + (index * kPointerSize); 643 int offset = object->map()->instance_size() + (index * kPointerSize);
692 __ movp(scratch1, FieldOperand(receiver_reg, offset)); 644 __ movp(scratch1, FieldOperand(receiver_reg, offset));
693 } else { 645 } else {
694 __ movp(scratch1, 646 __ movp(scratch1,
695 FieldOperand(receiver_reg, JSObject::kPropertiesOffset)); 647 FieldOperand(receiver_reg, JSObject::kPropertiesOffset));
696 int offset = index * kPointerSize + FixedArray::kHeaderSize; 648 int offset = index * kPointerSize + FixedArray::kHeaderSize;
697 __ movp(scratch1, FieldOperand(scratch1, offset)); 649 __ movp(scratch1, FieldOperand(scratch1, offset));
698 } 650 }
(...skipping 18 matching lines...) Expand all
717 } 669 }
718 670
719 // TODO(verwaest): Share this code as a code stub. 671 // TODO(verwaest): Share this code as a code stub.
720 SmiCheck smi_check = representation.IsTagged() 672 SmiCheck smi_check = representation.IsTagged()
721 ? INLINE_SMI_CHECK : OMIT_SMI_CHECK; 673 ? INLINE_SMI_CHECK : OMIT_SMI_CHECK;
722 if (index < 0) { 674 if (index < 0) {
723 // Set the property straight into the object. 675 // Set the property straight into the object.
724 int offset = object->map()->instance_size() + (index * kPointerSize); 676 int offset = object->map()->instance_size() + (index * kPointerSize);
725 __ movp(FieldOperand(receiver_reg, offset), value_reg); 677 __ movp(FieldOperand(receiver_reg, offset), value_reg);
726 678
727 if (!FLAG_track_fields || !representation.IsSmi()) { 679 if (!representation.IsSmi()) {
728 // Update the write barrier for the array address. 680 // Update the write barrier for the array address.
729 // Pass the value being stored in the now unused name_reg. 681 // Pass the value being stored in the now unused name_reg.
730 __ movp(name_reg, value_reg); 682 __ movp(name_reg, value_reg);
731 __ RecordWriteField( 683 __ RecordWriteField(
732 receiver_reg, offset, name_reg, scratch1, kDontSaveFPRegs, 684 receiver_reg, offset, name_reg, scratch1, kDontSaveFPRegs,
733 EMIT_REMEMBERED_SET, smi_check); 685 EMIT_REMEMBERED_SET, smi_check);
734 } 686 }
735 } else { 687 } else {
736 // Write to the properties array. 688 // Write to the properties array.
737 int offset = index * kPointerSize + FixedArray::kHeaderSize; 689 int offset = index * kPointerSize + FixedArray::kHeaderSize;
738 // Get the properties array (optimistically). 690 // Get the properties array (optimistically).
739 __ movp(scratch1, FieldOperand(receiver_reg, JSObject::kPropertiesOffset)); 691 __ movp(scratch1, FieldOperand(receiver_reg, JSObject::kPropertiesOffset));
740 __ movp(FieldOperand(scratch1, offset), value_reg); 692 __ movp(FieldOperand(scratch1, offset), value_reg);
741 693
742 if (!FLAG_track_fields || !representation.IsSmi()) { 694 if (!representation.IsSmi()) {
743 // Update the write barrier for the array address. 695 // Update the write barrier for the array address.
744 // Pass the value being stored in the now unused name_reg. 696 // Pass the value being stored in the now unused name_reg.
745 __ movp(name_reg, value_reg); 697 __ movp(name_reg, value_reg);
746 __ RecordWriteField( 698 __ RecordWriteField(
747 scratch1, offset, name_reg, receiver_reg, kDontSaveFPRegs, 699 scratch1, offset, name_reg, receiver_reg, kDontSaveFPRegs,
748 EMIT_REMEMBERED_SET, smi_check); 700 EMIT_REMEMBERED_SET, smi_check);
749 } 701 }
750 } 702 }
751 703
752 // Return the value (register rax). 704 // Return the value (register rax).
(...skipping 394 matching lines...) Expand 10 before | Expand all | Expand 10 after
1147 } 1099 }
1148 1100
1149 1101
1150 #undef __ 1102 #undef __
1151 #define __ ACCESS_MASM(masm) 1103 #define __ ACCESS_MASM(masm)
1152 1104
1153 1105
1154 void StoreStubCompiler::GenerateStoreViaSetter( 1106 void StoreStubCompiler::GenerateStoreViaSetter(
1155 MacroAssembler* masm, 1107 MacroAssembler* masm,
1156 Handle<HeapType> type, 1108 Handle<HeapType> type,
1109 Register receiver,
1157 Handle<JSFunction> setter) { 1110 Handle<JSFunction> setter) {
1158 // ----------- S t a t e ------------- 1111 // ----------- S t a t e -------------
1159 // -- rax : value
1160 // -- rcx : name
1161 // -- rdx : receiver
1162 // -- rsp[0] : return address 1112 // -- rsp[0] : return address
1163 // ----------------------------------- 1113 // -----------------------------------
1164 { 1114 {
1165 FrameScope scope(masm, StackFrame::INTERNAL); 1115 FrameScope scope(masm, StackFrame::INTERNAL);
1166 Register receiver = rdx;
1167 Register value = rax;
1168 1116
1169 // Save value register, so we can restore it later. 1117 // Save value register, so we can restore it later.
1170 __ push(value); 1118 __ push(value());
1171 1119
1172 if (!setter.is_null()) { 1120 if (!setter.is_null()) {
1173 // Call the JavaScript setter with receiver and value on the stack. 1121 // Call the JavaScript setter with receiver and value on the stack.
1174 if (IC::TypeToMap(*type, masm->isolate())->IsJSGlobalObjectMap()) { 1122 if (IC::TypeToMap(*type, masm->isolate())->IsJSGlobalObjectMap()) {
1175 // Swap in the global receiver. 1123 // Swap in the global receiver.
1176 __ movp(receiver, 1124 __ movp(receiver,
1177 FieldOperand(receiver, JSGlobalObject::kGlobalReceiverOffset)); 1125 FieldOperand(receiver, JSGlobalObject::kGlobalReceiverOffset));
1178 } 1126 }
1179 __ push(receiver); 1127 __ push(receiver);
1180 __ push(value); 1128 __ push(value());
1181 ParameterCount actual(1); 1129 ParameterCount actual(1);
1182 ParameterCount expected(setter); 1130 ParameterCount expected(setter);
1183 __ InvokeFunction(setter, expected, actual, 1131 __ InvokeFunction(setter, expected, actual,
1184 CALL_FUNCTION, NullCallWrapper()); 1132 CALL_FUNCTION, NullCallWrapper());
1185 } else { 1133 } else {
1186 // If we generate a global code snippet for deoptimization only, remember 1134 // If we generate a global code snippet for deoptimization only, remember
1187 // the place to continue after deoptimization. 1135 // the place to continue after deoptimization.
1188 masm->isolate()->heap()->SetSetterStubDeoptPCOffset(masm->pc_offset()); 1136 masm->isolate()->heap()->SetSetterStubDeoptPCOffset(masm->pc_offset());
1189 } 1137 }
1190 1138
(...skipping 87 matching lines...) Expand 10 before | Expand all | Expand 10 after
1278 } 1226 }
1279 1227
1280 1228
1281 Register* KeyedLoadStubCompiler::registers() { 1229 Register* KeyedLoadStubCompiler::registers() {
1282 // receiver, name, scratch1, scratch2, scratch3, scratch4. 1230 // receiver, name, scratch1, scratch2, scratch3, scratch4.
1283 static Register registers[] = { rdx, rax, rbx, rcx, rdi, r8 }; 1231 static Register registers[] = { rdx, rax, rbx, rcx, rdi, r8 };
1284 return registers; 1232 return registers;
1285 } 1233 }
1286 1234
1287 1235
1236 Register StoreStubCompiler::value() {
1237 return rax;
1238 }
1239
1240
1288 Register* StoreStubCompiler::registers() { 1241 Register* StoreStubCompiler::registers() {
1289 // receiver, name, value, scratch1, scratch2, scratch3. 1242 // receiver, name, scratch1, scratch2, scratch3.
1290 static Register registers[] = { rdx, rcx, rax, rbx, rdi, r8 }; 1243 static Register registers[] = { rdx, rcx, rbx, rdi, r8 };
1291 return registers; 1244 return registers;
1292 } 1245 }
1293 1246
1294 1247
1295 Register* KeyedStoreStubCompiler::registers() { 1248 Register* KeyedStoreStubCompiler::registers() {
1296 // receiver, name, value, scratch1, scratch2, scratch3. 1249 // receiver, name, scratch1, scratch2, scratch3.
1297 static Register registers[] = { rdx, rcx, rax, rbx, rdi, r8 }; 1250 static Register registers[] = { rdx, rcx, rbx, rdi, r8 };
1298 return registers; 1251 return registers;
1299 } 1252 }
1300 1253
1301 1254
1302 #undef __ 1255 #undef __
1303 #define __ ACCESS_MASM(masm) 1256 #define __ ACCESS_MASM(masm)
1304 1257
1305 1258
1306 void LoadStubCompiler::GenerateLoadViaGetter(MacroAssembler* masm, 1259 void LoadStubCompiler::GenerateLoadViaGetter(MacroAssembler* masm,
1307 Handle<HeapType> type, 1260 Handle<HeapType> type,
(...skipping 165 matching lines...) Expand 10 before | Expand all | Expand 10 after
1473 // ----------------------------------- 1426 // -----------------------------------
1474 TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Miss); 1427 TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Miss);
1475 } 1428 }
1476 1429
1477 1430
1478 #undef __ 1431 #undef __
1479 1432
1480 } } // namespace v8::internal 1433 } } // namespace v8::internal
1481 1434
1482 #endif // V8_TARGET_ARCH_X64 1435 #endif // V8_TARGET_ARCH_X64
OLDNEW
« no previous file with comments | « src/x64/macro-assembler-x64.cc ('k') | test/cctest/cctest.h » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698