Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(308)

Side by Side Diff: src/arm/lithium-codegen-arm.cc

Issue 6577036: [Isolates] Merge from bleeding_edge to isolates, revisions 6100-6300. (Closed) Base URL: http://v8.googlecode.com/svn/branches/experimental/isolates/
Patch Set: '' Created 9 years, 10 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« no previous file with comments | « src/arm/lithium-codegen-arm.h ('k') | src/arm/macro-assembler-arm.h » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2010 the V8 project authors. All rights reserved. 1 // Copyright 2011 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution. 11 // with the distribution.
(...skipping 305 matching lines...) Expand 10 before | Expand all | Expand 10 after
317 // Local or spill slot. Skip the frame pointer, function, and 317 // Local or spill slot. Skip the frame pointer, function, and
318 // context in the fixed part of the frame. 318 // context in the fixed part of the frame.
319 return MemOperand(fp, -(index + 3) * kPointerSize); 319 return MemOperand(fp, -(index + 3) * kPointerSize);
320 } else { 320 } else {
321 // Incoming parameter. Skip the return address. 321 // Incoming parameter. Skip the return address.
322 return MemOperand(fp, -(index - 1) * kPointerSize); 322 return MemOperand(fp, -(index - 1) * kPointerSize);
323 } 323 }
324 } 324 }
325 325
326 326
327 void LCodeGen::WriteTranslation(LEnvironment* environment,
328 Translation* translation) {
329 if (environment == NULL) return;
330
331 // The translation includes one command per value in the environment.
332 int translation_size = environment->values()->length();
333 // The output frame height does not include the parameters.
334 int height = translation_size - environment->parameter_count();
335
336 WriteTranslation(environment->outer(), translation);
337 int closure_id = DefineDeoptimizationLiteral(environment->closure());
338 translation->BeginFrame(environment->ast_id(), closure_id, height);
339 for (int i = 0; i < translation_size; ++i) {
340 LOperand* value = environment->values()->at(i);
341 // spilled_registers_ and spilled_double_registers_ are either
342 // both NULL or both set.
343 if (environment->spilled_registers() != NULL && value != NULL) {
344 if (value->IsRegister() &&
345 environment->spilled_registers()[value->index()] != NULL) {
346 translation->MarkDuplicate();
347 AddToTranslation(translation,
348 environment->spilled_registers()[value->index()],
349 environment->HasTaggedValueAt(i));
350 } else if (
351 value->IsDoubleRegister() &&
352 environment->spilled_double_registers()[value->index()] != NULL) {
353 translation->MarkDuplicate();
354 AddToTranslation(
355 translation,
356 environment->spilled_double_registers()[value->index()],
357 false);
358 }
359 }
360
361 AddToTranslation(translation, value, environment->HasTaggedValueAt(i));
362 }
363 }
364
365
327 void LCodeGen::AddToTranslation(Translation* translation, 366 void LCodeGen::AddToTranslation(Translation* translation,
328 LOperand* op, 367 LOperand* op,
329 bool is_tagged) { 368 bool is_tagged) {
330 if (op == NULL) { 369 if (op == NULL) {
331 // TODO(twuerthinger): Introduce marker operands to indicate that this value 370 // TODO(twuerthinger): Introduce marker operands to indicate that this value
332 // is not present and must be reconstructed from the deoptimizer. Currently 371 // is not present and must be reconstructed from the deoptimizer. Currently
333 // this is only used for the arguments object. 372 // this is only used for the arguments object.
334 translation->StoreArgumentsObject(); 373 translation->StoreArgumentsObject();
335 } else if (op->IsStackSlot()) { 374 } else if (op->IsStackSlot()) {
336 if (is_tagged) { 375 if (is_tagged) {
(...skipping 95 matching lines...) Expand 10 before | Expand all | Expand 10 after
432 // Layout of the translation: 471 // Layout of the translation:
433 // 0 ........................................................ size - 1 + 4 472 // 0 ........................................................ size - 1 + 4
434 // [expression stack including arguments] [locals] [4 words] [parameters] 473 // [expression stack including arguments] [locals] [4 words] [parameters]
435 // |>------------ translation_size ------------<| 474 // |>------------ translation_size ------------<|
436 475
437 int frame_count = 0; 476 int frame_count = 0;
438 for (LEnvironment* e = environment; e != NULL; e = e->outer()) { 477 for (LEnvironment* e = environment; e != NULL; e = e->outer()) {
439 ++frame_count; 478 ++frame_count;
440 } 479 }
441 Translation translation(&translations_, frame_count); 480 Translation translation(&translations_, frame_count);
442 environment->WriteTranslation(this, &translation); 481 WriteTranslation(environment, &translation);
443 int deoptimization_index = deoptimizations_.length(); 482 int deoptimization_index = deoptimizations_.length();
444 environment->Register(deoptimization_index, translation.index()); 483 environment->Register(deoptimization_index, translation.index());
445 deoptimizations_.Add(environment); 484 deoptimizations_.Add(environment);
446 } 485 }
447 } 486 }
448 487
449 488
450 void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) { 489 void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) {
451 RegisterEnvironmentForDeoptimization(environment); 490 RegisterEnvironmentForDeoptimization(environment);
452 ASSERT(environment->HasBeenRegistered()); 491 ASSERT(environment->HasBeenRegistered());
(...skipping 138 matching lines...) Expand 10 before | Expand all | Expand 10 after
591 current_block_ = label->block_id(); 630 current_block_ = label->block_id();
592 LCodeGen::DoGap(label); 631 LCodeGen::DoGap(label);
593 } 632 }
594 633
595 634
596 void LCodeGen::DoParallelMove(LParallelMove* move) { 635 void LCodeGen::DoParallelMove(LParallelMove* move) {
597 // d0 must always be a scratch register. 636 // d0 must always be a scratch register.
598 DoubleRegister dbl_scratch = d0; 637 DoubleRegister dbl_scratch = d0;
599 LUnallocated marker_operand(LUnallocated::NONE); 638 LUnallocated marker_operand(LUnallocated::NONE);
600 639
601 Register core_scratch = r9; 640 Register core_scratch = scratch0();
602 bool destroys_core_scratch = false; 641 bool destroys_core_scratch = false;
603 642
604 LGapResolver resolver(move->move_operands(), &marker_operand); 643 const ZoneList<LMoveOperands>* moves =
605 const ZoneList<LMoveOperands>* moves = resolver.ResolveInReverseOrder(); 644 resolver_.Resolve(move->move_operands(), &marker_operand);
606 for (int i = moves->length() - 1; i >= 0; --i) { 645 for (int i = moves->length() - 1; i >= 0; --i) {
607 LMoveOperands move = moves->at(i); 646 LMoveOperands move = moves->at(i);
608 LOperand* from = move.from(); 647 LOperand* from = move.from();
609 LOperand* to = move.to(); 648 LOperand* to = move.to();
610 ASSERT(!from->IsDoubleRegister() || 649 ASSERT(!from->IsDoubleRegister() ||
611 !ToDoubleRegister(from).is(dbl_scratch)); 650 !ToDoubleRegister(from).is(dbl_scratch));
612 ASSERT(!to->IsDoubleRegister() || !ToDoubleRegister(to).is(dbl_scratch)); 651 ASSERT(!to->IsDoubleRegister() || !ToDoubleRegister(to).is(dbl_scratch));
613 ASSERT(!from->IsRegister() || !ToRegister(from).is(core_scratch)); 652 ASSERT(!from->IsRegister() || !ToRegister(from).is(core_scratch));
614 ASSERT(!to->IsRegister() || !ToRegister(to).is(core_scratch)); 653 ASSERT(!to->IsRegister() || !ToRegister(to).is(core_scratch));
615 if (from == &marker_operand) { 654 if (from == &marker_operand) {
(...skipping 107 matching lines...) Expand 10 before | Expand all | Expand 10 after
723 } 762 }
724 } 763 }
725 764
726 765
727 void LCodeGen::DoParameter(LParameter* instr) { 766 void LCodeGen::DoParameter(LParameter* instr) {
728 // Nothing to do. 767 // Nothing to do.
729 } 768 }
730 769
731 770
732 void LCodeGen::DoCallStub(LCallStub* instr) { 771 void LCodeGen::DoCallStub(LCallStub* instr) {
733 Abort("DoCallStub unimplemented."); 772 ASSERT(ToRegister(instr->result()).is(r0));
773 switch (instr->hydrogen()->major_key()) {
774 case CodeStub::RegExpConstructResult: {
775 RegExpConstructResultStub stub;
776 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
777 break;
778 }
779 case CodeStub::RegExpExec: {
780 RegExpExecStub stub;
781 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
782 break;
783 }
784 case CodeStub::SubString: {
785 SubStringStub stub;
786 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
787 break;
788 }
789 case CodeStub::StringCharAt: {
790 Abort("StringCharAtStub unimplemented.");
791 break;
792 }
793 case CodeStub::MathPow: {
794 Abort("MathPowStub unimplemented.");
795 break;
796 }
797 case CodeStub::NumberToString: {
798 NumberToStringStub stub;
799 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
800 break;
801 }
802 case CodeStub::StringAdd: {
803 StringAddStub stub(NO_STRING_ADD_FLAGS);
804 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
805 break;
806 }
807 case CodeStub::StringCompare: {
808 StringCompareStub stub;
809 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
810 break;
811 }
812 case CodeStub::TranscendentalCache: {
813 __ ldr(r0, MemOperand(sp, 0));
814 TranscendentalCacheStub stub(instr->transcendental_type());
815 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
816 break;
817 }
818 default:
819 UNREACHABLE();
820 }
734 } 821 }
735 822
736 823
737 void LCodeGen::DoUnknownOSRValue(LUnknownOSRValue* instr) { 824 void LCodeGen::DoUnknownOSRValue(LUnknownOSRValue* instr) {
738 // Nothing to do. 825 // Nothing to do.
739 } 826 }
740 827
741 828
742 void LCodeGen::DoModI(LModI* instr) { 829 void LCodeGen::DoModI(LModI* instr) {
743 Abort("DoModI unimplemented."); 830 Abort("DoModI unimplemented.");
744 } 831 }
745 832
746 833
747 void LCodeGen::DoDivI(LDivI* instr) { 834 void LCodeGen::DoDivI(LDivI* instr) {
748 Abort("DoDivI unimplemented."); 835 Abort("DoDivI unimplemented.");
749 } 836 }
750 837
751 838
752 void LCodeGen::DoMulI(LMulI* instr) { 839 void LCodeGen::DoMulI(LMulI* instr) {
840 Register scratch = scratch0();
753 Register left = ToRegister(instr->left()); 841 Register left = ToRegister(instr->left());
754 Register scratch = r9;
755 Register right = EmitLoadRegister(instr->right(), scratch); 842 Register right = EmitLoadRegister(instr->right(), scratch);
756 843
757 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero) && 844 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero) &&
758 !instr->right()->IsConstantOperand()) { 845 !instr->right()->IsConstantOperand()) {
759 __ orr(ToRegister(instr->temp()), left, right); 846 __ orr(ToRegister(instr->temp()), left, right);
760 } 847 }
761 848
762 if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) { 849 if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
763 // scratch:left = left * right. 850 // scratch:left = left * right.
764 __ smull(scratch, left, left, right); 851 __ smull(scratch, left, left, right);
(...skipping 41 matching lines...) Expand 10 before | Expand all | Expand 10 after
806 __ eor(result, ToRegister(left), Operand(right_reg)); 893 __ eor(result, ToRegister(left), Operand(right_reg));
807 break; 894 break;
808 default: 895 default:
809 UNREACHABLE(); 896 UNREACHABLE();
810 break; 897 break;
811 } 898 }
812 } 899 }
813 900
814 901
815 void LCodeGen::DoShiftI(LShiftI* instr) { 902 void LCodeGen::DoShiftI(LShiftI* instr) {
903 Register scratch = scratch0();
816 LOperand* left = instr->left(); 904 LOperand* left = instr->left();
817 LOperand* right = instr->right(); 905 LOperand* right = instr->right();
818 ASSERT(left->Equals(instr->result())); 906 ASSERT(left->Equals(instr->result()));
819 ASSERT(left->IsRegister()); 907 ASSERT(left->IsRegister());
820 Register result = ToRegister(left); 908 Register result = ToRegister(left);
821 if (right->IsRegister()) { 909 if (right->IsRegister()) {
822 // Mask the right operand. 910 // Mask the right operand.
823 __ and_(r9, ToRegister(right), Operand(0x1F)); 911 __ and_(scratch, ToRegister(right), Operand(0x1F));
824 switch (instr->op()) { 912 switch (instr->op()) {
825 case Token::SAR: 913 case Token::SAR:
826 __ mov(result, Operand(result, ASR, r9)); 914 __ mov(result, Operand(result, ASR, scratch));
827 break; 915 break;
828 case Token::SHR: 916 case Token::SHR:
829 if (instr->can_deopt()) { 917 if (instr->can_deopt()) {
830 __ mov(result, Operand(result, LSR, r9), SetCC); 918 __ mov(result, Operand(result, LSR, scratch), SetCC);
831 DeoptimizeIf(mi, instr->environment()); 919 DeoptimizeIf(mi, instr->environment());
832 } else { 920 } else {
833 __ mov(result, Operand(result, LSR, r9)); 921 __ mov(result, Operand(result, LSR, scratch));
834 } 922 }
835 break; 923 break;
836 case Token::SHL: 924 case Token::SHL:
837 __ mov(result, Operand(result, LSL, r9)); 925 __ mov(result, Operand(result, LSL, scratch));
838 break; 926 break;
839 default: 927 default:
840 UNREACHABLE(); 928 UNREACHABLE();
841 break; 929 break;
842 } 930 }
843 } else { 931 } else {
844 int value = ToInteger32(LConstantOperand::cast(right)); 932 int value = ToInteger32(LConstantOperand::cast(right));
845 uint8_t shift_count = static_cast<uint8_t>(value & 0x1F); 933 uint8_t shift_count = static_cast<uint8_t>(value & 0x1F);
846 switch (instr->op()) { 934 switch (instr->op()) {
847 case Token::SAR: 935 case Token::SAR:
(...skipping 43 matching lines...) Expand 10 before | Expand all | Expand 10 after
891 Abort("DoConstantD unimplemented."); 979 Abort("DoConstantD unimplemented.");
892 } 980 }
893 981
894 982
895 void LCodeGen::DoConstantT(LConstantT* instr) { 983 void LCodeGen::DoConstantT(LConstantT* instr) {
896 ASSERT(instr->result()->IsRegister()); 984 ASSERT(instr->result()->IsRegister());
897 __ mov(ToRegister(instr->result()), Operand(instr->value())); 985 __ mov(ToRegister(instr->result()), Operand(instr->value()));
898 } 986 }
899 987
900 988
901 void LCodeGen::DoArrayLength(LArrayLength* instr) { 989 void LCodeGen::DoJSArrayLength(LJSArrayLength* instr) {
902 Register result = ToRegister(instr->result()); 990 Register result = ToRegister(instr->result());
991 Register array = ToRegister(instr->input());
992 __ ldr(result, FieldMemOperand(array, JSArray::kLengthOffset));
993 }
903 994
904 if (instr->hydrogen()->value()->IsLoadElements()) {
905 // We load the length directly from the elements array.
906 Register elements = ToRegister(instr->input());
907 __ ldr(result, FieldMemOperand(elements, FixedArray::kLengthOffset));
908 } else {
909 // Check that the receiver really is an array.
910 Register array = ToRegister(instr->input());
911 Register temporary = ToRegister(instr->temporary());
912 __ CompareObjectType(array, temporary, temporary, JS_ARRAY_TYPE);
913 DeoptimizeIf(ne, instr->environment());
914 995
915 // Load length directly from the array. 996 void LCodeGen::DoFixedArrayLength(LFixedArrayLength* instr) {
916 __ ldr(result, FieldMemOperand(array, JSArray::kLengthOffset)); 997 Register result = ToRegister(instr->result());
917 } 998 Register array = ToRegister(instr->input());
918 Abort("DoArrayLength untested."); 999 __ ldr(result, FieldMemOperand(array, FixedArray::kLengthOffset));
919 } 1000 }
920 1001
921 1002
922 void LCodeGen::DoValueOf(LValueOf* instr) { 1003 void LCodeGen::DoValueOf(LValueOf* instr) {
923 Abort("DoValueOf unimplemented."); 1004 Register input = ToRegister(instr->input());
1005 Register result = ToRegister(instr->result());
1006 Register map = ToRegister(instr->temporary());
1007 ASSERT(input.is(result));
1008 Label done;
1009
1010 // If the object is a smi return the object.
1011 __ tst(input, Operand(kSmiTagMask));
1012 __ b(eq, &done);
1013
1014 // If the object is not a value type, return the object.
1015 __ CompareObjectType(input, map, map, JS_VALUE_TYPE);
1016 __ b(ne, &done);
1017 __ ldr(result, FieldMemOperand(input, JSValue::kValueOffset));
1018
1019 __ bind(&done);
924 } 1020 }
925 1021
926 1022
927 void LCodeGen::DoBitNotI(LBitNotI* instr) { 1023 void LCodeGen::DoBitNotI(LBitNotI* instr) {
928 LOperand* input = instr->input(); 1024 LOperand* input = instr->input();
929 ASSERT(input->Equals(instr->result())); 1025 ASSERT(input->Equals(instr->result()));
930 __ mvn(ToRegister(input), Operand(ToRegister(input))); 1026 __ mvn(ToRegister(input), Operand(ToRegister(input)));
931 Abort("DoBitNotI untested.");
932 } 1027 }
933 1028
934 1029
935 void LCodeGen::DoThrow(LThrow* instr) { 1030 void LCodeGen::DoThrow(LThrow* instr) {
936 Register input_reg = EmitLoadRegister(instr->input(), ip); 1031 Register input_reg = EmitLoadRegister(instr->input(), ip);
937 __ push(input_reg); 1032 __ push(input_reg);
938 CallRuntime(Runtime::kThrow, 1, instr); 1033 CallRuntime(Runtime::kThrow, 1, instr);
939 1034
940 if (FLAG_debug_code) { 1035 if (FLAG_debug_code) {
941 __ stop("Unreachable code."); 1036 __ stop("Unreachable code.");
(...skipping 86 matching lines...) Expand 10 before | Expand all | Expand 10 after
1028 int true_block = chunk_->LookupDestination(instr->true_block_id()); 1123 int true_block = chunk_->LookupDestination(instr->true_block_id());
1029 int false_block = chunk_->LookupDestination(instr->false_block_id()); 1124 int false_block = chunk_->LookupDestination(instr->false_block_id());
1030 1125
1031 Representation r = instr->hydrogen()->representation(); 1126 Representation r = instr->hydrogen()->representation();
1032 if (r.IsInteger32()) { 1127 if (r.IsInteger32()) {
1033 Register reg = ToRegister(instr->input()); 1128 Register reg = ToRegister(instr->input());
1034 __ cmp(reg, Operand(0)); 1129 __ cmp(reg, Operand(0));
1035 EmitBranch(true_block, false_block, nz); 1130 EmitBranch(true_block, false_block, nz);
1036 } else if (r.IsDouble()) { 1131 } else if (r.IsDouble()) {
1037 DoubleRegister reg = ToDoubleRegister(instr->input()); 1132 DoubleRegister reg = ToDoubleRegister(instr->input());
1038 __ vcmp(reg, 0.0); 1133 Register scratch = scratch0();
1134
1135 // Test the double value. Zero and NaN are false.
1136 __ VFPCompareAndLoadFlags(reg, 0.0, scratch);
1137 __ tst(scratch, Operand(kVFPZConditionFlagBit | kVFPVConditionFlagBit));
1039 EmitBranch(true_block, false_block, ne); 1138 EmitBranch(true_block, false_block, ne);
1040 } else { 1139 } else {
1041 ASSERT(r.IsTagged()); 1140 ASSERT(r.IsTagged());
1042 Register reg = ToRegister(instr->input()); 1141 Register reg = ToRegister(instr->input());
1043 if (instr->hydrogen()->type().IsBoolean()) { 1142 if (instr->hydrogen()->type().IsBoolean()) {
1044 __ LoadRoot(ip, Heap::kTrueValueRootIndex); 1143 __ LoadRoot(ip, Heap::kTrueValueRootIndex);
1045 __ cmp(reg, ip); 1144 __ cmp(reg, ip);
1046 EmitBranch(true_block, false_block, eq); 1145 EmitBranch(true_block, false_block, eq);
1047 } else { 1146 } else {
1048 Label* true_label = chunk_->GetAssemblyLabel(true_block); 1147 Label* true_label = chunk_->GetAssemblyLabel(true_block);
1049 Label* false_label = chunk_->GetAssemblyLabel(false_block); 1148 Label* false_label = chunk_->GetAssemblyLabel(false_block);
1050 1149
1051 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex); 1150 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
1052 __ cmp(reg, ip); 1151 __ cmp(reg, ip);
1053 __ b(eq, false_label); 1152 __ b(eq, false_label);
1054 __ LoadRoot(ip, Heap::kTrueValueRootIndex); 1153 __ LoadRoot(ip, Heap::kTrueValueRootIndex);
1055 __ cmp(reg, ip); 1154 __ cmp(reg, ip);
1056 __ b(eq, true_label); 1155 __ b(eq, true_label);
1057 __ LoadRoot(ip, Heap::kFalseValueRootIndex); 1156 __ LoadRoot(ip, Heap::kFalseValueRootIndex);
1058 __ cmp(reg, ip); 1157 __ cmp(reg, ip);
1059 __ b(eq, false_label); 1158 __ b(eq, false_label);
1060 __ cmp(reg, Operand(0)); 1159 __ cmp(reg, Operand(0));
1061 __ b(eq, false_label); 1160 __ b(eq, false_label);
1062 __ tst(reg, Operand(kSmiTagMask)); 1161 __ tst(reg, Operand(kSmiTagMask));
1063 __ b(eq, true_label); 1162 __ b(eq, true_label);
1064 1163
1065 // Test for double values. Zero is false. 1164 // Test double values. Zero and NaN are false.
1066 Label call_stub; 1165 Label call_stub;
1067 DoubleRegister dbl_scratch = d0; 1166 DoubleRegister dbl_scratch = d0;
1068 Register core_scratch = r9; 1167 Register scratch = scratch0();
1069 ASSERT(!reg.is(core_scratch)); 1168 __ ldr(scratch, FieldMemOperand(reg, HeapObject::kMapOffset));
1070 __ ldr(core_scratch, FieldMemOperand(reg, HeapObject::kMapOffset));
1071 __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex); 1169 __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
1072 __ cmp(core_scratch, Operand(ip)); 1170 __ cmp(scratch, Operand(ip));
1073 __ b(ne, &call_stub); 1171 __ b(ne, &call_stub);
1074 __ sub(ip, reg, Operand(kHeapObjectTag)); 1172 __ sub(ip, reg, Operand(kHeapObjectTag));
1075 __ vldr(dbl_scratch, ip, HeapNumber::kValueOffset); 1173 __ vldr(dbl_scratch, ip, HeapNumber::kValueOffset);
1076 __ vcmp(dbl_scratch, 0.0); 1174 __ VFPCompareAndLoadFlags(dbl_scratch, 0.0, scratch);
1077 __ b(eq, false_label); 1175 __ tst(scratch, Operand(kVFPZConditionFlagBit | kVFPVConditionFlagBit));
1176 __ b(ne, false_label);
1078 __ b(true_label); 1177 __ b(true_label);
1079 1178
1080 // The conversion stub doesn't cause garbage collections so it's 1179 // The conversion stub doesn't cause garbage collections so it's
1081 // safe to not record a safepoint after the call. 1180 // safe to not record a safepoint after the call.
1082 __ bind(&call_stub); 1181 __ bind(&call_stub);
1083 ToBooleanStub stub(reg); 1182 ToBooleanStub stub(reg);
1084 RegList saved_regs = kJSCallerSaved | kCalleeSaved; 1183 RegList saved_regs = kJSCallerSaved | kCalleeSaved;
1085 __ stm(db_w, sp, saved_regs); 1184 __ stm(db_w, sp, saved_regs);
1086 __ CallStub(&stub); 1185 __ CallStub(&stub);
1087 __ cmp(reg, Operand(0)); 1186 __ cmp(reg, Operand(0));
1088 __ ldm(ia_w, sp, saved_regs); 1187 __ ldm(ia_w, sp, saved_regs);
1089 EmitBranch(true_block, false_block, nz); 1188 EmitBranch(true_block, false_block, nz);
1090 } 1189 }
1091 } 1190 }
1092 } 1191 }
1093 1192
1094 1193
1095 void LCodeGen::EmitGoto(int block, LDeferredCode* deferred_stack_check) { 1194 void LCodeGen::EmitGoto(int block, LDeferredCode* deferred_stack_check) {
1096 // TODO(srdjan): Perform stack overflow check if this goto needs it
1097 // before jumping.
1098 block = chunk_->LookupDestination(block); 1195 block = chunk_->LookupDestination(block);
1099 int next_block = GetNextEmittedBlock(current_block_); 1196 int next_block = GetNextEmittedBlock(current_block_);
1100 if (block != next_block) { 1197 if (block != next_block) {
1101 __ jmp(chunk_->GetAssemblyLabel(block)); 1198 // Perform stack overflow check if this goto needs it before jumping.
1199 if (deferred_stack_check != NULL) {
1200 __ LoadRoot(ip, Heap::kStackLimitRootIndex);
1201 __ cmp(sp, Operand(ip));
1202 __ b(hs, chunk_->GetAssemblyLabel(block));
1203 __ jmp(deferred_stack_check->entry());
1204 deferred_stack_check->SetExit(chunk_->GetAssemblyLabel(block));
1205 } else {
1206 __ jmp(chunk_->GetAssemblyLabel(block));
1207 }
1102 } 1208 }
1103 } 1209 }
1104 1210
1105 1211
1106 void LCodeGen::DoDeferredStackCheck(LGoto* instr) { 1212 void LCodeGen::DoDeferredStackCheck(LGoto* instr) {
1107 UNIMPLEMENTED(); 1213 __ PushSafepointRegisters();
1214 __ CallRuntimeSaveDoubles(Runtime::kStackGuard);
1215 RecordSafepointWithRegisters(
1216 instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex);
1217 __ PopSafepointRegisters();
1108 } 1218 }
1109 1219
1110 1220
1111 void LCodeGen::DoGoto(LGoto* instr) { 1221 void LCodeGen::DoGoto(LGoto* instr) {
1112 // TODO(srdjan): Implement deferred stack check. 1222 class DeferredStackCheck: public LDeferredCode {
1113 EmitGoto(instr->block_id(), NULL); 1223 public:
1224 DeferredStackCheck(LCodeGen* codegen, LGoto* instr)
1225 : LDeferredCode(codegen), instr_(instr) { }
1226 virtual void Generate() { codegen()->DoDeferredStackCheck(instr_); }
1227 private:
1228 LGoto* instr_;
1229 };
1230
1231 DeferredStackCheck* deferred = NULL;
1232 if (instr->include_stack_check()) {
1233 deferred = new DeferredStackCheck(this, instr);
1234 }
1235 EmitGoto(instr->block_id(), deferred);
1114 } 1236 }
1115 1237
1116 1238
1117 Condition LCodeGen::TokenToCondition(Token::Value op, bool is_unsigned) { 1239 Condition LCodeGen::TokenToCondition(Token::Value op, bool is_unsigned) {
1118 Condition cond = no_condition; 1240 Condition cond = no_condition;
1119 switch (op) { 1241 switch (op) {
1120 case Token::EQ: 1242 case Token::EQ:
1121 case Token::EQ_STRICT: 1243 case Token::EQ_STRICT:
1122 cond = eq; 1244 cond = eq;
1123 break; 1245 break;
(...skipping 45 matching lines...) Expand 10 before | Expand all | Expand 10 after
1169 Abort("DoCmpJSObjectEq untested."); 1291 Abort("DoCmpJSObjectEq untested.");
1170 } 1292 }
1171 1293
1172 1294
1173 void LCodeGen::DoCmpJSObjectEqAndBranch(LCmpJSObjectEqAndBranch* instr) { 1295 void LCodeGen::DoCmpJSObjectEqAndBranch(LCmpJSObjectEqAndBranch* instr) {
1174 Abort("DoCmpJSObjectEqAndBranch unimplemented."); 1296 Abort("DoCmpJSObjectEqAndBranch unimplemented.");
1175 } 1297 }
1176 1298
1177 1299
1178 void LCodeGen::DoIsNull(LIsNull* instr) { 1300 void LCodeGen::DoIsNull(LIsNull* instr) {
1179 Abort("DoIsNull unimplemented."); 1301 Register reg = ToRegister(instr->input());
1302 Register result = ToRegister(instr->result());
1303
1304 __ LoadRoot(ip, Heap::kNullValueRootIndex);
1305 __ cmp(reg, ip);
1306 if (instr->is_strict()) {
1307 __ LoadRoot(result, Heap::kTrueValueRootIndex, eq);
1308 __ LoadRoot(result, Heap::kFalseValueRootIndex, ne);
1309 } else {
1310 Label true_value, false_value, done;
1311 __ b(eq, &true_value);
1312 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
1313 __ cmp(ip, reg);
1314 __ b(eq, &true_value);
1315 __ tst(reg, Operand(kSmiTagMask));
1316 __ b(eq, &false_value);
1317 // Check for undetectable objects by looking in the bit field in
1318 // the map. The object has already been smi checked.
1319 Register scratch = result;
1320 __ ldr(scratch, FieldMemOperand(reg, HeapObject::kMapOffset));
1321 __ ldrb(scratch, FieldMemOperand(scratch, Map::kBitFieldOffset));
1322 __ tst(scratch, Operand(1 << Map::kIsUndetectable));
1323 __ b(ne, &true_value);
1324 __ bind(&false_value);
1325 __ LoadRoot(result, Heap::kFalseValueRootIndex);
1326 __ jmp(&done);
1327 __ bind(&true_value);
1328 __ LoadRoot(result, Heap::kTrueValueRootIndex);
1329 __ bind(&done);
1330 }
1180 } 1331 }
1181 1332
1182 1333
1183 void LCodeGen::DoIsNullAndBranch(LIsNullAndBranch* instr) { 1334 void LCodeGen::DoIsNullAndBranch(LIsNullAndBranch* instr) {
1335 Register scratch = scratch0();
1184 Register reg = ToRegister(instr->input()); 1336 Register reg = ToRegister(instr->input());
1185 1337
1186 // TODO(fsc): If the expression is known to be a smi, then it's 1338 // TODO(fsc): If the expression is known to be a smi, then it's
1187 // definitely not null. Jump to the false block. 1339 // definitely not null. Jump to the false block.
1188 1340
1189 int true_block = chunk_->LookupDestination(instr->true_block_id()); 1341 int true_block = chunk_->LookupDestination(instr->true_block_id());
1190 int false_block = chunk_->LookupDestination(instr->false_block_id()); 1342 int false_block = chunk_->LookupDestination(instr->false_block_id());
1191 1343
1192 __ LoadRoot(ip, Heap::kNullValueRootIndex); 1344 __ LoadRoot(ip, Heap::kNullValueRootIndex);
1193 __ cmp(reg, ip); 1345 __ cmp(reg, ip);
1194 if (instr->is_strict()) { 1346 if (instr->is_strict()) {
1195 EmitBranch(true_block, false_block, eq); 1347 EmitBranch(true_block, false_block, eq);
1196 } else { 1348 } else {
1197 Label* true_label = chunk_->GetAssemblyLabel(true_block); 1349 Label* true_label = chunk_->GetAssemblyLabel(true_block);
1198 Label* false_label = chunk_->GetAssemblyLabel(false_block); 1350 Label* false_label = chunk_->GetAssemblyLabel(false_block);
1199 __ b(eq, true_label); 1351 __ b(eq, true_label);
1200 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex); 1352 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
1201 __ cmp(reg, ip); 1353 __ cmp(reg, ip);
1202 __ b(eq, true_label); 1354 __ b(eq, true_label);
1203 __ tst(reg, Operand(kSmiTagMask)); 1355 __ tst(reg, Operand(kSmiTagMask));
1204 __ b(eq, false_label); 1356 __ b(eq, false_label);
1205 // Check for undetectable objects by looking in the bit field in 1357 // Check for undetectable objects by looking in the bit field in
1206 // the map. The object has already been smi checked. 1358 // the map. The object has already been smi checked.
1207 Register scratch = ToRegister(instr->temp());
1208 __ ldr(scratch, FieldMemOperand(reg, HeapObject::kMapOffset)); 1359 __ ldr(scratch, FieldMemOperand(reg, HeapObject::kMapOffset));
1209 __ ldrb(scratch, FieldMemOperand(scratch, Map::kBitFieldOffset)); 1360 __ ldrb(scratch, FieldMemOperand(scratch, Map::kBitFieldOffset));
1210 __ tst(scratch, Operand(1 << Map::kIsUndetectable)); 1361 __ tst(scratch, Operand(1 << Map::kIsUndetectable));
1211 EmitBranch(true_block, false_block, ne); 1362 EmitBranch(true_block, false_block, ne);
1212 } 1363 }
1213 } 1364 }
1214 1365
1215 1366
1216 Condition LCodeGen::EmitIsObject(Register input, 1367 Condition LCodeGen::EmitIsObject(Register input,
1217 Register temp1, 1368 Register temp1,
(...skipping 57 matching lines...) Expand 10 before | Expand all | Expand 10 after
1275 return eq; 1426 return eq;
1276 } 1427 }
1277 1428
1278 1429
1279 void LCodeGen::DoHasInstanceType(LHasInstanceType* instr) { 1430 void LCodeGen::DoHasInstanceType(LHasInstanceType* instr) {
1280 Abort("DoHasInstanceType unimplemented."); 1431 Abort("DoHasInstanceType unimplemented.");
1281 } 1432 }
1282 1433
1283 1434
1284 void LCodeGen::DoHasInstanceTypeAndBranch(LHasInstanceTypeAndBranch* instr) { 1435 void LCodeGen::DoHasInstanceTypeAndBranch(LHasInstanceTypeAndBranch* instr) {
1436 Register scratch = scratch0();
1285 Register input = ToRegister(instr->input()); 1437 Register input = ToRegister(instr->input());
1286 Register temp = ToRegister(instr->temp());
1287 1438
1288 int true_block = chunk_->LookupDestination(instr->true_block_id()); 1439 int true_block = chunk_->LookupDestination(instr->true_block_id());
1289 int false_block = chunk_->LookupDestination(instr->false_block_id()); 1440 int false_block = chunk_->LookupDestination(instr->false_block_id());
1290 1441
1291 Label* false_label = chunk_->GetAssemblyLabel(false_block); 1442 Label* false_label = chunk_->GetAssemblyLabel(false_block);
1292 1443
1293 __ tst(input, Operand(kSmiTagMask)); 1444 __ tst(input, Operand(kSmiTagMask));
1294 __ b(eq, false_label); 1445 __ b(eq, false_label);
1295 1446
1296 __ CompareObjectType(input, temp, temp, instr->TestType()); 1447 __ CompareObjectType(input, scratch, scratch, instr->TestType());
1297 EmitBranch(true_block, false_block, instr->BranchCondition()); 1448 EmitBranch(true_block, false_block, instr->BranchCondition());
1298 } 1449 }
1299 1450
1300 1451
1301 void LCodeGen::DoHasCachedArrayIndex(LHasCachedArrayIndex* instr) { 1452 void LCodeGen::DoHasCachedArrayIndex(LHasCachedArrayIndex* instr) {
1302 Abort("DoHasCachedArrayIndex unimplemented."); 1453 Abort("DoHasCachedArrayIndex unimplemented.");
1303 } 1454 }
1304 1455
1305 1456
1306 void LCodeGen::DoHasCachedArrayIndexAndBranch( 1457 void LCodeGen::DoHasCachedArrayIndexAndBranch(
1307 LHasCachedArrayIndexAndBranch* instr) { 1458 LHasCachedArrayIndexAndBranch* instr) {
1308 Abort("DoHasCachedArrayIndexAndBranch unimplemented."); 1459 Abort("DoHasCachedArrayIndexAndBranch unimplemented.");
1309 } 1460 }
1310 1461
1311 1462
1312 // Branches to a label or falls through with the answer in the z flag. Trashes 1463 // Branches to a label or falls through with the answer in flags. Trashes
1313 // the temp registers, but not the input. Only input and temp2 may alias. 1464 // the temp registers, but not the input. Only input and temp2 may alias.
1314 void LCodeGen::EmitClassOfTest(Label* is_true, 1465 void LCodeGen::EmitClassOfTest(Label* is_true,
1315 Label* is_false, 1466 Label* is_false,
1316 Handle<String>class_name, 1467 Handle<String>class_name,
1317 Register input, 1468 Register input,
1318 Register temp, 1469 Register temp,
1319 Register temp2) { 1470 Register temp2) {
1320 Abort("EmitClassOfTest unimplemented."); 1471 ASSERT(!input.is(temp));
1472 ASSERT(!temp.is(temp2)); // But input and temp2 may be the same register.
1473 __ tst(input, Operand(kSmiTagMask));
1474 __ b(eq, is_false);
1475 __ CompareObjectType(input, temp, temp2, FIRST_JS_OBJECT_TYPE);
1476 __ b(lt, is_false);
1477
1478 // Map is now in temp.
1479 // Functions have class 'Function'.
1480 __ CompareInstanceType(temp, temp2, JS_FUNCTION_TYPE);
1481 if (class_name->IsEqualTo(CStrVector("Function"))) {
1482 __ b(eq, is_true);
1483 } else {
1484 __ b(eq, is_false);
1485 }
1486
1487 // Check if the constructor in the map is a function.
1488 __ ldr(temp, FieldMemOperand(temp, Map::kConstructorOffset));
1489
1490 // As long as JS_FUNCTION_TYPE is the last instance type and it is
1491 // right after LAST_JS_OBJECT_TYPE, we can avoid checking for
1492 // LAST_JS_OBJECT_TYPE.
1493 ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
1494 ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1);
1495
1496 // Objects with a non-function constructor have class 'Object'.
1497 __ CompareObjectType(temp, temp2, temp2, JS_FUNCTION_TYPE);
1498 if (class_name->IsEqualTo(CStrVector("Object"))) {
1499 __ b(ne, is_true);
1500 } else {
1501 __ b(ne, is_false);
1502 }
1503
1504 // temp now contains the constructor function. Grab the
1505 // instance class name from there.
1506 __ ldr(temp, FieldMemOperand(temp, JSFunction::kSharedFunctionInfoOffset));
1507 __ ldr(temp, FieldMemOperand(temp,
1508 SharedFunctionInfo::kInstanceClassNameOffset));
1509 // The class name we are testing against is a symbol because it's a literal.
1510 // The name in the constructor is a symbol because of the way the context is
1511 // booted. This routine isn't expected to work for random API-created
1512 // classes and it doesn't have to because you can't access it with natives
1513 // syntax. Since both sides are symbols it is sufficient to use an identity
1514 // comparison.
1515 __ cmp(temp, Operand(class_name));
1516 // End with the answer in flags.
1321 } 1517 }
1322 1518
1323 1519
1324 void LCodeGen::DoClassOfTest(LClassOfTest* instr) { 1520 void LCodeGen::DoClassOfTest(LClassOfTest* instr) {
1325 Abort("DoClassOfTest unimplemented."); 1521 Register input = ToRegister(instr->input());
1522 Register result = ToRegister(instr->result());
1523 ASSERT(input.is(result));
1524 Handle<String> class_name = instr->hydrogen()->class_name();
1525
1526 Label done, is_true, is_false;
1527
1528 EmitClassOfTest(&is_true, &is_false, class_name, input, scratch0(), input);
1529 __ b(ne, &is_false);
1530
1531 __ bind(&is_true);
1532 __ LoadRoot(result, Heap::kTrueValueRootIndex);
1533 __ jmp(&done);
1534
1535 __ bind(&is_false);
1536 __ LoadRoot(result, Heap::kFalseValueRootIndex);
1537 __ bind(&done);
1326 } 1538 }
1327 1539
1328 1540
1329 void LCodeGen::DoClassOfTestAndBranch(LClassOfTestAndBranch* instr) { 1541 void LCodeGen::DoClassOfTestAndBranch(LClassOfTestAndBranch* instr) {
1330 Abort("DoClassOfTestAndBranch unimplemented."); 1542 Register input = ToRegister(instr->input());
1543 Register temp = scratch0();
1544 Register temp2 = ToRegister(instr->temporary());
1545 Handle<String> class_name = instr->hydrogen()->class_name();
1546
1547 int true_block = chunk_->LookupDestination(instr->true_block_id());
1548 int false_block = chunk_->LookupDestination(instr->false_block_id());
1549
1550 Label* true_label = chunk_->GetAssemblyLabel(true_block);
1551 Label* false_label = chunk_->GetAssemblyLabel(false_block);
1552
1553 EmitClassOfTest(true_label, false_label, class_name, input, temp, temp2);
1554
1555 EmitBranch(true_block, false_block, eq);
1331 } 1556 }
1332 1557
1333 1558
1334 void LCodeGen::DoCmpMapAndBranch(LCmpMapAndBranch* instr) { 1559 void LCodeGen::DoCmpMapAndBranch(LCmpMapAndBranch* instr) {
1335 Abort("DoCmpMapAndBranch unimplemented."); 1560 Register reg = ToRegister(instr->input());
1561 Register temp = ToRegister(instr->temp());
1562 int true_block = instr->true_block_id();
1563 int false_block = instr->false_block_id();
1564
1565 __ ldr(temp, FieldMemOperand(reg, HeapObject::kMapOffset));
1566 __ cmp(temp, Operand(instr->map()));
1567 EmitBranch(true_block, false_block, eq);
1336 } 1568 }
1337 1569
1338 1570
1339 void LCodeGen::DoInstanceOf(LInstanceOf* instr) { 1571 void LCodeGen::DoInstanceOf(LInstanceOf* instr) {
1340 // We expect object and function in registers r1 and r0. 1572 ASSERT(ToRegister(instr->left()).is(r0)); // Object is in r0.
1573 ASSERT(ToRegister(instr->right()).is(r1)); // Function is in r1.
1574
1341 InstanceofStub stub(InstanceofStub::kArgsInRegisters); 1575 InstanceofStub stub(InstanceofStub::kArgsInRegisters);
1342 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); 1576 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
1343 1577
1344 Label true_value, done; 1578 Label true_value, done;
1345 __ tst(r0, r0); 1579 __ tst(r0, r0);
1346 __ mov(r0, Operand(FACTORY->false_value()), LeaveCC, eq); 1580 __ mov(r0, Operand(FACTORY->false_value()), LeaveCC, ne);
1347 __ mov(r0, Operand(FACTORY->true_value()), LeaveCC, ne); 1581 __ mov(r0, Operand(FACTORY->true_value()), LeaveCC, eq);
1348 } 1582 }
1349 1583
1350 1584
1351 void LCodeGen::DoInstanceOfAndBranch(LInstanceOfAndBranch* instr) { 1585 void LCodeGen::DoInstanceOfAndBranch(LInstanceOfAndBranch* instr) {
1352 Abort("DoInstanceOfAndBranch unimplemented."); 1586 Abort("DoInstanceOfAndBranch unimplemented.");
1353 } 1587 }
1354 1588
1355 1589
1590 void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
1591 Abort("DoInstanceOfKnownGlobal unimplemented.");
1592 }
1593
1356 1594
1357 static Condition ComputeCompareCondition(Token::Value op) { 1595 static Condition ComputeCompareCondition(Token::Value op) {
1358 switch (op) { 1596 switch (op) {
1359 case Token::EQ_STRICT: 1597 case Token::EQ_STRICT:
1360 case Token::EQ: 1598 case Token::EQ:
1361 return eq; 1599 return eq;
1362 case Token::LT: 1600 case Token::LT:
1363 return lt; 1601 return lt;
1364 case Token::GT: 1602 case Token::GT:
1365 return gt; 1603 return gt;
(...skipping 12 matching lines...) Expand all
1378 Token::Value op = instr->op(); 1616 Token::Value op = instr->op();
1379 1617
1380 Handle<Code> ic = CompareIC::GetUninitialized(op); 1618 Handle<Code> ic = CompareIC::GetUninitialized(op);
1381 CallCode(ic, RelocInfo::CODE_TARGET, instr); 1619 CallCode(ic, RelocInfo::CODE_TARGET, instr);
1382 1620
1383 Condition condition = ComputeCompareCondition(op); 1621 Condition condition = ComputeCompareCondition(op);
1384 if (op == Token::GT || op == Token::LTE) { 1622 if (op == Token::GT || op == Token::LTE) {
1385 condition = ReverseCondition(condition); 1623 condition = ReverseCondition(condition);
1386 } 1624 }
1387 __ cmp(r0, Operand(0)); 1625 __ cmp(r0, Operand(0));
1388 __ LoadRoot(ToRegister(instr->result()), Heap::kTrueValueRootIndex, 1626 __ LoadRoot(ToRegister(instr->result()),
1389 condition); 1627 Heap::kTrueValueRootIndex,
1390 __ LoadRoot(ToRegister(instr->result()), Heap::kFalseValueRootIndex, 1628 condition);
1391 NegateCondition(condition)); 1629 __ LoadRoot(ToRegister(instr->result()),
1630 Heap::kFalseValueRootIndex,
1631 NegateCondition(condition));
1392 } 1632 }
1393 1633
1394 1634
1395 void LCodeGen::DoCmpTAndBranch(LCmpTAndBranch* instr) { 1635 void LCodeGen::DoCmpTAndBranch(LCmpTAndBranch* instr) {
1396 Abort("DoCmpTAndBranch unimplemented."); 1636 Abort("DoCmpTAndBranch unimplemented.");
1397 } 1637 }
1398 1638
1399 1639
1400 void LCodeGen::DoReturn(LReturn* instr) { 1640 void LCodeGen::DoReturn(LReturn* instr) {
1401 if (FLAG_trace) { 1641 if (FLAG_trace) {
(...skipping 23 matching lines...) Expand all
1425 1665
1426 1666
1427 void LCodeGen::DoStoreGlobal(LStoreGlobal* instr) { 1667 void LCodeGen::DoStoreGlobal(LStoreGlobal* instr) {
1428 Register value = ToRegister(instr->input()); 1668 Register value = ToRegister(instr->input());
1429 __ mov(ip, Operand(Handle<Object>(instr->hydrogen()->cell()))); 1669 __ mov(ip, Operand(Handle<Object>(instr->hydrogen()->cell())));
1430 __ str(value, FieldMemOperand(ip, JSGlobalPropertyCell::kValueOffset)); 1670 __ str(value, FieldMemOperand(ip, JSGlobalPropertyCell::kValueOffset));
1431 } 1671 }
1432 1672
1433 1673
1434 void LCodeGen::DoLoadNamedField(LLoadNamedField* instr) { 1674 void LCodeGen::DoLoadNamedField(LLoadNamedField* instr) {
1435 Abort("DoLoadNamedField unimplemented."); 1675 Register object = ToRegister(instr->input());
1676 Register result = ToRegister(instr->result());
1677 if (instr->hydrogen()->is_in_object()) {
1678 __ ldr(result, FieldMemOperand(object, instr->hydrogen()->offset()));
1679 } else {
1680 __ ldr(result, FieldMemOperand(object, JSObject::kPropertiesOffset));
1681 __ ldr(result, FieldMemOperand(result, instr->hydrogen()->offset()));
1682 }
1436 } 1683 }
1437 1684
1438 1685
1439 void LCodeGen::DoLoadNamedGeneric(LLoadNamedGeneric* instr) { 1686 void LCodeGen::DoLoadNamedGeneric(LLoadNamedGeneric* instr) {
1440 ASSERT(ToRegister(instr->object()).is(r0)); 1687 ASSERT(ToRegister(instr->object()).is(r0));
1441 ASSERT(ToRegister(instr->result()).is(r0)); 1688 ASSERT(ToRegister(instr->result()).is(r0));
1442 1689
1443 // Name is always in r2. 1690 // Name is always in r2.
1444 __ mov(r2, Operand(instr->name())); 1691 __ mov(r2, Operand(instr->name()));
1445 Handle<Code> ic( 1692 Handle<Code> ic(
1446 Isolate::Current()->builtins()->builtin(Builtins::LoadIC_Initialize)); 1693 Isolate::Current()->builtins()->builtin(Builtins::LoadIC_Initialize));
1447 CallCode(ic, RelocInfo::CODE_TARGET, instr); 1694 CallCode(ic, RelocInfo::CODE_TARGET, instr);
1448 } 1695 }
1449 1696
1450 1697
1698 void LCodeGen::DoLoadFunctionPrototype(LLoadFunctionPrototype* instr) {
1699 Register scratch = scratch0();
1700 Register function = ToRegister(instr->function());
1701 Register result = ToRegister(instr->result());
1702
1703 // Check that the function really is a function. Load map into the
1704 // result register.
1705 __ CompareObjectType(function, result, scratch, JS_FUNCTION_TYPE);
1706 DeoptimizeIf(ne, instr->environment());
1707
1708 // Make sure that the function has an instance prototype.
1709 Label non_instance;
1710 __ ldrb(scratch, FieldMemOperand(result, Map::kBitFieldOffset));
1711 __ tst(scratch, Operand(1 << Map::kHasNonInstancePrototype));
1712 __ b(ne, &non_instance);
1713
1714 // Get the prototype or initial map from the function.
1715 __ ldr(result,
1716 FieldMemOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
1717
1718 // Check that the function has a prototype or an initial map.
1719 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
1720 __ cmp(result, ip);
1721 DeoptimizeIf(eq, instr->environment());
1722
1723 // If the function does not have an initial map, we're done.
1724 Label done;
1725 __ CompareObjectType(result, scratch, scratch, MAP_TYPE);
1726 __ b(ne, &done);
1727
1728 // Get the prototype from the initial map.
1729 __ ldr(result, FieldMemOperand(result, Map::kPrototypeOffset));
1730 __ jmp(&done);
1731
1732 // Non-instance prototype: Fetch prototype from constructor field
1733 // in initial map.
1734 __ bind(&non_instance);
1735 __ ldr(result, FieldMemOperand(result, Map::kConstructorOffset));
1736
1737 // All done.
1738 __ bind(&done);
1739 }
1740
1741
1451 void LCodeGen::DoLoadElements(LLoadElements* instr) { 1742 void LCodeGen::DoLoadElements(LLoadElements* instr) {
1452 Abort("DoLoadElements unimplemented."); 1743 ASSERT(instr->result()->Equals(instr->input()));
1744 Register reg = ToRegister(instr->input());
1745 Register scratch = scratch0();
1746
1747 __ ldr(reg, FieldMemOperand(reg, JSObject::kElementsOffset));
1748 if (FLAG_debug_code) {
1749 Label done;
1750 __ ldr(scratch, FieldMemOperand(reg, HeapObject::kMapOffset));
1751 __ LoadRoot(ip, Heap::kFixedArrayMapRootIndex);
1752 __ cmp(scratch, ip);
1753 __ b(eq, &done);
1754 __ LoadRoot(ip, Heap::kFixedCOWArrayMapRootIndex);
1755 __ cmp(scratch, ip);
1756 __ Check(eq, "Check for fast elements failed.");
1757 __ bind(&done);
1758 }
1453 } 1759 }
1454 1760
1455 1761
1456 void LCodeGen::DoAccessArgumentsAt(LAccessArgumentsAt* instr) { 1762 void LCodeGen::DoAccessArgumentsAt(LAccessArgumentsAt* instr) {
1457 Abort("DoAccessArgumentsAt unimplemented."); 1763 Register arguments = ToRegister(instr->arguments());
1764 Register length = ToRegister(instr->length());
1765 Register index = ToRegister(instr->index());
1766 Register result = ToRegister(instr->result());
1767
1768 // Bailout index is not a valid argument index. Use unsigned check to get
1769 // negative check for free.
1770 __ sub(length, length, index, SetCC);
1771 DeoptimizeIf(ls, instr->environment());
1772
1773 // There are two words between the frame pointer and the last argument.
1774 // Subtracting from length accounts for one of them add one more.
1775 __ add(length, length, Operand(1));
1776 __ ldr(result, MemOperand(arguments, length, LSL, kPointerSizeLog2));
1458 } 1777 }
1459 1778
1460 1779
1461 void LCodeGen::DoLoadKeyedFastElement(LLoadKeyedFastElement* instr) { 1780 void LCodeGen::DoLoadKeyedFastElement(LLoadKeyedFastElement* instr) {
1462 Abort("DoLoadKeyedFastElement unimplemented."); 1781 Register elements = ToRegister(instr->elements());
1782 Register key = EmitLoadRegister(instr->key(), scratch0());
1783 Register result = ToRegister(instr->result());
1784 Register scratch = scratch0();
1785 ASSERT(result.is(elements));
1786
1787 // Load the result.
1788 __ add(scratch, elements, Operand(key, LSL, kPointerSizeLog2));
1789 __ ldr(result, FieldMemOperand(scratch, FixedArray::kHeaderSize));
1790
1791 // Check for the hole value.
1792 __ LoadRoot(scratch, Heap::kTheHoleValueRootIndex);
1793 __ cmp(result, scratch);
1794 DeoptimizeIf(eq, instr->environment());
1463 } 1795 }
1464 1796
1465 1797
1466 void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) { 1798 void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) {
1467 ASSERT(ToRegister(instr->object()).is(r1)); 1799 ASSERT(ToRegister(instr->object()).is(r1));
1468 ASSERT(ToRegister(instr->key()).is(r0)); 1800 ASSERT(ToRegister(instr->key()).is(r0));
1469 1801
1470 Handle<Code> ic(Isolate::Current()->builtins()-> 1802 Handle<Code> ic(Isolate::Current()->builtins()->
1471 builtin(Builtins::KeyedLoadIC_Initialize)); 1803 builtin(Builtins::KeyedLoadIC_Initialize));
1472 CallCode(ic, RelocInfo::CODE_TARGET, instr); 1804 CallCode(ic, RelocInfo::CODE_TARGET, instr);
1473 } 1805 }
1474 1806
1475 1807
1476 void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) { 1808 void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) {
1477 Abort("DoArgumentsElements unimplemented."); 1809 Register scratch = scratch0();
1810 Register result = ToRegister(instr->result());
1811
1812 // Check if the calling frame is an arguments adaptor frame.
1813 Label done, adapted;
1814 __ ldr(scratch, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
1815 __ ldr(result, MemOperand(scratch, StandardFrameConstants::kContextOffset));
1816 __ cmp(result, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
1817
1818 // Result is the frame pointer for the frame if not adapted and for the real
1819 // frame below the adaptor frame if adapted.
1820 __ mov(result, fp, LeaveCC, ne);
1821 __ mov(result, scratch, LeaveCC, eq);
1478 } 1822 }
1479 1823
1480 1824
1481 void LCodeGen::DoArgumentsLength(LArgumentsLength* instr) { 1825 void LCodeGen::DoArgumentsLength(LArgumentsLength* instr) {
1482 Abort("DoArgumentsLength unimplemented."); 1826 Register elem = ToRegister(instr->input());
1827 Register result = ToRegister(instr->result());
1828
1829 Label done;
1830
1831 // If no arguments adaptor frame the number of arguments is fixed.
1832 __ cmp(fp, elem);
1833 __ mov(result, Operand(scope()->num_parameters()));
1834 __ b(eq, &done);
1835
1836 // Arguments adaptor frame present. Get argument length from there.
1837 __ ldr(result, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
1838 __ ldr(result,
1839 MemOperand(result, ArgumentsAdaptorFrameConstants::kLengthOffset));
1840 __ SmiUntag(result);
1841
1842 // Argument length is in result register.
1843 __ bind(&done);
1483 } 1844 }
1484 1845
1485 1846
1486 void LCodeGen::DoApplyArguments(LApplyArguments* instr) { 1847 void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
1487 Abort("DoApplyArguments unimplemented."); 1848 Register receiver = ToRegister(instr->receiver());
1849 Register function = ToRegister(instr->function());
1850 Register scratch = scratch0();
1851
1852 ASSERT(receiver.is(r0));
1853 ASSERT(function.is(r1));
1854 ASSERT(ToRegister(instr->result()).is(r0));
1855
1856 // If the receiver is null or undefined, we have to pass the
1857 // global object as a receiver.
1858 Label global_receiver, receiver_ok;
1859 __ LoadRoot(scratch, Heap::kNullValueRootIndex);
1860 __ cmp(receiver, scratch);
1861 __ b(eq, &global_receiver);
1862 __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex);
1863 __ cmp(receiver, scratch);
1864 __ b(ne, &receiver_ok);
1865 __ bind(&global_receiver);
1866 __ ldr(receiver, GlobalObjectOperand());
1867 __ bind(&receiver_ok);
1868
1869 Register length = ToRegister(instr->length());
1870 Register elements = ToRegister(instr->elements());
1871
1872 Label invoke;
1873
1874 // Copy the arguments to this function possibly from the
1875 // adaptor frame below it.
1876 const uint32_t kArgumentsLimit = 1 * KB;
1877 __ cmp(length, Operand(kArgumentsLimit));
1878 DeoptimizeIf(hi, instr->environment());
1879
1880 // Push the receiver and use the register to keep the original
1881 // number of arguments.
1882 __ push(receiver);
1883 __ mov(receiver, length);
1884 // The arguments are at a one pointer size offset from elements.
1885 __ add(elements, elements, Operand(1 * kPointerSize));
1886
1887 // Loop through the arguments pushing them onto the execution
1888 // stack.
1889 Label loop;
1890 // length is a small non-negative integer, due to the test above.
1891 __ tst(length, Operand(length));
1892 __ b(eq, &invoke);
1893 __ bind(&loop);
1894 __ ldr(scratch, MemOperand(elements, length, LSL, 2));
1895 __ push(scratch);
1896 __ sub(length, length, Operand(1), SetCC);
1897 __ b(ne, &loop);
1898
1899 __ bind(&invoke);
1900 // Invoke the function. The number of arguments is stored in receiver
1901 // which is r0, as expected by InvokeFunction.
1902 v8::internal::ParameterCount actual(receiver);
1903 SafepointGenerator safepoint_generator(this,
1904 instr->pointer_map(),
1905 Safepoint::kNoDeoptimizationIndex);
1906 __ InvokeFunction(function, actual, CALL_FUNCTION, &safepoint_generator);
1488 } 1907 }
1489 1908
1490 1909
1491 void LCodeGen::DoPushArgument(LPushArgument* instr) { 1910 void LCodeGen::DoPushArgument(LPushArgument* instr) {
1492 LOperand* argument = instr->input(); 1911 LOperand* argument = instr->input();
1493 if (argument->IsDoubleRegister() || argument->IsDoubleStackSlot()) { 1912 if (argument->IsDoubleRegister() || argument->IsDoubleStackSlot()) {
1494 Abort("DoPushArgument not implemented for double type."); 1913 Abort("DoPushArgument not implemented for double type.");
1495 } else { 1914 } else {
1496 Register argument_reg = EmitLoadRegister(argument, ip); 1915 Register argument_reg = EmitLoadRegister(argument, ip);
1497 __ push(argument_reg); 1916 __ push(argument_reg);
(...skipping 41 matching lines...) Expand 10 before | Expand all | Expand 10 after
1539 1958
1540 // Setup deoptimization. 1959 // Setup deoptimization.
1541 RegisterLazyDeoptimization(instr); 1960 RegisterLazyDeoptimization(instr);
1542 1961
1543 // Restore context. 1962 // Restore context.
1544 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); 1963 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
1545 } 1964 }
1546 1965
1547 1966
1548 void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) { 1967 void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) {
1549 Abort("DoCallConstantFunction unimplemented."); 1968 ASSERT(ToRegister(instr->result()).is(r0));
1969 __ mov(r1, Operand(instr->function()));
1970 CallKnownFunction(instr->function(), instr->arity(), instr);
1550 } 1971 }
1551 1972
1552 1973
1553 void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LUnaryMathOperation* instr) { 1974 void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LUnaryMathOperation* instr) {
1554 Abort("DoDeferredMathAbsTaggedHeapNumber unimplemented."); 1975 Abort("DoDeferredMathAbsTaggedHeapNumber unimplemented.");
1555 } 1976 }
1556 1977
1557 1978
1558 void LCodeGen::DoMathAbs(LUnaryMathOperation* instr) { 1979 void LCodeGen::DoMathAbs(LUnaryMathOperation* instr) {
1559 Abort("DoMathAbs unimplemented."); 1980 Abort("DoMathAbs unimplemented.");
1560 } 1981 }
1561 1982
1562 1983
1563 void LCodeGen::DoMathFloor(LUnaryMathOperation* instr) { 1984 void LCodeGen::DoMathFloor(LUnaryMathOperation* instr) {
1564 Abort("DoMathFloor unimplemented."); 1985 DoubleRegister input = ToDoubleRegister(instr->input());
1986 Register result = ToRegister(instr->result());
1987 Register prev_fpscr = ToRegister(instr->temp());
1988 SwVfpRegister single_scratch = single_scratch0();
1989 Register scratch = scratch0();
1990
1991 // Set custom FPCSR:
1992 // - Set rounding mode to "Round towards Minus Infinity".
1993 // - Clear vfp cumulative exception flags.
1994 // - Make sure Flush-to-zero mode control bit is unset.
1995 __ vmrs(prev_fpscr);
1996 __ bic(scratch, prev_fpscr,
1997 Operand(kVFPExceptionMask | kVFPRoundingModeMask | kVFPFlushToZeroMask));
1998 __ orr(scratch, scratch, Operand(kVFPRoundToMinusInfinityBits));
1999 __ vmsr(scratch);
2000
2001 // Convert the argument to an integer.
2002 __ vcvt_s32_f64(single_scratch,
2003 input,
2004 Assembler::FPSCRRounding,
2005 al);
2006
2007 // Retrieve FPSCR and check for vfp exceptions.
2008 __ vmrs(scratch);
2009 // Restore FPSCR
2010 __ vmsr(prev_fpscr);
2011 __ tst(scratch, Operand(kVFPExceptionMask));
2012 DeoptimizeIf(ne, instr->environment());
2013
2014 // Move the result back to general purpose register r0.
2015 __ vmov(result, single_scratch);
1565 } 2016 }
1566 2017
1567 2018
1568 void LCodeGen::DoMathSqrt(LUnaryMathOperation* instr) { 2019 void LCodeGen::DoMathSqrt(LUnaryMathOperation* instr) {
1569 Abort("DoMathSqrt unimplemented."); 2020 DoubleRegister input = ToDoubleRegister(instr->input());
2021 ASSERT(ToDoubleRegister(instr->result()).is(input));
2022 __ vsqrt(input, input);
1570 } 2023 }
1571 2024
1572 2025
1573 void LCodeGen::DoUnaryMathOperation(LUnaryMathOperation* instr) { 2026 void LCodeGen::DoUnaryMathOperation(LUnaryMathOperation* instr) {
1574 switch (instr->op()) { 2027 switch (instr->op()) {
1575 case kMathAbs: 2028 case kMathAbs:
1576 DoMathAbs(instr); 2029 DoMathAbs(instr);
1577 break; 2030 break;
1578 case kMathFloor: 2031 case kMathFloor:
1579 DoMathFloor(instr); 2032 DoMathFloor(instr);
1580 break; 2033 break;
1581 case kMathSqrt: 2034 case kMathSqrt:
1582 DoMathSqrt(instr); 2035 DoMathSqrt(instr);
1583 break; 2036 break;
1584 default: 2037 default:
1585 Abort("Unimplemented type of LUnaryMathOperation."); 2038 Abort("Unimplemented type of LUnaryMathOperation.");
1586 UNREACHABLE(); 2039 UNREACHABLE();
1587 } 2040 }
1588 } 2041 }
1589 2042
1590 2043
1591 void LCodeGen::DoCallKeyed(LCallKeyed* instr) { 2044 void LCodeGen::DoCallKeyed(LCallKeyed* instr) {
1592 Abort("DoCallKeyed unimplemented."); 2045 ASSERT(ToRegister(instr->result()).is(r0));
2046
2047 int arity = instr->arity();
2048 Handle<Code> ic =
2049 Isolate::Current()->stub_cache()->ComputeKeyedCallInitialize(arity,
2050 NOT_IN_LOOP);
2051 CallCode(ic, RelocInfo::CODE_TARGET, instr);
2052 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
1593 } 2053 }
1594 2054
1595 2055
1596 void LCodeGen::DoCallNamed(LCallNamed* instr) { 2056 void LCodeGen::DoCallNamed(LCallNamed* instr) {
1597 ASSERT(ToRegister(instr->result()).is(r0)); 2057 ASSERT(ToRegister(instr->result()).is(r0));
1598 2058
1599 int arity = instr->arity(); 2059 int arity = instr->arity();
1600 Handle<Code> ic = Isolate::Current()->stub_cache()-> 2060 Handle<Code> ic = Isolate::Current()->stub_cache()->
1601 ComputeCallInitialize(arity, NOT_IN_LOOP); 2061 ComputeCallInitialize(arity, NOT_IN_LOOP);
1602 __ mov(r2, Operand(instr->name())); 2062 __ mov(r2, Operand(instr->name()));
1603 CallCode(ic, RelocInfo::CODE_TARGET, instr); 2063 CallCode(ic, RelocInfo::CODE_TARGET, instr);
1604 // Restore context register. 2064 // Restore context register.
1605 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); 2065 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
1606 } 2066 }
1607 2067
1608 2068
1609 void LCodeGen::DoCallFunction(LCallFunction* instr) { 2069 void LCodeGen::DoCallFunction(LCallFunction* instr) {
1610 Abort("DoCallFunction unimplemented."); 2070 ASSERT(ToRegister(instr->result()).is(r0));
2071
2072 int arity = instr->arity();
2073 CallFunctionStub stub(arity, NOT_IN_LOOP, RECEIVER_MIGHT_BE_VALUE);
2074 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
2075 __ Drop(1);
2076 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
1611 } 2077 }
1612 2078
1613 2079
1614 void LCodeGen::DoCallGlobal(LCallGlobal* instr) { 2080 void LCodeGen::DoCallGlobal(LCallGlobal* instr) {
1615 Abort("DoCallGlobal unimplemented."); 2081 ASSERT(ToRegister(instr->result()).is(r0));
2082
2083 int arity = instr->arity();
2084 Handle<Code> ic =
2085 Isolate::Current()->stub_cache()->ComputeCallInitialize(arity,
2086 NOT_IN_LOOP);
2087 __ mov(r2, Operand(instr->name()));
2088 CallCode(ic, RelocInfo::CODE_TARGET_CONTEXT, instr);
2089 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
1616 } 2090 }
1617 2091
1618 2092
1619 void LCodeGen::DoCallKnownGlobal(LCallKnownGlobal* instr) { 2093 void LCodeGen::DoCallKnownGlobal(LCallKnownGlobal* instr) {
1620 ASSERT(ToRegister(instr->result()).is(r0)); 2094 ASSERT(ToRegister(instr->result()).is(r0));
1621 __ mov(r1, Operand(instr->target())); 2095 __ mov(r1, Operand(instr->target()));
1622 CallKnownFunction(instr->target(), instr->arity(), instr); 2096 CallKnownFunction(instr->target(), instr->arity(), instr);
1623 } 2097 }
1624 2098
1625 2099
1626 void LCodeGen::DoCallNew(LCallNew* instr) { 2100 void LCodeGen::DoCallNew(LCallNew* instr) {
1627 ASSERT(ToRegister(instr->input()).is(r1)); 2101 ASSERT(ToRegister(instr->input()).is(r1));
1628 ASSERT(ToRegister(instr->result()).is(r0)); 2102 ASSERT(ToRegister(instr->result()).is(r0));
1629 2103
1630 Handle<Code> builtin(Isolate::Current()->builtins()-> 2104 Handle<Code> builtin(Isolate::Current()->builtins()->
1631 builtin(Builtins::JSConstructCall)); 2105 builtin(Builtins::JSConstructCall));
1632 __ mov(r0, Operand(instr->arity())); 2106 __ mov(r0, Operand(instr->arity()));
1633 CallCode(builtin, RelocInfo::CONSTRUCT_CALL, instr); 2107 CallCode(builtin, RelocInfo::CONSTRUCT_CALL, instr);
1634 } 2108 }
1635 2109
1636 2110
1637 void LCodeGen::DoCallRuntime(LCallRuntime* instr) { 2111 void LCodeGen::DoCallRuntime(LCallRuntime* instr) {
1638 CallRuntime(instr->function(), instr->arity(), instr); 2112 CallRuntime(instr->function(), instr->arity(), instr);
1639 } 2113 }
1640 2114
1641 2115
1642 void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) { 2116 void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) {
1643 Abort("DoStoreNamedField unimplemented."); 2117 Register object = ToRegister(instr->object());
2118 Register value = ToRegister(instr->value());
2119 Register scratch = scratch0();
2120 int offset = instr->offset();
2121
2122 ASSERT(!object.is(value));
2123
2124 if (!instr->transition().is_null()) {
2125 __ mov(scratch, Operand(instr->transition()));
2126 __ str(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
2127 }
2128
2129 // Do the store.
2130 if (instr->is_in_object()) {
2131 __ str(value, FieldMemOperand(object, offset));
2132 if (instr->needs_write_barrier()) {
2133 // Update the write barrier for the object for in-object properties.
2134 __ RecordWrite(object, Operand(offset), value, scratch);
2135 }
2136 } else {
2137 __ ldr(scratch, FieldMemOperand(object, JSObject::kPropertiesOffset));
2138 __ str(value, FieldMemOperand(scratch, offset));
2139 if (instr->needs_write_barrier()) {
2140 // Update the write barrier for the properties array.
2141 // object is used as a scratch register.
2142 __ RecordWrite(scratch, Operand(offset), value, object);
2143 }
2144 }
1644 } 2145 }
1645 2146
1646 2147
1647 void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) { 2148 void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) {
1648 ASSERT(ToRegister(instr->object()).is(r1)); 2149 ASSERT(ToRegister(instr->object()).is(r1));
1649 ASSERT(ToRegister(instr->value()).is(r0)); 2150 ASSERT(ToRegister(instr->value()).is(r0));
1650 2151
1651 // Name is always in r2. 2152 // Name is always in r2.
1652 __ mov(r2, Operand(instr->name())); 2153 __ mov(r2, Operand(instr->name()));
1653 Handle<Code> ic(Isolate::Current()->builtins()-> 2154 Handle<Code> ic(Isolate::Current()->builtins()->
1654 builtin(Builtins::StoreIC_Initialize)); 2155 builtin(Builtins::StoreIC_Initialize));
1655 CallCode(ic, RelocInfo::CODE_TARGET, instr); 2156 CallCode(ic, RelocInfo::CODE_TARGET, instr);
1656 } 2157 }
1657 2158
1658 2159
1659 void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) { 2160 void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) {
1660 Abort("DoBoundsCheck unimplemented."); 2161 __ cmp(ToRegister(instr->index()), ToRegister(instr->length()));
2162 DeoptimizeIf(hs, instr->environment());
1661 } 2163 }
1662 2164
1663 2165
// Stores |value| into a fast-elements backing store (FixedArray) at a
// constant or register key, emitting a write barrier when required.
void LCodeGen::DoStoreKeyedFastElement(LStoreKeyedFastElement* instr) {
  Register value = ToRegister(instr->value());
  Register elements = ToRegister(instr->object());
  // key is only a register when the operand is non-constant.
  Register key = instr->key()->IsRegister() ? ToRegister(instr->key()) : no_reg;
  Register scratch = scratch0();

  // Do the store.
  if (instr->key()->IsConstantOperand()) {
    // A constant key with a write barrier is not supported: the barrier
    // path below relies on |key| and |scratch| holding registers.
    ASSERT(!instr->hydrogen()->NeedsWriteBarrier());
    LConstantOperand* const_operand = LConstantOperand::cast(instr->key());
    int offset =
        ToInteger32(const_operand) * kPointerSize + FixedArray::kHeaderSize;
    __ str(value, FieldMemOperand(elements, offset));
  } else {
    // scratch = elements + key * kPointerSize; store at header offset.
    __ add(scratch, elements, Operand(key, LSL, kPointerSizeLog2));
    __ str(value, FieldMemOperand(scratch, FixedArray::kHeaderSize));
  }

  if (instr->hydrogen()->NeedsWriteBarrier()) {
    // Compute address of modified element and store it into key register.
    // Note: this clobbers |key| (safe, as the store above is done).
    __ add(key, scratch, Operand(FixedArray::kHeaderSize));
    __ RecordWrite(elements, key, value);
  }
}
1667 2190
1668 2191
1669 void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) { 2192 void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) {
1670 ASSERT(ToRegister(instr->object()).is(r2)); 2193 ASSERT(ToRegister(instr->object()).is(r2));
1671 ASSERT(ToRegister(instr->key()).is(r1)); 2194 ASSERT(ToRegister(instr->key()).is(r1));
1672 ASSERT(ToRegister(instr->value()).is(r0)); 2195 ASSERT(ToRegister(instr->value()).is(r0));
1673 2196
1674 Handle<Code> ic(Isolate::Current()->builtins()-> 2197 Handle<Code> ic(Isolate::Current()->builtins()->
1675 builtin(Builtins::KeyedStoreIC_Initialize)); 2198 builtin(Builtins::KeyedStoreIC_Initialize));
(...skipping 80 matching lines...) Expand 10 before | Expand all | Expand 10 after
1756 class DeferredNumberTagD: public LDeferredCode { 2279 class DeferredNumberTagD: public LDeferredCode {
1757 public: 2280 public:
1758 DeferredNumberTagD(LCodeGen* codegen, LNumberTagD* instr) 2281 DeferredNumberTagD(LCodeGen* codegen, LNumberTagD* instr)
1759 : LDeferredCode(codegen), instr_(instr) { } 2282 : LDeferredCode(codegen), instr_(instr) { }
1760 virtual void Generate() { codegen()->DoDeferredNumberTagD(instr_); } 2283 virtual void Generate() { codegen()->DoDeferredNumberTagD(instr_); }
1761 private: 2284 private:
1762 LNumberTagD* instr_; 2285 LNumberTagD* instr_;
1763 }; 2286 };
1764 2287
1765 DoubleRegister input_reg = ToDoubleRegister(instr->input()); 2288 DoubleRegister input_reg = ToDoubleRegister(instr->input());
2289 Register scratch = scratch0();
1766 Register reg = ToRegister(instr->result()); 2290 Register reg = ToRegister(instr->result());
1767 Register temp1 = ToRegister(instr->temp1()); 2291 Register temp1 = ToRegister(instr->temp1());
1768 Register temp2 = ToRegister(instr->temp2()); 2292 Register temp2 = ToRegister(instr->temp2());
1769 Register scratch = r9;
1770 2293
1771 DeferredNumberTagD* deferred = new DeferredNumberTagD(this, instr); 2294 DeferredNumberTagD* deferred = new DeferredNumberTagD(this, instr);
1772 if (FLAG_inline_new) { 2295 if (FLAG_inline_new) {
1773 __ LoadRoot(scratch, Heap::kHeapNumberMapRootIndex); 2296 __ LoadRoot(scratch, Heap::kHeapNumberMapRootIndex);
1774 __ AllocateHeapNumber(reg, temp1, temp2, scratch, deferred->entry()); 2297 __ AllocateHeapNumber(reg, temp1, temp2, scratch, deferred->entry());
1775 } else { 2298 } else {
1776 __ jmp(deferred->entry()); 2299 __ jmp(deferred->entry());
1777 } 2300 }
1778 __ bind(deferred->exit()); 2301 __ bind(deferred->exit());
1779 __ sub(ip, reg, Operand(kHeapObjectTag)); 2302 __ sub(ip, reg, Operand(kHeapObjectTag));
(...skipping 20 matching lines...) Expand all
1800 2323
// Tags an integer in place as a Smi. Overflow is impossible here: the
// hydrogen value is asserted not to carry the kCanOverflow flag.
void LCodeGen::DoSmiTag(LSmiTag* instr) {
  LOperand* input = instr->input();
  ASSERT(input->IsRegister() && input->Equals(instr->result()));
  ASSERT(!instr->hydrogen_value()->CheckFlag(HValue::kCanOverflow));
  __ SmiTag(ToRegister(input));
}
1807 2330
1808 2331
// Untags a Smi in place. If the instruction requires a check, a
// non-Smi input triggers deoptimization before untagging.
void LCodeGen::DoSmiUntag(LSmiUntag* instr) {
  LOperand* input = instr->input();
  ASSERT(input->IsRegister() && input->Equals(instr->result()));
  if (instr->needs_check()) {
    // Deoptimize when the tag bit indicates a non-Smi.
    __ tst(ToRegister(input), Operand(kSmiTagMask));
    DeoptimizeIf(ne, instr->environment());
  }
  __ SmiUntag(ToRegister(input));
}
1812 2341
1813 2342
1814 void LCodeGen::EmitNumberUntagD(Register input_reg, 2343 void LCodeGen::EmitNumberUntagD(Register input_reg,
1815 DoubleRegister result_reg, 2344 DoubleRegister result_reg,
1816 LEnvironment* env) { 2345 LEnvironment* env) {
1817 Register core_scratch = r9; 2346 Register scratch = scratch0();
1818 ASSERT(!input_reg.is(core_scratch));
1819 SwVfpRegister flt_scratch = s0; 2347 SwVfpRegister flt_scratch = s0;
1820 ASSERT(!result_reg.is(d0)); 2348 ASSERT(!result_reg.is(d0));
1821 2349
1822 Label load_smi, heap_number, done; 2350 Label load_smi, heap_number, done;
1823 2351
1824 // Smi check. 2352 // Smi check.
1825 __ tst(input_reg, Operand(kSmiTagMask)); 2353 __ tst(input_reg, Operand(kSmiTagMask));
1826 __ b(eq, &load_smi); 2354 __ b(eq, &load_smi);
1827 2355
1828 // Heap number map check. 2356 // Heap number map check.
1829 __ ldr(core_scratch, FieldMemOperand(input_reg, HeapObject::kMapOffset)); 2357 __ ldr(scratch, FieldMemOperand(input_reg, HeapObject::kMapOffset));
1830 __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex); 2358 __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
1831 __ cmp(core_scratch, Operand(ip)); 2359 __ cmp(scratch, Operand(ip));
1832 __ b(eq, &heap_number); 2360 __ b(eq, &heap_number);
1833 2361
1834 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex); 2362 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
1835 __ cmp(input_reg, Operand(ip)); 2363 __ cmp(input_reg, Operand(ip));
1836 DeoptimizeIf(ne, env); 2364 DeoptimizeIf(ne, env);
1837 2365
1838 // Convert undefined to NaN. 2366 // Convert undefined to NaN.
1839 __ LoadRoot(ip, Heap::kNanValueRootIndex); 2367 __ LoadRoot(ip, Heap::kNanValueRootIndex);
1840 __ sub(ip, ip, Operand(kHeapObjectTag)); 2368 __ sub(ip, ip, Operand(kHeapObjectTag));
1841 __ vldr(result_reg, ip, HeapNumber::kValueOffset); 2369 __ vldr(result_reg, ip, HeapNumber::kValueOffset);
(...skipping 21 matching lines...) Expand all
1863 : LDeferredCode(codegen), instr_(instr) { } 2391 : LDeferredCode(codegen), instr_(instr) { }
1864 virtual void Generate() { codegen()->DoDeferredTaggedToI(instr_); } 2392 virtual void Generate() { codegen()->DoDeferredTaggedToI(instr_); }
1865 private: 2393 private:
1866 LTaggedToI* instr_; 2394 LTaggedToI* instr_;
1867 }; 2395 };
1868 2396
1869 2397
1870 void LCodeGen::DoDeferredTaggedToI(LTaggedToI* instr) { 2398 void LCodeGen::DoDeferredTaggedToI(LTaggedToI* instr) {
1871 Label done; 2399 Label done;
1872 Register input_reg = ToRegister(instr->input()); 2400 Register input_reg = ToRegister(instr->input());
1873 Register core_scratch = r9; 2401 Register scratch = scratch0();
1874 ASSERT(!input_reg.is(core_scratch));
1875 DoubleRegister dbl_scratch = d0; 2402 DoubleRegister dbl_scratch = d0;
1876 SwVfpRegister flt_scratch = s0; 2403 SwVfpRegister flt_scratch = s0;
1877 DoubleRegister dbl_tmp = ToDoubleRegister(instr->temp()); 2404 DoubleRegister dbl_tmp = ToDoubleRegister(instr->temp());
1878 2405
1879 // Heap number map check. 2406 // Heap number map check.
1880 __ ldr(core_scratch, FieldMemOperand(input_reg, HeapObject::kMapOffset)); 2407 __ ldr(scratch, FieldMemOperand(input_reg, HeapObject::kMapOffset));
1881 __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex); 2408 __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
1882 __ cmp(core_scratch, Operand(ip)); 2409 __ cmp(scratch, Operand(ip));
1883 2410
1884 if (instr->truncating()) { 2411 if (instr->truncating()) {
1885 Label heap_number; 2412 Label heap_number;
1886 __ b(eq, &heap_number); 2413 __ b(eq, &heap_number);
1887 // Check for undefined. Undefined is converted to zero for truncating 2414 // Check for undefined. Undefined is converted to zero for truncating
1888 // conversions. 2415 // conversions.
1889 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex); 2416 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
1890 __ cmp(input_reg, Operand(ip)); 2417 __ cmp(input_reg, Operand(ip));
1891 DeoptimizeIf(ne, instr->environment()); 2418 DeoptimizeIf(ne, instr->environment());
1892 __ mov(input_reg, Operand(0)); 2419 __ mov(input_reg, Operand(0));
1893 __ b(&done); 2420 __ b(&done);
1894 2421
1895 __ bind(&heap_number); 2422 __ bind(&heap_number);
1896 __ sub(ip, input_reg, Operand(kHeapObjectTag)); 2423 __ sub(ip, input_reg, Operand(kHeapObjectTag));
1897 __ vldr(dbl_tmp, ip, HeapNumber::kValueOffset); 2424 __ vldr(dbl_tmp, ip, HeapNumber::kValueOffset);
1898 __ vcmp(dbl_tmp, 0.0); // Sets overflow bit if NaN. 2425 __ vcmp(dbl_tmp, 0.0); // Sets overflow bit in FPSCR flags if NaN.
1899 __ vcvt_s32_f64(flt_scratch, dbl_tmp); 2426 __ vcvt_s32_f64(flt_scratch, dbl_tmp);
1900 __ vmov(input_reg, flt_scratch); // 32-bit result of conversion. 2427 __ vmov(input_reg, flt_scratch); // 32-bit result of conversion.
1901 __ vmrs(pc); // Move vector status bits to normal status bits. 2428 __ vmrs(pc); // Move vector status bits to normal status bits.
1902 // Overflow bit is set if dbl_tmp is Nan. 2429 // Overflow bit is set if dbl_tmp is Nan.
1903 __ cmn(input_reg, Operand(1), vc); // 0x7fffffff + 1 -> overflow. 2430 __ cmn(input_reg, Operand(1), vc); // 0x7fffffff + 1 -> overflow.
1904 __ cmp(input_reg, Operand(1), vc); // 0x80000000 - 1 -> overflow. 2431 __ cmp(input_reg, Operand(1), vc); // 0x80000000 - 1 -> overflow.
 1905 DeoptimizeIf(vs, instr->environment()); // Saturation may have occurred. 2432 DeoptimizeIf(vs, instr->environment()); // Saturation may have occurred.
1906 2433
1907 } else { 2434 } else {
1908 // Deoptimize if we don't have a heap number. 2435 // Deoptimize if we don't have a heap number.
1909 DeoptimizeIf(ne, instr->environment()); 2436 DeoptimizeIf(ne, instr->environment());
1910 2437
1911 __ sub(ip, input_reg, Operand(kHeapObjectTag)); 2438 __ sub(ip, input_reg, Operand(kHeapObjectTag));
1912 __ vldr(dbl_tmp, ip, HeapNumber::kValueOffset); 2439 __ vldr(dbl_tmp, ip, HeapNumber::kValueOffset);
1913 __ vcvt_s32_f64(flt_scratch, dbl_tmp); 2440 __ vcvt_s32_f64(flt_scratch, dbl_tmp);
1914 __ vmov(input_reg, flt_scratch); // 32-bit result of conversion. 2441 __ vmov(input_reg, flt_scratch); // 32-bit result of conversion.
1915 // Non-truncating conversion means that we cannot lose bits, so we convert 2442 // Non-truncating conversion means that we cannot lose bits, so we convert
1916 // back to check; note that using non-overlapping s and d regs would be 2443 // back to check; note that using non-overlapping s and d regs would be
1917 // slightly faster. 2444 // slightly faster.
1918 __ vcvt_f64_s32(dbl_scratch, flt_scratch); 2445 __ vcvt_f64_s32(dbl_scratch, flt_scratch);
1919 __ vcmp(dbl_scratch, dbl_tmp); 2446 __ VFPCompareAndSetFlags(dbl_scratch, dbl_tmp);
1920 __ vmrs(pc); // Move vector status bits to normal status bits.
1921 DeoptimizeIf(ne, instr->environment()); // Not equal or unordered. 2447 DeoptimizeIf(ne, instr->environment()); // Not equal or unordered.
1922 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { 2448 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
1923 __ tst(input_reg, Operand(input_reg)); 2449 __ tst(input_reg, Operand(input_reg));
1924 __ b(ne, &done); 2450 __ b(ne, &done);
1925 __ vmov(lr, ip, dbl_tmp); 2451 __ vmov(lr, ip, dbl_tmp);
1926 __ tst(ip, Operand(1 << 31)); // Test sign bit. 2452 __ tst(ip, Operand(1 << 31)); // Test sign bit.
1927 DeoptimizeIf(ne, instr->environment()); 2453 DeoptimizeIf(ne, instr->environment());
1928 } 2454 }
1929 } 2455 }
1930 __ bind(&done); 2456 __ bind(&done);
(...skipping 40 matching lines...) Expand 10 before | Expand all | Expand 10 after
1971 2497
// Tests the Smi tag bit and deoptimizes on the instruction-supplied
// condition (presumably so the same instruction can express both
// "must be Smi" and "must not be Smi" — confirm against LCheckSmi).
void LCodeGen::DoCheckSmi(LCheckSmi* instr) {
  LOperand* input = instr->input();
  ASSERT(input->IsRegister());
  __ tst(ToRegister(input), Operand(kSmiTagMask));
  DeoptimizeIf(instr->condition(), instr->environment());
}
1978 2504
1979 2505
1980 void LCodeGen::DoCheckInstanceType(LCheckInstanceType* instr) { 2506 void LCodeGen::DoCheckInstanceType(LCheckInstanceType* instr) {
1981 Abort("DoCheckInstanceType unimplemented."); 2507 Register input = ToRegister(instr->input());
2508 Register scratch = scratch0();
2509 InstanceType first = instr->hydrogen()->first();
2510 InstanceType last = instr->hydrogen()->last();
2511
2512 __ ldr(scratch, FieldMemOperand(input, HeapObject::kMapOffset));
2513 __ ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
2514 __ cmp(scratch, Operand(first));
2515
2516 // If there is only one type in the interval check for equality.
2517 if (first == last) {
2518 DeoptimizeIf(ne, instr->environment());
2519 } else {
2520 DeoptimizeIf(lo, instr->environment());
2521 // Omit check for the last type.
2522 if (last != LAST_TYPE) {
2523 __ cmp(scratch, Operand(last));
2524 DeoptimizeIf(hi, instr->environment());
2525 }
2526 }
1982 } 2527 }
1983 2528
1984 2529
// Deoptimizes unless the input register holds exactly the expected
// JSFunction recorded on the hydrogen instruction.
void LCodeGen::DoCheckFunction(LCheckFunction* instr) {
  ASSERT(instr->input()->IsRegister());
  Register reg = ToRegister(instr->input());
  __ cmp(reg, Operand(instr->hydrogen()->target()));
  DeoptimizeIf(ne, instr->environment());
}
1991 2536
1992 2537
// Deoptimizes unless the input object's map is the expected map.
void LCodeGen::DoCheckMap(LCheckMap* instr) {
  Register scratch = scratch0();
  LOperand* input = instr->input();
  ASSERT(input->IsRegister());
  Register reg = ToRegister(input);
  __ ldr(scratch, FieldMemOperand(reg, HeapObject::kMapOffset));
  __ cmp(scratch, Operand(instr->hydrogen()->map()));
  DeoptimizeIf(ne, instr->environment());
}
2001 2547
2002 2548
2003 void LCodeGen::LoadPrototype(Register result, 2549 void LCodeGen::LoadPrototype(Register result,
2004 Handle<JSObject> prototype) { 2550 Handle<JSObject> prototype) {
2005 Abort("LoadPrototype unimplemented."); 2551 if (HEAP->InNewSpace(*prototype)) {
2552 Handle<JSGlobalPropertyCell> cell =
2553 FACTORY->NewJSGlobalPropertyCell(prototype);
2554 __ mov(result, Operand(cell));
2555 __ ldr(result, FieldMemOperand(result, JSGlobalPropertyCell::kValueOffset));
2556 } else {
2557 __ mov(result, Operand(prototype));
2558 }
2006 } 2559 }
2007 2560
2008 2561
// Walks the prototype chain from instr->prototype() to instr->holder(),
// deoptimizing if any map along the chain (including the holder's)
// differs from the map recorded at compile time.
void LCodeGen::DoCheckPrototypeMaps(LCheckPrototypeMaps* instr) {
  Register temp1 = ToRegister(instr->temp1());
  Register temp2 = ToRegister(instr->temp2());

  Handle<JSObject> holder = instr->holder();
  Handle<JSObject> current_prototype = instr->prototype();

  // Load prototype object.
  LoadPrototype(temp1, current_prototype);

  // Check prototype maps up to the holder.
  while (!current_prototype.is_identical_to(holder)) {
    // Compare the object's runtime map with the compile-time map.
    __ ldr(temp2, FieldMemOperand(temp1, HeapObject::kMapOffset));
    __ cmp(temp2, Operand(Handle<Map>(current_prototype->map())));
    DeoptimizeIf(ne, instr->environment());
    // Advance along the chain at compile time...
    current_prototype =
        Handle<JSObject>(JSObject::cast(current_prototype->GetPrototype()));
    // Load next prototype object.
    LoadPrototype(temp1, current_prototype);
  }

  // Check the holder map.
  __ ldr(temp2, FieldMemOperand(temp1, HeapObject::kMapOffset));
  __ cmp(temp2, Operand(Handle<Map>(current_prototype->map())));
  DeoptimizeIf(ne, instr->environment());
}
2012 2588
2013 2589
// Materializes an array literal. Pushes literals array, literal index
// and constant elements, then picks between the fast-clone stub and
// the runtime based on depth, length, and copy-on-write status.
void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
  __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
  __ mov(r2, Operand(Smi::FromInt(instr->hydrogen()->literal_index())));
  __ mov(r1, Operand(instr->hydrogen()->constant_elements()));
  __ Push(r3, r2, r1);

  // Pick the right runtime function or stub to call.
  int length = instr->hydrogen()->length();
  if (instr->hydrogen()->IsCopyOnWrite()) {
    // COW elements are only produced for shallow (depth 1) literals.
    ASSERT(instr->hydrogen()->depth() == 1);
    FastCloneShallowArrayStub::Mode mode =
        FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS;
    FastCloneShallowArrayStub stub(mode, length);
    CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
  } else if (instr->hydrogen()->depth() > 1) {
    // Nested literals must go through the full runtime path.
    CallRuntime(Runtime::kCreateArrayLiteral, 3, instr);
  } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
    // Too long for the stub's inline clone; still shallow though.
    CallRuntime(Runtime::kCreateArrayLiteralShallow, 3, instr);
  } else {
    FastCloneShallowArrayStub::Mode mode =
        FastCloneShallowArrayStub::CLONE_ELEMENTS;
    FastCloneShallowArrayStub stub(mode, length);
    CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
  }
}
2017 2616
2018 2617
// Materializes an object literal: pushes literals array, literal
// index, constant properties and the fast-elements flag, then calls
// the shallow or deep runtime creation function depending on depth.
void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) {
  __ ldr(r4, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ ldr(r4, FieldMemOperand(r4, JSFunction::kLiteralsOffset));
  __ mov(r3, Operand(Smi::FromInt(instr->hydrogen()->literal_index())));
  __ mov(r2, Operand(instr->hydrogen()->constant_properties()));
  // Fast-elements flag is passed as a Smi 0/1.
  __ mov(r1, Operand(Smi::FromInt(instr->hydrogen()->fast_elements() ? 1 : 0)));
  __ Push(r4, r3, r2, r1);

  // Pick the right runtime function to call.
  if (instr->hydrogen()->depth() > 1) {
    CallRuntime(Runtime::kCreateObjectLiteral, 4, instr);
  } else {
    CallRuntime(Runtime::kCreateObjectLiteralShallow, 4, instr);
  }
}
2022 2633
2023 2634
// Materializes a regexp literal. If the literal slot is still
// undefined, the regexp is created via the runtime; the boilerplate is
// then cloned into a fresh new-space object (result in r0).
void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) {
  Label materialized;
  // Registers will be used as follows:
  // r3 = JS function.
  // r7 = literals array.
  // r1 = regexp literal.
  // r0 = regexp literal clone.
  // r2 and r4-r6 are used as temporaries.
  __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ ldr(r7, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
  int literal_offset = FixedArray::kHeaderSize +
      instr->hydrogen()->literal_index() * kPointerSize;
  __ ldr(r1, FieldMemOperand(r7, literal_offset));
  // An undefined literal slot means the regexp has not been
  // materialized yet.
  __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
  __ cmp(r1, ip);
  __ b(ne, &materialized);

  // Create regexp literal using runtime function.
  // Result will be in r0.
  __ mov(r6, Operand(Smi::FromInt(instr->hydrogen()->literal_index())));
  __ mov(r5, Operand(instr->hydrogen()->pattern()));
  __ mov(r4, Operand(instr->hydrogen()->flags()));
  __ Push(r7, r6, r5, r4);
  CallRuntime(Runtime::kMaterializeRegExpLiteral, 4, instr);
  __ mov(r1, r0);

  __ bind(&materialized);
  int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
  Label allocated, runtime_allocate;

  // Try an inline new-space allocation; fall back to the runtime.
  __ AllocateInNewSpace(size, r0, r2, r3, &runtime_allocate, TAG_OBJECT);
  __ jmp(&allocated);

  __ bind(&runtime_allocate);
  __ mov(r0, Operand(Smi::FromInt(size)));
  // r1 (the boilerplate) is preserved across the call via the stack.
  __ Push(r1, r0);
  CallRuntime(Runtime::kAllocateInNewSpace, 1, instr);
  __ pop(r1);

  __ bind(&allocated);
  // Copy the content into the newly allocated memory.
  // (Unroll copy loop once for better throughput).
  for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) {
    __ ldr(r3, FieldMemOperand(r1, i));
    __ ldr(r2, FieldMemOperand(r1, i + kPointerSize));
    __ str(r3, FieldMemOperand(r0, i));
    __ str(r2, FieldMemOperand(r0, i + kPointerSize));
  }
  // Copy the trailing word when size is an odd number of pointers.
  if ((size % (2 * kPointerSize)) != 0) {
    __ ldr(r3, FieldMemOperand(r1, size - kPointerSize));
    __ str(r3, FieldMemOperand(r0, size - kPointerSize));
  }
}
2027 2688
2028 2689
2029 void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) { 2690 void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) {
2030 Abort("DoFunctionLiteral unimplemented."); 2691 // Use the fast case closure allocation code that allocates in new
2692 // space for nested functions that don't need literals cloning.
2693 Handle<SharedFunctionInfo> shared_info = instr->shared_info();
2694 bool pretenure = !instr->hydrogen()->pretenure();
2695 if (shared_info->num_literals() == 0 && !pretenure) {
2696 FastNewClosureStub stub;
2697 __ mov(r1, Operand(shared_info));
2698 __ push(r1);
2699 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
2700 } else {
2701 __ mov(r2, Operand(shared_info));
2702 __ mov(r1, Operand(pretenure
2703 ? FACTORY->true_value()
2704 : FACTORY->false_value()));
2705 __ Push(cp, r2, r1);
2706 CallRuntime(Runtime::kNewClosure, 3, instr);
2707 }
2031 } 2708 }
2032 2709
2033 2710
// Computes typeof(input) by delegating to the Runtime::kTypeof
// function; the input value is passed on the stack.
void LCodeGen::DoTypeof(LTypeof* instr) {
  Register input = ToRegister(instr->input());
  __ push(input);
  CallRuntime(Runtime::kTypeof, 1, instr);
}
2037 2716
2038 2717
2039 void LCodeGen::DoTypeofIs(LTypeofIs* instr) { 2718 void LCodeGen::DoTypeofIs(LTypeofIs* instr) {
2040 Abort("DoTypeofIs unimplemented."); 2719 Register input = ToRegister(instr->input());
2041 } 2720 Register result = ToRegister(instr->result());
2042 2721 Label true_label;
2043 2722 Label false_label;
2723 Label done;
2724
2725 Condition final_branch_condition = EmitTypeofIs(&true_label,
2726 &false_label,
2727 input,
2728 instr->type_literal());
2729 __ b(final_branch_condition, &true_label);
2730 __ bind(&false_label);
2731 __ LoadRoot(result, Heap::kFalseValueRootIndex);
2732 __ b(&done);
2733
2734 __ bind(&true_label);
2735 __ LoadRoot(result, Heap::kTrueValueRootIndex);
2736
2737 __ bind(&done);
2738 }
2739
2740
// Branching form of typeof comparison: emits the type test via
// EmitTypeofIs and branches to the true/false basic blocks.
void LCodeGen::DoTypeofIsAndBranch(LTypeofIsAndBranch* instr) {
  Register input = ToRegister(instr->input());
  int true_block = chunk_->LookupDestination(instr->true_block_id());
  int false_block = chunk_->LookupDestination(instr->false_block_id());
  Label* true_label = chunk_->GetAssemblyLabel(true_block);
  Label* false_label = chunk_->GetAssemblyLabel(false_block);

  Condition final_branch_condition = EmitTypeofIs(true_label,
                                                  false_label,
                                                  input,
                                                  instr->type_literal());

  EmitBranch(true_block, false_block, final_branch_condition);
}
2058 2755
2059 2756
// Emits code testing whether typeof(input) equals |type_name|.
// May branch directly to |true_label|/|false_label|; otherwise returns
// the condition on which the caller should branch to the true case.
// NOTE: several branches clobber |input| with its map pointer.
Condition LCodeGen::EmitTypeofIs(Label* true_label,
                                 Label* false_label,
                                 Register input,
                                 Handle<String> type_name) {
  Condition final_branch_condition = no_condition;
  Register scratch = scratch0();
  if (type_name->Equals(HEAP->number_symbol())) {
    // Smis and heap numbers are 'number'.
    __ tst(input, Operand(kSmiTagMask));
    __ b(eq, true_label);
    __ ldr(input, FieldMemOperand(input, HeapObject::kMapOffset));
    __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
    __ cmp(input, Operand(ip));
    final_branch_condition = eq;

  } else if (type_name->Equals(HEAP->string_symbol())) {
    __ tst(input, Operand(kSmiTagMask));
    __ b(eq, false_label);
    __ ldr(input, FieldMemOperand(input, HeapObject::kMapOffset));
    // Undetectable objects are never 'string'.
    __ ldrb(ip, FieldMemOperand(input, Map::kBitFieldOffset));
    __ tst(ip, Operand(1 << Map::kIsUndetectable));
    __ b(ne, false_label);
    __ CompareInstanceType(input, scratch, FIRST_NONSTRING_TYPE);
    final_branch_condition = lo;

  } else if (type_name->Equals(HEAP->boolean_symbol())) {
    // Only the true and false roots are 'boolean'.
    __ LoadRoot(ip, Heap::kTrueValueRootIndex);
    __ cmp(input, ip);
    __ b(eq, true_label);
    __ LoadRoot(ip, Heap::kFalseValueRootIndex);
    __ cmp(input, ip);
    final_branch_condition = eq;

  } else if (type_name->Equals(HEAP->undefined_symbol())) {
    __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
    __ cmp(input, ip);
    __ b(eq, true_label);
    __ tst(input, Operand(kSmiTagMask));
    __ b(eq, false_label);
    // Check for undetectable objects => true.
    __ ldr(input, FieldMemOperand(input, HeapObject::kMapOffset));
    __ ldrb(ip, FieldMemOperand(input, Map::kBitFieldOffset));
    __ tst(ip, Operand(1 << Map::kIsUndetectable));
    final_branch_condition = ne;

  } else if (type_name->Equals(HEAP->function_symbol())) {
    __ tst(input, Operand(kSmiTagMask));
    __ b(eq, false_label);
    __ CompareObjectType(input, input, scratch, JS_FUNCTION_TYPE);
    __ b(eq, true_label);
    // Regular expressions => 'function' (they are callable).
    __ CompareInstanceType(input, scratch, JS_REGEXP_TYPE);
    final_branch_condition = eq;

  } else if (type_name->Equals(HEAP->object_symbol())) {
    __ tst(input, Operand(kSmiTagMask));
    __ b(eq, false_label);
    // null is 'object'.
    __ LoadRoot(ip, Heap::kNullValueRootIndex);
    __ cmp(input, ip);
    __ b(eq, true_label);
    // Regular expressions => 'function', not 'object'.
    __ CompareObjectType(input, input, scratch, JS_REGEXP_TYPE);
    __ b(eq, false_label);
    // Check for undetectable objects => false.
    __ ldrb(ip, FieldMemOperand(input, Map::kBitFieldOffset));
    __ tst(ip, Operand(1 << Map::kIsUndetectable));
    __ b(ne, false_label);
    // Check for JS objects => true.
    __ CompareInstanceType(input, scratch, FIRST_JS_OBJECT_TYPE);
    __ b(lo, false_label);
    __ CompareInstanceType(input, scratch, LAST_JS_OBJECT_TYPE);
    final_branch_condition = ls;

  } else {
    // Unknown type literal: always false.
    final_branch_condition = ne;
    __ b(false_label);
    // A dead branch instruction will be generated after this point.
  }

  return final_branch_condition;
}
2141 2837
2142 2838
void LCodeGen::DoLazyBailout(LLazyBailout* instr) {
  // No code for lazy bailout instruction. Used to capture environment after a
  // call for populating the safepoint data with deoptimization data.
}
2147 2843
2148 2844
// Unconditional deoptimization: no_condition makes DeoptimizeIf
// always take the deopt exit.
void LCodeGen::DoDeoptimize(LDeoptimize* instr) {
  DeoptimizeIf(no_condition, instr->environment());
}
2152 2848
2153 2849
// Implements 'delete object[key]' by pushing both operands and
// invoking the DELETE builtin; a safepoint (without deoptimization
// index) is recorded for the call.
void LCodeGen::DoDeleteProperty(LDeleteProperty* instr) {
  Register object = ToRegister(instr->object());
  Register key = ToRegister(instr->key());
  __ Push(object, key);
  RecordPosition(instr->pointer_map()->position());
  SafepointGenerator safepoint_generator(this,
                                         instr->pointer_map(),
                                         Safepoint::kNoDeoptimizationIndex);
  __ InvokeBuiltin(Builtins::DELETE, CALL_JS, &safepoint_generator);
}
2157 2860
2158 2861
// Compares sp against the stack-limit root and calls the StackCheckStub
// when the limit has been reached (sp below limit).
void LCodeGen::DoStackCheck(LStackCheck* instr) {
  // Perform stack overflow check.
  Label ok;
  __ LoadRoot(ip, Heap::kStackLimitRootIndex);
  __ cmp(sp, Operand(ip));
  __ b(hs, &ok);
  StackCheckStub stub;
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
  __ bind(&ok);
}
2169 2872
2170 2873
// On-stack replacement entry is not yet implemented on ARM; aborts
// compilation so the function falls back to unoptimized code.
void LCodeGen::DoOsrEntry(LOsrEntry* instr) {
  Abort("DoOsrEntry unimplemented.");
}
2174 2877
2175 2878
2176 #undef __ 2879 #undef __
2177 2880
2178 } } // namespace v8::internal 2881 } } // namespace v8::internal
OLDNEW
« no previous file with comments | « src/arm/lithium-codegen-arm.h ('k') | src/arm/macro-assembler-arm.h » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698