Chromium Code Reviews

Unified diff: src/a64/lithium-codegen-a64.cc

Issue 166343004: A64: Improve the deoptimization helpers to generate fewer instructions. (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Created 6 years, 10 months ago
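
In outline, the patch rewrites the conditional deoptimization helpers so that the condition is folded into the branch that reaches the deoptimization entry, instead of branching around an unconditional deopt. The sketch below condenses the before/after shape of one helper using the names that appear in this file; it is illustrative only and will not compile outside V8's LCodeGen/MacroAssembler context. The full diff follows.

// Before: each helper computed the bailout type, branched around an
// unconditional Deoptimize() call, and bound a local label.
void LCodeGen::DeoptimizeIfZero(Register rt, LEnvironment* environment) {
  Label dont_deopt;
  Deoptimizer::BailoutType bailout_type = DeoptimizeHeader(environment, NULL);
  __ Cbnz(rt, &dont_deopt);               // skip the deopt when rt != 0
  Deoptimize(environment, bailout_type);  // branch/call to the deopt entry
  __ Bind(&dont_deopt);
}

// After: the condition travels as a BranchType, so DeoptimizeBranch can emit
// a single conditional branch (here a cbz) to the deopt call or jump-table
// entry, saving the branch-around at every deopt site.
void LCodeGen::DeoptimizeIfZero(Register rt, LEnvironment* environment) {
  DeoptimizeBranch(environment, reg_zero, rt);
}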
 // Copyright 2013 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 987 matching lines...)
     __ Bind(&not_zero);
     __ Str(w1, MemOperand(x0));
     __ Msr(NZCV, x2);
     __ Pop(x0, x1, x2);
   }

   return bailout_type;
 }


-void LCodeGen::Deoptimize(LEnvironment* environment,
-                          Deoptimizer::BailoutType bailout_type) {
+void LCodeGen::DeoptimizeBranch(LEnvironment* environment,
+                                Deoptimizer::BailoutType bailout_type,
+                                BranchType branch_type,
+                                Register reg,
+                                int bit) {
jochen (gone - plz use gerrit) 2014/02/19 16:22:47: it should be possible to merge DeoptimizeHeader in
Alexandre Rames 2014/02/19 17:20:29: Done.
   ASSERT(environment->HasBeenRegistered());
   ASSERT(info()->IsOptimizing() || info()->IsStub());
   int id = environment->deoptimization_index();
   Address entry =
       Deoptimizer::GetDeoptimizationEntry(isolate(), id, bailout_type);

   if (info()->ShouldTrapOnDeopt()) {
+    Label dont_trap;
+    __ B(&dont_trap, InvertBranchType(branch_type), reg, bit);
     __ Debug("trap_on_deopt", __LINE__, BREAK);
+    __ Bind(&dont_trap);
   }

   ASSERT(info()->IsStub() || frame_is_built_);
   // Go through jump table if we need to build frame, or restore caller doubles.
   if (frame_is_built_ && !info()->saves_caller_doubles()) {
+    Label dont_deopt;
+    __ B(&dont_deopt, InvertBranchType(branch_type), reg, bit);
     __ Call(entry, RelocInfo::RUNTIME_ENTRY);
+    __ Bind(&dont_deopt);
   } else {
     // We often have several deopts to the same entry, reuse the last
     // jump entry if this is the case.
     if (deopt_jump_table_.is_empty() ||
         (deopt_jump_table_.last().address != entry) ||
         (deopt_jump_table_.last().bailout_type != bailout_type) ||
         (deopt_jump_table_.last().needs_frame != !frame_is_built_)) {
       Deoptimizer::JumpTableEntry table_entry(entry,
                                               bailout_type,
                                               !frame_is_built_);
       deopt_jump_table_.Add(table_entry, zone());
     }
-    __ B(&deopt_jump_table_.last().label);
+    __ B(&deopt_jump_table_.last().label,
+         branch_type, reg, bit);
   }
 }


+void LCodeGen::Deoptimize(LEnvironment* environment,
+                          Deoptimizer::BailoutType bailout_type) {
+  DeoptimizeBranch(environment, bailout_type, always);
+}
+
+
 void LCodeGen::Deoptimize(LEnvironment* environment) {
-  Deoptimizer::BailoutType bailout_type = DeoptimizeHeader(environment, NULL);
-  Deoptimize(environment, bailout_type);
+  DeoptimizeBranch(environment, always);
 }


 void LCodeGen::DeoptimizeIf(Condition cond, LEnvironment* environment) {
-  Label dont_deopt;
-  Deoptimizer::BailoutType bailout_type = DeoptimizeHeader(environment, NULL);
-  __ B(InvertCondition(cond), &dont_deopt);
-  Deoptimize(environment, bailout_type);
-  __ Bind(&dont_deopt);
+  DeoptimizeBranch(environment, static_cast<BranchType>(cond));
 }


 void LCodeGen::DeoptimizeIfZero(Register rt, LEnvironment* environment) {
-  Label dont_deopt;
-  Deoptimizer::BailoutType bailout_type = DeoptimizeHeader(environment, NULL);
-  __ Cbnz(rt, &dont_deopt);
-  Deoptimize(environment, bailout_type);
-  __ Bind(&dont_deopt);
+  DeoptimizeBranch(environment, reg_zero, rt);
 }


 void LCodeGen::DeoptimizeIfNegative(Register rt, LEnvironment* environment) {
-  Label dont_deopt;
-  Deoptimizer::BailoutType bailout_type = DeoptimizeHeader(environment, NULL);
-  __ Tbz(rt, rt.Is64Bits() ? kXSignBit : kWSignBit, &dont_deopt);
-  Deoptimize(environment, bailout_type);
-  __ Bind(&dont_deopt);
+  int sign_bit = rt.Is64Bits() ? kXSignBit : kWSignBit;
+  DeoptimizeBranch(environment, reg_bit_set, rt, sign_bit);
 }


 void LCodeGen::DeoptimizeIfSmi(Register rt,
                                LEnvironment* environment) {
-  Label dont_deopt;
-  Deoptimizer::BailoutType bailout_type = DeoptimizeHeader(environment, NULL);
-  __ JumpIfNotSmi(rt, &dont_deopt);
-  Deoptimize(environment, bailout_type);
-  __ Bind(&dont_deopt);
+  DeoptimizeBranch(environment, reg_bit_clear, rt, MaskToBit(kSmiTagMask));
 }


 void LCodeGen::DeoptimizeIfNotSmi(Register rt, LEnvironment* environment) {
-  Label dont_deopt;
-  Deoptimizer::BailoutType bailout_type = DeoptimizeHeader(environment, NULL);
-  __ JumpIfSmi(rt, &dont_deopt);
-  Deoptimize(environment, bailout_type);
-  __ Bind(&dont_deopt);
+  DeoptimizeBranch(environment, reg_bit_set, rt, MaskToBit(kSmiTagMask));
 }


 void LCodeGen::DeoptimizeIfRoot(Register rt,
                                 Heap::RootListIndex index,
                                 LEnvironment* environment) {
-  Label dont_deopt;
-  Deoptimizer::BailoutType bailout_type = DeoptimizeHeader(environment, NULL);
-  __ JumpIfNotRoot(rt, index, &dont_deopt);
-  Deoptimize(environment, bailout_type);
-  __ Bind(&dont_deopt);
+  __ CompareRoot(rt, index);
+  DeoptimizeIf(eq, environment);
 }


 void LCodeGen::DeoptimizeIfNotRoot(Register rt,
                                    Heap::RootListIndex index,
                                    LEnvironment* environment) {
-  Label dont_deopt;
-  Deoptimizer::BailoutType bailout_type = DeoptimizeHeader(environment, NULL);
-  __ JumpIfRoot(rt, index, &dont_deopt);
-  Deoptimize(environment, bailout_type);
-  __ Bind(&dont_deopt);
+  __ CompareRoot(rt, index);
+  DeoptimizeIf(ne, environment);
 }


 void LCodeGen::EnsureSpaceForLazyDeopt(int space_needed) {
   if (!info()->IsStub()) {
     // Ensure that we have enough space after the previous lazy-bailout
     // instruction for patching the code here.
     intptr_t current_pc = masm()->pc_offset();

     if (current_pc < (last_lazy_deopt_pc_ + space_needed)) {
(...skipping 997 matching lines...)
   } else {
     Deoptimize(instr->environment());
   }

   __ Bind(&success);
 }


 void LCodeGen::DoCheckNonSmi(LCheckNonSmi* instr) {
   if (!instr->hydrogen()->value()->IsHeapObject()) {
-    // TODO(all): Depending of how we chose to implement the deopt, if we could
-    // guarantee that we have a deopt handler reachable by a tbz instruction,
-    // we could use tbz here and produce less code to support this instruction.
     DeoptimizeIfSmi(ToRegister(instr->value()), instr->environment());
   }
 }


 void LCodeGen::DoCheckSmi(LCheckSmi* instr) {
   Register value = ToRegister(instr->value());
   ASSERT(!instr->result() || ToRegister(instr->result()).Is(value));
-  // TODO(all): See DoCheckNonSmi for comments on use of tbz.
   DeoptimizeIfNotSmi(value, instr->environment());
 }


 void LCodeGen::DoCheckInstanceType(LCheckInstanceType* instr) {
   Register input = ToRegister(instr->value());
   Register scratch = ToRegister(instr->temp());

   __ Ldr(scratch, FieldMemOperand(input, HeapObject::kMapOffset));
   __ Ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
(...skipping 3539 matching lines...)
   __ Bind(&out_of_object);
   __ Ldr(result, FieldMemOperand(object, JSObject::kPropertiesOffset));
   // Index is equal to negated out of object property index plus 1.
   __ Sub(result, result, Operand::UntagSmiAndScale(index, kPointerSizeLog2));
   __ Ldr(result, FieldMemOperand(result,
                                  FixedArray::kHeaderSize - kPointerSize));
   __ Bind(&done);
 }

 } }  // namespace v8::internal