OLD | NEW |
1 // Copyright 2010 the V8 project authors. All rights reserved. | 1 // Copyright 2010 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 1183 matching lines...)
1194 break; | 1194 break; |
1195 } | 1195 } |
1196 if (answer_object == Heap::undefined_value()) { | 1196 if (answer_object == Heap::undefined_value()) { |
1197 return false; | 1197 return false; |
1198 } | 1198 } |
1199 frame_->Push(Handle<Object>(answer_object)); | 1199 frame_->Push(Handle<Object>(answer_object)); |
1200 return true; | 1200 return true; |
1201 } | 1201 } |
1202 | 1202 |
1203 | 1203 |
| 1204 void CodeGenerator::JumpIfBothSmiUsingTypeInfo(Register left, |
| 1205 Register right, |
| 1206 TypeInfo left_info, |
| 1207 TypeInfo right_info, |
| 1208 JumpTarget* both_smi) { |
| 1209 if (left_info.IsDouble() || left_info.IsString() || |
| 1210 right_info.IsDouble() || right_info.IsString()) { |
| 1211 // We know that left and right are not both smi. Don't do any tests. |
| 1212 return; |
| 1213 } |
| 1214 |
| 1215 if (left.is(right)) { |
| 1216 if (!left_info.IsSmi()) { |
| 1217 Condition is_smi = masm()->CheckSmi(left); |
| 1218 both_smi->Branch(is_smi); |
| 1219 } else { |
| 1220 if (FLAG_debug_code) __ AbortIfNotSmi(left); |
| 1221 both_smi->Jump(); |
| 1222 } |
| 1223 } else if (!left_info.IsSmi()) { |
| 1224 if (!right_info.IsSmi()) { |
| 1225 Condition is_smi = masm()->CheckBothSmi(left, right); |
| 1226 both_smi->Branch(is_smi); |
| 1227 } else { |
| 1228 Condition is_smi = masm()->CheckSmi(left); |
| 1229 both_smi->Branch(is_smi); |
| 1230 } |
| 1231 } else { |
| 1232 if (FLAG_debug_code) __ AbortIfNotSmi(left); |
| 1233 if (!right_info.IsSmi()) { |
| 1234 Condition is_smi = masm()->CheckSmi(right); |
| 1235 both_smi->Branch(is_smi); |
| 1236 } else { |
| 1237 if (FLAG_debug_code) __ AbortIfNotSmi(right); |
| 1238 both_smi->Jump(); |
| 1239 } |
| 1240 } |
| 1241 } |
| 1242 |
| 1243 |
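Reviewer note, not part of the patch: the new JumpIfBothSmiUsingTypeInfo above picks the cheapest smi test it can, given the static TypeInfo of each operand, and emits nothing at all when one operand is known to be a double or a string (the "both smi" path is then unreachable). Below is a minimal standalone sketch of that selection logic; TypeHint and EmitBothSmiCheck are hypothetical stand-ins for V8's TypeInfo and the macro-assembler calls, and only the decision structure is meant to mirror the patch.

    // Hypothetical stand-alone model of the check selection in
    // JumpIfBothSmiUsingTypeInfo; prints which check the patch would emit.
    #include <cstdio>

    enum class TypeHint { kUnknown, kSmi, kDouble, kString };

    static void EmitBothSmiCheck(TypeHint left, TypeHint right, bool same_reg) {
      auto excluded = [](TypeHint t) {
        return t == TypeHint::kDouble || t == TypeHint::kString;
      };
      if (excluded(left) || excluded(right)) {
        // Operands can never both be smis: emit no test (early return).
        std::puts("no check emitted");
        return;
      }
      if (same_reg) {
        // One register: a single CheckSmi, or an unconditional jump if the
        // register is statically known to hold a smi.
        std::puts(left == TypeHint::kSmi ? "unconditional jump"
                                         : "CheckSmi(left), branch");
      } else if (left != TypeHint::kSmi) {
        // Left unknown: test both unless right is already known to be a smi.
        std::puts(right != TypeHint::kSmi ? "CheckBothSmi(left, right), branch"
                                          : "CheckSmi(left), branch");
      } else {
        // Left is a known smi: only right may still need a dynamic test.
        std::puts(right != TypeHint::kSmi ? "CheckSmi(right), branch"
                                          : "unconditional jump");
      }
    }

    int main() {
      EmitBothSmiCheck(TypeHint::kUnknown, TypeHint::kSmi, false);
      EmitBothSmiCheck(TypeHint::kDouble, TypeHint::kUnknown, false);
      return 0;
    }

Note that only the "unconditional jump" outcome leaves the caller without a fall-through path, which is what the has_valid_frame() guard further down relies on.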
1204 void CodeGenerator::JumpIfNotSmiUsingTypeInfo(Register reg, | 1244 void CodeGenerator::JumpIfNotSmiUsingTypeInfo(Register reg, |
1205 TypeInfo type, | 1245 TypeInfo type, |
1206 DeferredCode* deferred) { | 1246 DeferredCode* deferred) { |
1207 if (!type.IsSmi()) { | 1247 if (!type.IsSmi()) { |
1208 __ JumpIfNotSmi(reg, deferred->entry_label()); | 1248 __ JumpIfNotSmi(reg, deferred->entry_label()); |
1209 } | 1249 } |
1210 if (FLAG_debug_code) { | 1250 if (FLAG_debug_code) { |
1211 __ AbortIfNotSmi(reg); | 1251 __ AbortIfNotSmi(reg); |
1212 } | 1252 } |
1213 } | 1253 } |
(...skipping 1021 matching lines...)
2235 } else { | 2275 } else { |
2236 // Here we split control flow to the stub call and inlined cases | 2276 // Here we split control flow to the stub call and inlined cases |
2237 // before finally splitting it to the control destination. We use | 2277 // before finally splitting it to the control destination. We use |
2238 // a jump target and branching to duplicate the virtual frame at | 2278 // a jump target and branching to duplicate the virtual frame at |
2239 // the first split. We manually handle the off-frame references | 2279 // the first split. We manually handle the off-frame references |
2240 // by reconstituting them on the non-fall-through path. | 2280 // by reconstituting them on the non-fall-through path. |
2241 JumpTarget is_smi; | 2281 JumpTarget is_smi; |
2242 Register left_reg = left_side.reg(); | 2282 Register left_reg = left_side.reg(); |
2243 Register right_reg = right_side.reg(); | 2283 Register right_reg = right_side.reg(); |
2244 | 2284 |
2245 Condition both_smi = masm_->CheckBothSmi(left_reg, right_reg); | 2285 // In-line check for comparing two smis. |
2246 is_smi.Branch(both_smi); | 2286 JumpIfBothSmiUsingTypeInfo(left_side.reg(), right_side.reg(), |
| 2287 left_side.type_info(), right_side.type_info(), |
| 2288 &is_smi); |
2247 | 2289 |
2248 // Inline the equality check if both operands can't be a NaN. If both | 2290 if (has_valid_frame()) { |
2249 // objects are the same they are equal. | 2291 // Inline the equality check if both operands can't be a NaN. If both |
2250 if (nan_info == kCantBothBeNaN && cc == equal) { | 2292 // objects are the same they are equal. |
2251 __ cmpq(left_side.reg(), right_side.reg()); | 2293 if (nan_info == kCantBothBeNaN && cc == equal) { |
2252 dest->true_target()->Branch(equal); | 2294 __ cmpq(left_side.reg(), right_side.reg()); |
| 2295 dest->true_target()->Branch(equal); |
| 2296 } |
| 2297 |
| 2298 // Inlined number comparison: |
| 2299 if (inline_number_compare) { |
| 2300 GenerateInlineNumberComparison(&left_side, &right_side, cc, dest); |
| 2301 } |
| 2302 |
| 2303 // End of in-line compare, call out to the compare stub. Don't include |
| 2304 // number comparison in the stub if it was inlined. |
| 2305 CompareStub stub(cc, strict, nan_info, !inline_number_compare); |
| 2306 Result answer = frame_->CallStub(&stub, &left_side, &right_side); |
| 2307 __ testq(answer.reg(), answer.reg()); // Sets both zero and sign flags. |
| 2308 answer.Unuse(); |
| 2309 if (is_smi.is_linked()) { |
| 2310 dest->true_target()->Branch(cc); |
| 2311 dest->false_target()->Jump(); |
| 2312 } else { |
| 2313 dest->Split(cc); |
| 2314 } |
2253 } | 2315 } |
2254 | 2316 |
2255 // Inlined number comparison: | 2317 if (is_smi.is_linked()) { |
2256 if (inline_number_compare) { | 2318 is_smi.Bind(); |
2257 GenerateInlineNumberComparison(&left_side, &right_side, cc, dest); | 2319 left_side = Result(left_reg); |
| 2320 right_side = Result(right_reg); |
| 2321 __ SmiCompare(left_side.reg(), right_side.reg()); |
| 2322 right_side.Unuse(); |
| 2323 left_side.Unuse(); |
| 2324 dest->Split(cc); |
2258 } | 2325 } |
2259 | |
2260 // End of in-line compare, call out to the compare stub. Don't include | |
2261 // number comparison in the stub if it was inlined. | |
2262 CompareStub stub(cc, strict, nan_info, !inline_number_compare); | |
2263 Result answer = frame_->CallStub(&stub, &left_side, &right_side); | |
2264 __ testq(answer.reg(), answer.reg()); // Sets both zero and sign flags. | |
2265 answer.Unuse(); | |
2266 dest->true_target()->Branch(cc); | |
2267 dest->false_target()->Jump(); | |
2268 | |
2269 is_smi.Bind(); | |
2270 left_side = Result(left_reg); | |
2271 right_side = Result(right_reg); | |
2272 __ SmiCompare(left_side.reg(), right_side.reg()); | |
2273 right_side.Unuse(); | |
2274 left_side.Unuse(); | |
2275 dest->Split(cc); | |
2276 } | 2326 } |
2277 } | 2327 } |
2278 } | 2328 } |
2279 | 2329 |
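Reviewer aside, illustration only: the rewritten tail of Comparison() now emits the stub-call path only while the frame is still valid (i.e. unless the smi check ended in an unconditional jump to is_smi), and it only branches around the smi path when the is_smi target was actually linked. The sketch below models those two flags; EmittedPaths and Describe are invented names, and the prints stand in for the code the patch emits.

    // Hypothetical model of which paths the patched Comparison() tail emits,
    // as a function of what JumpIfBothSmiUsingTypeInfo produced.
    #include <cstdio>

    struct EmittedPaths {
      bool smi_check;  // conditional branch to the is_smi target was emitted
      bool smi_jump;   // unconditional jump to is_smi (both statically smi)
    };

    static void Describe(EmittedPaths p) {
      // Mirrors the patch: an unconditional jump invalidates the frame, so
      // the stub path is skipped; the smi path exists only if is_smi linked.
      bool frame_valid_after_check = !p.smi_jump;
      bool is_smi_linked = p.smi_check || p.smi_jump;
      if (frame_valid_after_check) {
        std::puts(is_smi_linked
                      ? "stub path: branch true target / jump false target"
                      : "stub path: dest->Split(cc), nothing to rejoin");
      }
      if (is_smi_linked) {
        std::puts("smi path: bind is_smi, SmiCompare, dest->Split(cc)");
      }
    }

    int main() {
      Describe({true, false});   // dynamic smi check emitted
      Describe({false, true});   // both operands statically smis
      Describe({false, false});  // operands can never both be smis
      return 0;
    }

This is also why the previously unconditional is_smi.Bind() tail could be folded under if (is_smi.is_linked()).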
2280 | 2330 |
2281 void CodeGenerator::ConstantSmiComparison(Condition cc, | 2331 void CodeGenerator::ConstantSmiComparison(Condition cc, |
2282 bool strict, | 2332 bool strict, |
2283 ControlDestination* dest, | 2333 ControlDestination* dest, |
2284 Result* left_side, | 2334 Result* left_side, |
2285 Result* right_side, | 2335 Result* right_side, |
(...skipping 10139 matching lines...)
12425 #undef __ | 12475 #undef __ |
12426 | 12476 |
12427 void RecordWriteStub::Generate(MacroAssembler* masm) { | 12477 void RecordWriteStub::Generate(MacroAssembler* masm) { |
12428 masm->RecordWriteHelper(object_, addr_, scratch_); | 12478 masm->RecordWriteHelper(object_, addr_, scratch_); |
12429 masm->ret(0); | 12479 masm->ret(0); |
12430 } | 12480 } |
12431 | 12481 |
12432 } } // namespace v8::internal | 12482 } } // namespace v8::internal |
12433 | 12483 |
12434 #endif // V8_TARGET_ARCH_X64 | 12484 #endif // V8_TARGET_ARCH_X64 |