Chromium Code Reviews

Side by Side Diff: src/arm/code-stubs-arm.cc

Issue 3388005: Make the CompareStub and the UnaryOpStub accept smi inputs.... (Closed) Base URL: http://v8.googlecode.com/svn/branches/bleeding_edge/
Patch Set: x64 and ARM port Created 10 years, 3 months ago
 // Copyright 2010 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 912 matching lines...)
 // On entry lhs_ and rhs_ are the values to be compared.
 // On exit r0 is 0, positive or negative to indicate the result of
 // the comparison.
 void CompareStub::Generate(MacroAssembler* masm) {
   ASSERT((lhs_.is(r0) && rhs_.is(r1)) ||
          (lhs_.is(r1) && rhs_.is(r0)));
 
   Label slow;  // Call builtin.
   Label not_smis, both_loaded_as_doubles, lhs_not_nan;
 
+  if (include_smi_compare_) {
+    Label not_two_smis, smi_done;
+    __ orr(r2, r1, r0);
+    __ tst(r2, Operand(kSmiTagMask));
+    __ b(ne, &not_two_smis);
+    __ sub(r0, r1, r0);
+    __ b(vc, &smi_done);
+    // Correct the sign in case of overflow.
+    __ rsb(r0, r0, Operand(0, RelocInfo::NONE));
+    __ bind(&smi_done);
+    __ Ret();
+    __ bind(&not_two_smis);
+  } else if (FLAG_debug_code) {
+    __ orr(r2, r1, r0);
+    __ tst(r2, Operand(kSmiTagMask));
+    __ Assert(nz, "CompareStub: unexpected smi operands.");
+  }
+
   // NOTICE! This code is only reached after a smi-fast-case check, so
   // it is certain that at least one operand isn't a smi.
 
   // Handle the case where the objects are identical. Either returns the answer
   // or goes to slow. Only falls through if the objects were not identical.
   EmitIdenticalObjectComparison(masm, &slow, cc_, never_nan_nan_);
 
   // If either is a Smi (we know that not both are), then they can only
   // be strictly equal if the other is a HeapNumber.
   STATIC_ASSERT(kSmiTag == 0);
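Note on the fast path added above: OR-ing the two operands and testing kSmiTagMask proves that both are smis, and subtracting the tagged words then yields a correctly signed comparison result, because the identical tags cancel; the rsb fixes the sign in the one case where the subtraction overflows. A minimal C++ sketch of the same idea, not part of the patch (the helper names and the GCC/Clang __builtin_sub_overflow call are my own choices):

// Sketch only: mirrors the smi fast path in CompareStub::Generate above.
// Assumes 32-bit smis tagged by shifting left one bit (tag bit == 0).
#include <cassert>
#include <cstdint>

const int32_t kSmiTagMaskSketch = 1;

int32_t TagSmi(int32_t value) {
  // kSmiTagSize == 1 on the 32-bit ports.
  return static_cast<int32_t>(static_cast<uint32_t>(value) << 1);
}

// Returns a value that is < 0, 0 or > 0, like the stub leaves in r0.
int32_t CompareTaggedSmis(int32_t a, int32_t b) {
  assert(((a | b) & kSmiTagMaskSketch) == 0);  // the orr/tst/b(ne) check
  int32_t diff;
  // sub r0, r1, r0: the identical tags cancel, so the sign of the
  // difference is the sign of the comparison...
  bool overflowed = __builtin_sub_overflow(a, b, &diff);
  // ...unless the subtraction overflowed, in which case the sign bit is
  // inverted; rsb r0, r0, #0 (negation) restores the correct sign.
  return overflowed ? -diff : diff;
}

int main() {
  assert(CompareTaggedSmis(TagSmi(3), TagSmi(7)) < 0);
  assert(CompareTaggedSmis(TagSmi(7), TagSmi(3)) > 0);
  assert(CompareTaggedSmis(TagSmi(-5), TagSmi(-5)) == 0);
  return 0;
}

The difference is left in r0, which matches the contract stated at the top of CompareStub::Generate (negative, zero, or positive).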
(...skipping 1338 matching lines...)
 }
 
 
 void StackCheckStub::Generate(MacroAssembler* masm) {
   // Do tail-call to runtime routine. Runtime routines expect at least one
   // argument, so give it a Smi.
   __ mov(r0, Operand(Smi::FromInt(0)));
   __ push(r0);
   __ TailCallRuntime(Runtime::kStackGuard, 1, 1);
 
-  __ StubReturn(1);
+  __ Ret();
 }
 
 
 void GenericUnaryOpStub::Generate(MacroAssembler* masm) {
   Label slow, done;
 
   Register heap_number_map = r6;
   __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);
 
   if (op_ == Token::SUB) {
+    if (include_smi_code_) {
       // Check whether the value is a smi.
       Label try_float;
       __ tst(r0, Operand(kSmiTagMask));
       __ b(ne, &try_float);
 
       // Go slow case if the value of the expression is zero
       // to make sure that we switch between 0 and -0.
       if (negative_zero_ == kStrictNegativeZero) {
         // If we have to check for zero, then we can check for the max negative
         // smi while we are at it.
         __ bic(ip, r0, Operand(0x80000000), SetCC);
         __ b(eq, &slow);
         __ rsb(r0, r0, Operand(0, RelocInfo::NONE));
-        __ StubReturn(1);
+        __ Ret();
       } else {
         // The value of the expression is a smi and 0 is OK for -0. Try
         // optimistic subtraction '0 - value'.
         __ rsb(r0, r0, Operand(0, RelocInfo::NONE), SetCC);
-        __ StubReturn(1, vc);
+        __ Ret(vc);
         // We don't have to reverse the optimistic neg since the only case
         // where we fall through is the minimum negative Smi, which is the case
         // where the neg leaves the register unchanged.
         __ jmp(&slow);  // Go slow on max negative Smi.
       }
+      __ bind(&try_float);
+    } else if (FLAG_debug_code) {
+      __ tst(r0, Operand(kSmiTagMask));
+      __ Assert(ne, "Unexpected smi operand.");
     }
 
-    __ bind(&try_float);
     __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
     __ AssertRegisterIsRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);
     __ cmp(r1, heap_number_map);
     __ b(ne, &slow);
     // r0 is a heap number. Get a new heap number in r1.
     if (overwrite_ == UNARY_OVERWRITE) {
       __ ldr(r2, FieldMemOperand(r0, HeapNumber::kExponentOffset));
       __ eor(r2, r2, Operand(HeapNumber::kSignMask));  // Flip sign.
       __ str(r2, FieldMemOperand(r0, HeapNumber::kExponentOffset));
     } else {
       __ AllocateHeapNumber(r1, r2, r3, r6, &slow);
       __ ldr(r3, FieldMemOperand(r0, HeapNumber::kMantissaOffset));
       __ ldr(r2, FieldMemOperand(r0, HeapNumber::kExponentOffset));
       __ str(r3, FieldMemOperand(r1, HeapNumber::kMantissaOffset));
       __ eor(r2, r2, Operand(HeapNumber::kSignMask));  // Flip sign.
       __ str(r2, FieldMemOperand(r1, HeapNumber::kExponentOffset));
       __ mov(r0, Operand(r1));
     }
   } else if (op_ == Token::BIT_NOT) {
+    if (include_smi_code_) {
+      Label non_smi;
+      __ BranchOnNotSmi(r0, &non_smi);
+      __ mvn(r0, Operand(r0));
+      // Bit-clear inverted smi-tag.
+      __ bic(r0, r0, Operand(kSmiTagMask));
+      __ Ret();
+      __ bind(&non_smi);
+    } else if (FLAG_debug_code) {
+      __ tst(r0, Operand(kSmiTagMask));
+      __ Assert(ne, "Unexpected smi operand.");
+    }
+
     // Check if the operand is a heap number.
     __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
     __ AssertRegisterIsRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);
     __ cmp(r1, heap_number_map);
     __ b(ne, &slow);
 
     // Convert the heap number is r0 to an untagged integer in r1.
     __ ConvertToInt32(r0, r1, r2, r3, &slow);
 
     // Do the bitwise operation (move negated) and check if the result
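Aside on the Token::SUB smi path above (pre-existing logic, now guarded by include_smi_code_): zero and the minimum smi must take the slow path, because negating 0 should produce -0, which is not a smi, and negating the most negative value overflows. The single bic with 0x80000000 plus SetCC checks both at once: clearing the sign bit leaves zero exactly for those two inputs. In the else branch the code negates optimistically and returns unless the overflow flag is set (Ret(vc)), falling through to the slow case only for the minimum smi, which the negation leaves unchanged. A small sketch of the combined check, not from the patch, with invented names:

// Sketch only: the "check for zero and the max negative smi at once" trick
// used before negating a smi (the kStrictNegativeZero case above).
#include <cassert>
#include <cstdint>

// bic ip, r0, #0x80000000, SetCC: clear the sign bit and test for zero.
// The result is zero only for 0 and for 0x80000000 (the most negative
// 32-bit value), which are exactly the two tagged inputs that cannot
// simply be negated: -0 is not a smi, and negating INT32_MIN overflows.
bool NeedsSlowPathForNegation(int32_t tagged_smi) {
  return (tagged_smi & 0x7FFFFFFF) == 0;
}

int main() {
  assert(NeedsSlowPathForNegation(0));          // 0 would become -0
  assert(NeedsSlowPathForNegation(INT32_MIN));  // negation overflows
  assert(!NeedsSlowPathForNegation(6));         // tagged smi 3, fine
  return 0;
}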
(...skipping 27 matching lines...)
       WriteInt32ToHeapNumberStub stub(r1, r0, r2);
       __ push(lr);
       __ Call(stub.GetCode(), RelocInfo::CODE_TARGET);
       __ pop(lr);
     }
   } else {
     UNIMPLEMENTED();
   }
 
   __ bind(&done);
-  __ StubReturn(1);
+  __ Ret();
 
   // Handle the slow case by jumping to the JavaScript builtin.
   __ bind(&slow);
   __ push(r0);
   switch (op_) {
     case Token::SUB:
       __ InvokeBuiltin(Builtins::UNARY_MINUS, JUMP_JS);
       break;
     case Token::BIT_NOT:
       __ InvokeBuiltin(Builtins::BIT_NOT, JUMP_JS);
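The BIT_NOT smi fast path added above works because mvn inverts every bit, including the zero smi tag bit, and since ~n equals -n - 1, clearing that inverted tag bit with bic leaves exactly the tagged value of ~n. A sketch, not from the patch, with invented helper names:

// Sketch only: the mvn + bic smi fast path for BIT_NOT shown above.
#include <cassert>
#include <cstdint>

int32_t TagSmi(int32_t value) {
  return static_cast<int32_t>(static_cast<uint32_t>(value) << 1);
}
int32_t UntagSmi(int32_t tagged) { return tagged >> 1; }

int32_t BitNotSmi(int32_t tagged) {
  int32_t inverted = ~tagged;  // mvn r0, r0: also inverts the 0 tag bit
  return inverted & ~1;        // bic r0, r0, #kSmiTagMask: re-tag as smi
}

int main() {
  for (int32_t n : {0, 1, -1, 42, -12345}) {
    assert(UntagSmi(BitNotSmi(TagSmi(n))) == ~n);
  }
  return 0;
}

So a single mvn/bic pair handles any smi operand without untagging and retagging.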
(...skipping 1087 matching lines...)
   const char* never_nan_nan_name = "";
   if (never_nan_nan_ && (cc_ == eq || cc_ == ne)) {
     never_nan_nan_name = "_NO_NAN";
   }
 
   const char* include_number_compare_name = "";
   if (!include_number_compare_) {
     include_number_compare_name = "_NO_NUMBER";
   }
 
+  const char* include_smi_compare_name = "";
+  if (!include_smi_compare_) {
+    include_smi_compare_name = "_NO_SMI";
+  }
+
   OS::SNPrintF(Vector<char>(name_, kMaxNameLength),
                "CompareStub_%s%s%s%s%s%s",
                cc_name,
                lhs_name,
                rhs_name,
                strict_name,
                never_nan_nan_name,
-               include_number_compare_name);
+               include_number_compare_name,
+               include_smi_compare_name);
   return name_;
 }
 
 
 int CompareStub::MinorKey() {
   // Encode the three parameters in a unique 16 bit value. To avoid duplicate
   // stubs the never NaN NaN condition is only taken into account if the
   // condition is equals.
   ASSERT((static_cast<unsigned>(cc_) >> 28) < (1 << 12));
   ASSERT((lhs_.is(r0) && rhs_.is(r1)) ||
          (lhs_.is(r1) && rhs_.is(r0)));
   return ConditionField::encode(static_cast<unsigned>(cc_) >> 28)
          | RegisterField::encode(lhs_.is(r0))
          | StrictField::encode(strict_)
          | NeverNanNanField::encode(cc_ == eq ? never_nan_nan_ : false)
-         | IncludeNumberCompareField::encode(include_number_compare_);
+         | IncludeNumberCompareField::encode(include_number_compare_)
+         | IncludeSmiCompareField::encode(include_smi_compare_);
 }
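For context: MinorKey packs each CompareStub parameter into its own bit field of the stub key, and this patch adds one more field for include_smi_compare_ (mirrored by the _NO_SMI suffix in GetName above), so the smi-including and smi-excluding variants are cached and named separately. A standalone sketch of that kind of encoding; the field layout, widths and names below are invented for illustration, not V8's actual ones:

// Sketch only: how per-parameter bit fields combine into a stub minor key.
#include <cassert>
#include <cstdint>

// A tiny BitField-style helper: 'size' bits starting at bit 'shift'.
template <typename T, int shift, int size>
struct Field {
  static uint32_t encode(T value) {
    return (static_cast<uint32_t>(value) & ((1u << size) - 1)) << shift;
  }
  static T decode(uint32_t key) {
    return static_cast<T>((key >> shift) & ((1u << size) - 1));
  }
};

// Invented layout, for illustration only.
using ConditionBits        = Field<unsigned, 0, 4>;
using RegisterBits         = Field<bool, 4, 1>;
using StrictBits           = Field<bool, 5, 1>;
using NeverNanNanBits      = Field<bool, 6, 1>;
using IncludeNumberCompare = Field<bool, 7, 1>;
using IncludeSmiCompare    = Field<bool, 8, 1>;  // the field this change adds

int main() {
  uint32_t key = ConditionBits::encode(3) |
                 RegisterBits::encode(true) |
                 StrictBits::encode(false) |
                 NeverNanNanBits::encode(false) |
                 IncludeNumberCompare::encode(true) |
                 IncludeSmiCompare::encode(true);
  assert(IncludeSmiCompare::decode(key));
  assert(ConditionBits::decode(key) == 3u);
  return 0;
}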
 
 
 // StringCharCodeAtGenerator
 
 void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
   Label flat_string;
   Label ascii_string;
   Label got_char_code;
 
(...skipping 1143 matching lines...)
   __ bind(&string_add_runtime);
   __ TailCallRuntime(Runtime::kStringAdd, 2, 1);
 }
 
 
 #undef __
 
 } }  // namespace v8::internal
 
 #endif  // V8_TARGET_ARCH_ARM