Chromium Code Reviews

Diff: src/x64/code-stubs-x64.cc

Issue 20843012: Extract hardcoded error strings into a single place and replace them with an enum. (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: styles fixed (created 7 years, 4 months ago)
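The change follows a single pattern throughout the file: every hardcoded message passed to Assert/Check/Abort is moved into one central list, and call sites pass an enum constant instead of a string literal. Below is a minimal, self-contained sketch of that pattern; the names BailoutReason, ERROR_MESSAGES_LIST, and GetBailoutReason are illustrative assumptions for this sketch, not necessarily the identifiers introduced by the CL. The enum constants shown (kExpected0AsASmiSentinel, kHeapNumberMapRegisterClobbered) are taken from the diff itself.

// Hypothetical sketch of the centralization pattern (not the actual V8 source).
// One X-macro list pairs each enum constant with its human-readable text.
#include <cstdio>

#define ERROR_MESSAGES_LIST(V)                                            \
  V(kExpected0AsASmiSentinel, "Expected 0 as a Smi sentinel")             \
  V(kHeapNumberMapRegisterClobbered, "HeapNumberMap register clobbered.")

// Generate the enum constants from the list.
#define ERROR_MESSAGES_CONSTANTS(C, T) C,
enum BailoutReason {
  ERROR_MESSAGES_LIST(ERROR_MESSAGES_CONSTANTS)
  kLastErrorMessage
};
#undef ERROR_MESSAGES_CONSTANTS

// Map an enum value back to its text, e.g. for debug-mode Abort output.
#define ERROR_MESSAGES_TEXTS(C, T) T,
const char* GetBailoutReason(BailoutReason reason) {
  static const char* const messages[] = {
    ERROR_MESSAGES_LIST(ERROR_MESSAGES_TEXTS)
  };
  return messages[reason];
}
#undef ERROR_MESSAGES_TEXTS

int main() {
  // Call sites pass the enum value; the string lives in exactly one place.
  std::printf("%s\n", GetBailoutReason(kExpected0AsASmiSentinel));
  return 0;
}

With this layout, adding a message means adding one line to the list, and the original text can still be recovered in debug builds by mapping the enum back through the lookup function, which is why the diff below can replace each string literal with a single constant.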
// Copyright 2013 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
(...skipping 493 matching lines...)
__ movq(FieldOperand(rax, HeapObject::kMapOffset), kScratchRegister);
__ Move(FieldOperand(rax, FixedArray::kLengthOffset), Smi::FromInt(length));

// If this block context is nested in the native context we get a smi
// sentinel instead of a function. The block context should get the
// canonical empty function of the native context as its closure which
// we still have to look up.
Label after_sentinel;
__ JumpIfNotSmi(rcx, &after_sentinel, Label::kNear);
if (FLAG_debug_code) {
-const char* message = "Expected 0 as a Smi sentinel";
__ cmpq(rcx, Immediate(0));
-__ Assert(equal, message);
+__ Assert(equal, kExpected0AsASmiSentinel);
}
__ movq(rcx, GlobalObjectOperand());
__ movq(rcx, FieldOperand(rcx, GlobalObject::kNativeContextOffset));
__ movq(rcx, ContextOperand(rcx, Context::CLOSURE_INDEX));
__ bind(&after_sentinel);

// Set up the fixed slots.
__ movq(ContextOperand(rax, Context::CLOSURE_INDEX), rcx);
__ movq(ContextOperand(rax, Context::PREVIOUS_INDEX), rsi);
__ movq(ContextOperand(rax, Context::EXTENSION_INDEX), rbx);
(...skipping 420 matching lines...)
Label allocation_failed;
__ movl(rbx, rax); // rbx holds result value (uint32 value as int64).
// Allocate heap number in new space.
// Not using AllocateHeapNumber macro in order to reuse
// already loaded heap_number_map.
__ Allocate(HeapNumber::kSize, rax, rdx, no_reg, &allocation_failed,
TAG_OBJECT);
// Set the map.
__ AssertRootValue(heap_number_map,
Heap::kHeapNumberMapRootIndex,
-"HeapNumberMap register clobbered.");
+kHeapNumberMapRegisterClobbered);
__ movq(FieldOperand(rax, HeapObject::kMapOffset),
heap_number_map);
__ cvtqsi2sd(xmm0, rbx);
__ movsd(FieldOperand(rax, HeapNumber::kValueOffset), xmm0);
__ Ret();

__ bind(&allocation_failed);
// We need tagged values in rdx and rax for the following code,
// not int32 in rax and rcx.
__ Integer32ToSmi(rax, rcx);
__ Integer32ToSmi(rdx, rbx);
__ jmp(allocation_failure);
}
break;
}
default: UNREACHABLE(); break;
}
// No fall-through from this generated code.
if (FLAG_debug_code) {
-__ Abort("Unexpected fall-through in "
-"BinaryStub_GenerateFloatingPointCode.");
+__ Abort(kUnexpectedFallThroughInBinaryStubGenerateFloatingPointCode);
}
}


static void BinaryOpStub_GenerateRegisterArgsPushUnderReturn(
MacroAssembler* masm) {
// Push arguments, but ensure they are under the return address
// for a tail call.
__ pop(rcx);
__ push(rdx);
(...skipping 1620 matching lines...)
__ movq(rax, Operand(rsp, kJSRegExpOffset));
__ JumpIfSmi(rax, &runtime);
__ CmpObjectType(rax, JS_REGEXP_TYPE, kScratchRegister);
__ j(not_equal, &runtime);

// Check that the RegExp has been compiled (data contains a fixed array).
__ movq(rax, FieldOperand(rax, JSRegExp::kDataOffset));
if (FLAG_debug_code) {
Condition is_smi = masm->CheckSmi(rax);
__ Check(NegateCondition(is_smi),
-"Unexpected type for RegExp data, FixedArray expected");
+kUnexpectedTypeForRegExpDataFixedArrayExpected);
__ CmpObjectType(rax, FIXED_ARRAY_TYPE, kScratchRegister);
-__ Check(equal, "Unexpected type for RegExp data, FixedArray expected");
+__ Check(equal, kUnexpectedTypeForRegExpDataFixedArrayExpected);
}

// rax: RegExp data (FixedArray)
// Check the type of the RegExp. Only continue if type is JSRegExp::IRREGEXP.
__ SmiToInteger32(rbx, FieldOperand(rax, JSRegExp::kDataTagOffset));
__ cmpl(rbx, Immediate(JSRegExp::IRREGEXP));
__ j(not_equal, &runtime);

// rax: RegExp data (FixedArray)
// Check that the number of captures fit in the static offsets vector buffer.
(...skipping 345 matching lines...)
__ j(greater, &not_long_external, Label::kNear); // Go to (10).

// (8) External string. Short external strings have been ruled out.
__ bind(&external_string);
__ movq(rbx, FieldOperand(rdi, HeapObject::kMapOffset));
__ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));
if (FLAG_debug_code) {
// Assert that we do not have a cons or slice (indirect strings) here.
// Sequential strings have already been ruled out.
__ testb(rbx, Immediate(kIsIndirectStringMask));
-__ Assert(zero, "external string expected, but not found");
+__ Assert(zero, kExternalStringExpectedButNotFound);
}
__ movq(rdi, FieldOperand(rdi, ExternalString::kResourceDataOffset));
// Move the pointer so that offset-wise, it looks like a sequential string.
STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize);
__ subq(rdi, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
STATIC_ASSERT(kTwoByteStringTag == 0);
// (8a) Is the external string one byte? If yes, go to (6).
__ testb(rbx, Immediate(kStringEncodingMask));
__ j(not_zero, &seq_one_byte_string); // Goto (6).

(...skipping 443 matching lines...)
StringCompareStub::GenerateCompareFlatAsciiStrings(masm,
rdx,
rax,
rcx,
rbx,
rdi,
r8);
}

#ifdef DEBUG
-__ Abort("Unexpected fall-through from string comparison");
+__ Abort(kUnexpectedFallThroughFromStringComparison);
#endif

__ bind(&check_unequal_objects);
if (cc == equal && !strict()) {
// Not strict equality. Objects are unequal if
// they are both JSObjects and not undetectable,
// and their pointers are different.
Label not_both_objects, return_unequal;
// At most one is a smi, so we can test for smi by adding the two.
// A smi plus a heap object has the low bit set, a heap object plus
(...skipping 806 matching lines...)
if (!HasCallSiteInlineCheck()) {
__ StoreRoot(rdx, Heap::kInstanceofCacheFunctionRootIndex);
__ StoreRoot(rax, Heap::kInstanceofCacheMapRootIndex);
} else {
// Get return address and delta to inlined map check.
__ movq(kScratchRegister, Operand(rsp, 0 * kPointerSize));
__ subq(kScratchRegister, Operand(rsp, 1 * kPointerSize));
if (FLAG_debug_code) {
__ movl(rdi, Immediate(kWordBeforeMapCheckValue));
__ cmpl(Operand(kScratchRegister, kOffsetToMapCheckValue - 4), rdi);
-__ Assert(equal, "InstanceofStub unexpected call site cache (check).");
+__ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheCheck);
}
__ movq(kScratchRegister,
Operand(kScratchRegister, kOffsetToMapCheckValue));
__ movq(Operand(kScratchRegister, 0), rax);
}

__ movq(rcx, FieldOperand(rax, Map::kPrototypeOffset));

// Loop through the prototype chain looking for the function prototype.
Label loop, is_instance, is_not_instance;
(...skipping 21 matching lines...)
(Heap::kTrueValueRootIndex << kPointerSizeLog2) - kRootRegisterBias;
// Assert it is a 1-byte signed value.
ASSERT(true_offset >= 0 && true_offset < 0x100);
__ movl(rax, Immediate(true_offset));
__ movq(kScratchRegister, Operand(rsp, 0 * kPointerSize));
__ subq(kScratchRegister, Operand(rsp, 1 * kPointerSize));
__ movb(Operand(kScratchRegister, kOffsetToResultValue), rax);
if (FLAG_debug_code) {
__ movl(rax, Immediate(kWordBeforeResultValue));
__ cmpl(Operand(kScratchRegister, kOffsetToResultValue - 4), rax);
-__ Assert(equal, "InstanceofStub unexpected call site cache (mov).");
+__ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheMov);
}
__ Set(rax, 0);
}
__ ret(2 * kPointerSize + extra_stack_space);

__ bind(&is_not_instance);
if (!HasCallSiteInlineCheck()) {
// We have to store a non-zero value in the cache.
__ StoreRoot(kScratchRegister, Heap::kInstanceofCacheAnswerRootIndex);
} else {
// Store offset of false in the root array at the inline check site.
int false_offset = 0x100 +
(Heap::kFalseValueRootIndex << kPointerSizeLog2) - kRootRegisterBias;
// Assert it is a 1-byte signed value.
ASSERT(false_offset >= 0 && false_offset < 0x100);
__ movl(rax, Immediate(false_offset));
__ movq(kScratchRegister, Operand(rsp, 0 * kPointerSize));
__ subq(kScratchRegister, Operand(rsp, 1 * kPointerSize));
__ movb(Operand(kScratchRegister, kOffsetToResultValue), rax);
if (FLAG_debug_code) {
__ movl(rax, Immediate(kWordBeforeResultValue));
__ cmpl(Operand(kScratchRegister, kOffsetToResultValue - 4), rax);
-__ Assert(equal, "InstanceofStub unexpected call site cache (mov)");
+__ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheMov);
}
}
__ ret(2 * kPointerSize + extra_stack_space);

// Slow-case: Go through the JavaScript implementation.
__ bind(&slow);
if (HasCallSiteInlineCheck()) {
// Remove extra value from the stack.
__ pop(rcx);
__ pop(rax);
(...skipping 43 matching lines...)
masm, object_, index_, result_, &call_runtime_);

__ Integer32ToSmi(result_, result_);
__ bind(&exit_);
}


void StringCharCodeAtGenerator::GenerateSlow(
MacroAssembler* masm,
const RuntimeCallHelper& call_helper) {
-__ Abort("Unexpected fallthrough to CharCodeAt slow case");
+__ Abort(kUnexpectedFallthroughToCharCodeAtSlowCase);

Factory* factory = masm->isolate()->factory();
// Index is not a smi.
__ bind(&index_not_smi_);
// If index is a heap number, try converting it to an integer.
__ CheckMap(index_,
factory->heap_number_map(),
index_not_number_,
DONT_DO_SMI_CHECK);
call_helper.BeforeCall(masm);
(...skipping 29 matching lines...)
__ push(object_);
__ Integer32ToSmi(index_, index_);
__ push(index_);
__ CallRuntime(Runtime::kStringCharCodeAt, 2);
if (!result_.is(rax)) {
__ movq(result_, rax);
}
call_helper.AfterCall(masm);
__ jmp(&exit_);

-__ Abort("Unexpected fallthrough from CharCodeAt slow case");
+__ Abort(kUnexpectedFallthroughFromCharCodeAtSlowCase);
}


// -------------------------------------------------------------------------
// StringCharFromCodeGenerator

void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) {
// Fast case of Heap::LookupSingleCharacterStringFromCode.
__ JumpIfNotSmi(code_, &slow_case_);
__ SmiCompare(code_, Smi::FromInt(String::kMaxOneByteCharCode));
__ j(above, &slow_case_);

__ LoadRoot(result_, Heap::kSingleCharacterStringCacheRootIndex);
SmiIndex index = masm->SmiToIndex(kScratchRegister, code_, kPointerSizeLog2);
__ movq(result_, FieldOperand(result_, index.reg, index.scale,
FixedArray::kHeaderSize));
__ CompareRoot(result_, Heap::kUndefinedValueRootIndex);
__ j(equal, &slow_case_);
__ bind(&exit_);
}


void StringCharFromCodeGenerator::GenerateSlow(
MacroAssembler* masm,
const RuntimeCallHelper& call_helper) {
-__ Abort("Unexpected fallthrough to CharFromCode slow case");
+__ Abort(kUnexpectedFallthroughToCharFromCodeSlowCase);

__ bind(&slow_case_);
call_helper.BeforeCall(masm);
__ push(code_);
__ CallRuntime(Runtime::kCharFromCode, 1);
if (!result_.is(rax)) {
__ movq(result_, rax);
}
call_helper.AfterCall(masm);
__ jmp(&exit_);

-__ Abort("Unexpected fallthrough from CharFromCode slow case");
+__ Abort(kUnexpectedFallthroughFromCharFromCodeSlowCase);
}


void StringAddStub::Generate(MacroAssembler* masm) {
Label call_runtime, call_builtin;
Builtins::JavaScript builtin_id = Builtins::ADD;

// Load the two arguments.
__ movq(rax, Operand(rsp, 2 * kPointerSize)); // First argument (left).
__ movq(rdx, Operand(rsp, 1 * kPointerSize)); // Second argument (right).
(...skipping 527 matching lines...)
Label is_string;
__ CmpObjectType(candidate, ODDBALL_TYPE, map);
__ j(not_equal, &is_string, Label::kNear);

__ CompareRoot(candidate, Heap::kUndefinedValueRootIndex);
__ j(equal, not_found);
// Must be the hole (deleted entry).
if (FLAG_debug_code) {
__ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
__ cmpq(kScratchRegister, candidate);
-__ Assert(equal, "oddball in string table is not undefined or the hole");
+__ Assert(equal, kOddballInStringTableIsNotUndefinedOrTheHole);
}
__ jmp(&next_probe[i]);

__ bind(&is_string);

// If length is not 2 the string is not a candidate.
__ SmiCompare(FieldOperand(candidate, String::kLengthOffset),
Smi::FromInt(2));
__ j(not_equal, &next_probe[i]);

(...skipping 1478 matching lines...)
Label next;
ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
__ cmpl(rdx, Immediate(kind));
__ j(not_equal, &next);
T stub(kind);
__ TailCallStub(&stub);
__ bind(&next);
}

// If we reached this point there is a problem.
-__ Abort("Unexpected ElementsKind in array constructor");
+__ Abort(kUnexpectedElementsKindInArrayConstructor);
}


static void CreateArrayDispatchOneArgument(MacroAssembler* masm) {
// rbx - type info cell
// rdx - kind
// rax - number of arguments
// rdi - constructor?
// rsp[0] - return address
// rsp[8] - last argument
(...skipping 42 matching lines...)
Label next;
ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
__ cmpl(rdx, Immediate(kind));
__ j(not_equal, &next);
ArraySingleArgumentConstructorStub stub(kind);
__ TailCallStub(&stub);
__ bind(&next);
}

// If we reached this point there is a problem.
-__ Abort("Unexpected ElementsKind in array constructor");
+__ Abort(kUnexpectedElementsKindInArrayConstructor);
}


template<class T>
static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) {
int to_index = GetSequenceIndexFromFastElementsKind(
TERMINAL_FAST_ELEMENTS_KIND);
for (int i = 0; i <= to_index; ++i) {
ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
T stub(kind);
(...skipping 45 matching lines...)

if (FLAG_debug_code) {
// The array construct code is only set for the global and natives
// builtin Array functions which always have maps.

// Initial map for the builtin Array function should be a map.
__ movq(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
// Will both indicate a NULL and a Smi.
STATIC_ASSERT(kSmiTag == 0);
Condition not_smi = NegateCondition(masm->CheckSmi(rcx));
-__ Check(not_smi, "Unexpected initial map for Array function");
+__ Check(not_smi, kUnexpectedInitialMapForArrayFunction);
__ CmpObjectType(rcx, MAP_TYPE, rcx);
-__ Check(equal, "Unexpected initial map for Array function");
+__ Check(equal, kUnexpectedInitialMapForArrayFunction);

// We should either have undefined in rbx or a valid cell
Label okay_here;
Handle<Map> cell_map = masm->isolate()->factory()->cell_map();
__ Cmp(rbx, undefined_sentinel);
__ j(equal, &okay_here);
__ Cmp(FieldOperand(rbx, 0), cell_map);
-__ Assert(equal, "Expected property cell in register rbx");
+__ Assert(equal, kExpectedPropertyCellInRegisterRbx);
__ bind(&okay_here);
}

Label no_info, switch_ready;
// Get the elements kind and case on that.
__ Cmp(rbx, undefined_sentinel);
__ j(equal, &no_info);
__ movq(rdx, FieldOperand(rbx, Cell::kValueOffset));

// The type cell may have undefined in its value.
(...skipping 84 matching lines...)

if (FLAG_debug_code) {
// The array construct code is only set for the global and natives
// builtin Array functions which always have maps.

// Initial map for the builtin Array function should be a map.
__ movq(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
// Will both indicate a NULL and a Smi.
STATIC_ASSERT(kSmiTag == 0);
Condition not_smi = NegateCondition(masm->CheckSmi(rcx));
-__ Check(not_smi, "Unexpected initial map for Array function");
+__ Check(not_smi, kUnexpectedInitialMapForArrayFunction);
__ CmpObjectType(rcx, MAP_TYPE, rcx);
-__ Check(equal, "Unexpected initial map for Array function");
+__ Check(equal, kUnexpectedInitialMapForArrayFunction);
}

// Figure out the right elements kind
__ movq(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));

// Load the map's "bit field 2" into |result|. We only need the first byte,
// but the following masking takes care of that anyway.
__ movzxbq(rcx, FieldOperand(rcx, Map::kBitField2Offset));
// Retrieve elements_kind from bit field 2.
__ and_(rcx, Immediate(Map::kElementsKindMask));
__ shr(rcx, Immediate(Map::kElementsKindShift));

if (FLAG_debug_code) {
Label done;
__ cmpl(rcx, Immediate(FAST_ELEMENTS));
__ j(equal, &done);
__ cmpl(rcx, Immediate(FAST_HOLEY_ELEMENTS));
__ Assert(equal,
-"Invalid ElementsKind for InternalArray or InternalPackedArray");
+kInvalidElementsKindForInternalArrayOrInternalPackedArray);
__ bind(&done);
}

Label fast_elements_case;
__ cmpl(rcx, Immediate(FAST_ELEMENTS));
__ j(equal, &fast_elements_case);
GenerateCase(masm, FAST_HOLEY_ELEMENTS);

__ bind(&fast_elements_case);
GenerateCase(masm, FAST_ELEMENTS);
}


#undef __

} } // namespace v8::internal

#endif // V8_TARGET_ARCH_X64