Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(245)

Side by Side Diff: src/x64/code-stubs-x64.cc

Issue 6676025: X64: Load more values from the root array, rather than from immediate 64-bit ... (Closed) Base URL: http://v8.googlecode.com/svn/branches/bleeding_edge/
Patch Set: Created 9 years, 9 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« no previous file with comments | « no previous file | src/x64/lithium-codegen-x64.cc » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show/Hide Comments ('s')
OLDNEW
1 // Copyright 2011 the V8 project authors. All rights reserved. 1 // Copyright 2011 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 28 matching lines...) Expand all
39 #define __ ACCESS_MASM(masm) 39 #define __ ACCESS_MASM(masm)
40 40
41 void ToNumberStub::Generate(MacroAssembler* masm) { 41 void ToNumberStub::Generate(MacroAssembler* masm) {
42 // The ToNumber stub takes one argument in eax. 42 // The ToNumber stub takes one argument in eax.
43 NearLabel check_heap_number, call_builtin; 43 NearLabel check_heap_number, call_builtin;
44 __ SmiTest(rax); 44 __ SmiTest(rax);
45 __ j(not_zero, &check_heap_number); 45 __ j(not_zero, &check_heap_number);
46 __ Ret(); 46 __ Ret();
47 47
48 __ bind(&check_heap_number); 48 __ bind(&check_heap_number);
49 __ Move(rbx, Factory::heap_number_map()); 49 __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset),
50 __ cmpq(rbx, FieldOperand(rax, HeapObject::kMapOffset)); 50 Heap::kHeapNumberMapRootIndex);
51 __ j(not_equal, &call_builtin); 51 __ j(not_equal, &call_builtin);
52 __ Ret(); 52 __ Ret();
53 53
54 __ bind(&call_builtin); 54 __ bind(&call_builtin);
55 __ pop(rcx); // Pop return address. 55 __ pop(rcx); // Pop return address.
56 __ push(rax); 56 __ push(rax);
57 __ push(rcx); // Push return address. 57 __ push(rcx); // Push return address.
58 __ InvokeBuiltin(Builtins::TO_NUMBER, JUMP_FUNCTION); 58 __ InvokeBuiltin(Builtins::TO_NUMBER, JUMP_FUNCTION);
59 } 59 }
60 60
(...skipping 36 matching lines...) Expand 10 before | Expand all | Expand 10 after
97 97
98 // Return and remove the on-stack parameter. 98 // Return and remove the on-stack parameter.
99 __ ret(1 * kPointerSize); 99 __ ret(1 * kPointerSize);
100 100
101 // Create a new closure through the slower runtime call. 101 // Create a new closure through the slower runtime call.
102 __ bind(&gc); 102 __ bind(&gc);
103 __ pop(rcx); // Temporarily remove return address. 103 __ pop(rcx); // Temporarily remove return address.
104 __ pop(rdx); 104 __ pop(rdx);
105 __ push(rsi); 105 __ push(rsi);
106 __ push(rdx); 106 __ push(rdx);
107 __ Push(Factory::false_value()); 107 __ PushRoot(Heap::kFalseValueRootIndex);
108 __ push(rcx); // Restore return address. 108 __ push(rcx); // Restore return address.
109 __ TailCallRuntime(Runtime::kNewClosure, 3, 1); 109 __ TailCallRuntime(Runtime::kNewClosure, 3, 1);
110 } 110 }
111 111
112 112
113 void FastNewContextStub::Generate(MacroAssembler* masm) { 113 void FastNewContextStub::Generate(MacroAssembler* masm) {
114 // Try to allocate the context in new space. 114 // Try to allocate the context in new space.
115 Label gc; 115 Label gc;
116 int length = slots_ + Context::MIN_CONTEXT_SLOTS; 116 int length = slots_ + Context::MIN_CONTEXT_SLOTS;
117 __ AllocateInNewSpace((length * kPointerSize) + FixedArray::kHeaderSize, 117 __ AllocateInNewSpace((length * kPointerSize) + FixedArray::kHeaderSize,
(...skipping 2368 matching lines...) Expand 10 before | Expand all | Expand 10 after
2486 // rax: RegExp data (FixedArray) 2486 // rax: RegExp data (FixedArray)
2487 // rdx: Number of capture registers 2487 // rdx: Number of capture registers
2488 // Check that the fourth object is a JSArray object. 2488 // Check that the fourth object is a JSArray object.
2489 __ movq(rdi, Operand(rsp, kLastMatchInfoOffset)); 2489 __ movq(rdi, Operand(rsp, kLastMatchInfoOffset));
2490 __ JumpIfSmi(rdi, &runtime); 2490 __ JumpIfSmi(rdi, &runtime);
2491 __ CmpObjectType(rdi, JS_ARRAY_TYPE, kScratchRegister); 2491 __ CmpObjectType(rdi, JS_ARRAY_TYPE, kScratchRegister);
2492 __ j(not_equal, &runtime); 2492 __ j(not_equal, &runtime);
2493 // Check that the JSArray is in fast case. 2493 // Check that the JSArray is in fast case.
2494 __ movq(rbx, FieldOperand(rdi, JSArray::kElementsOffset)); 2494 __ movq(rbx, FieldOperand(rdi, JSArray::kElementsOffset));
2495 __ movq(rdi, FieldOperand(rbx, HeapObject::kMapOffset)); 2495 __ movq(rdi, FieldOperand(rbx, HeapObject::kMapOffset));
2496 __ Cmp(rdi, Factory::fixed_array_map()); 2496 __ CompareRoot(FieldOperand(rbx, HeapObject::kMapOffset),
2497 Heap::kFixedArrayMapRootIndex);
2497 __ j(not_equal, &runtime); 2498 __ j(not_equal, &runtime);
2498 // Check that the last match info has space for the capture registers and the 2499 // Check that the last match info has space for the capture registers and the
2499 // additional information. Ensure no overflow in add. 2500 // additional information. Ensure no overflow in add.
2500 STATIC_ASSERT(FixedArray::kMaxLength < kMaxInt - FixedArray::kLengthOffset); 2501 STATIC_ASSERT(FixedArray::kMaxLength < kMaxInt - FixedArray::kLengthOffset);
2501 __ SmiToInteger32(rdi, FieldOperand(rbx, FixedArray::kLengthOffset)); 2502 __ SmiToInteger32(rdi, FieldOperand(rbx, FixedArray::kLengthOffset));
2502 __ addl(rdx, Immediate(RegExpImpl::kLastMatchOverhead)); 2503 __ addl(rdx, Immediate(RegExpImpl::kLastMatchOverhead));
2503 __ cmpl(rdx, rdi); 2504 __ cmpl(rdx, rdi);
2504 __ j(greater, &runtime); 2505 __ j(greater, &runtime);
2505 2506
2506 // rax: RegExp data (FixedArray) 2507 // rax: RegExp data (FixedArray)
(...skipping 14 matching lines...) Expand all
2521 // Check for flat cons string. 2522 // Check for flat cons string.
2522 // A flat cons string is a cons string where the second part is the empty 2523 // A flat cons string is a cons string where the second part is the empty
2523 // string. In that case the subject string is just the first part of the cons 2524 // string. In that case the subject string is just the first part of the cons
2524 // string. Also in this case the first part of the cons string is known to be 2525 // string. Also in this case the first part of the cons string is known to be
2525 // a sequential string or an external string. 2526 // a sequential string or an external string.
2526 STATIC_ASSERT(kExternalStringTag !=0); 2527 STATIC_ASSERT(kExternalStringTag !=0);
2527 STATIC_ASSERT((kConsStringTag & kExternalStringTag) == 0); 2528 STATIC_ASSERT((kConsStringTag & kExternalStringTag) == 0);
2528 __ testb(rbx, Immediate(kIsNotStringMask | kExternalStringTag)); 2529 __ testb(rbx, Immediate(kIsNotStringMask | kExternalStringTag));
2529 __ j(not_zero, &runtime); 2530 __ j(not_zero, &runtime);
2530 // String is a cons string. 2531 // String is a cons string.
2531 __ movq(rdx, FieldOperand(rdi, ConsString::kSecondOffset)); 2532 __ CompareRoot(FieldOperand(rdi, ConsString::kSecondOffset),
2532 __ Cmp(rdx, Factory::empty_string()); 2533 Heap::kEmptyStringRootIndex);
2533 __ j(not_equal, &runtime); 2534 __ j(not_equal, &runtime);
2534 __ movq(rdi, FieldOperand(rdi, ConsString::kFirstOffset)); 2535 __ movq(rdi, FieldOperand(rdi, ConsString::kFirstOffset));
2535 __ movq(rbx, FieldOperand(rdi, HeapObject::kMapOffset)); 2536 __ movq(rbx, FieldOperand(rdi, HeapObject::kMapOffset));
2536 // String is a cons string with empty second part. 2537 // String is a cons string with empty second part.
2537 // rdi: first part of cons string. 2538 // rdi: first part of cons string.
2538 // rbx: map of first part of cons string. 2539 // rbx: map of first part of cons string.
2539 // Is first part a flat two byte string? 2540 // Is first part a flat two byte string?
2540 __ testb(FieldOperand(rbx, Map::kInstanceTypeOffset), 2541 __ testb(FieldOperand(rbx, Map::kInstanceTypeOffset),
2541 Immediate(kStringRepresentationMask | kStringEncodingMask)); 2542 Immediate(kStringRepresentationMask | kStringEncodingMask));
2542 STATIC_ASSERT((kSeqStringTag | kTwoByteStringTag) == 0); 2543 STATIC_ASSERT((kSeqStringTag | kTwoByteStringTag) == 0);
(...skipping 243 matching lines...) Expand 10 before | Expand all | Expand 10 after
2786 // rbx: Number of array elements as int32. 2787 // rbx: Number of array elements as int32.
2787 // r8: Number of array elements as smi. 2788 // r8: Number of array elements as smi.
2788 2789
2789 // Set JSArray map to global.regexp_result_map(). 2790 // Set JSArray map to global.regexp_result_map().
2790 __ movq(rdx, ContextOperand(rsi, Context::GLOBAL_INDEX)); 2791 __ movq(rdx, ContextOperand(rsi, Context::GLOBAL_INDEX));
2791 __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalContextOffset)); 2792 __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalContextOffset));
2792 __ movq(rdx, ContextOperand(rdx, Context::REGEXP_RESULT_MAP_INDEX)); 2793 __ movq(rdx, ContextOperand(rdx, Context::REGEXP_RESULT_MAP_INDEX));
2793 __ movq(FieldOperand(rax, HeapObject::kMapOffset), rdx); 2794 __ movq(FieldOperand(rax, HeapObject::kMapOffset), rdx);
2794 2795
2795 // Set empty properties FixedArray. 2796 // Set empty properties FixedArray.
2796 __ Move(FieldOperand(rax, JSObject::kPropertiesOffset), 2797 __ LoadRoot(kScratchRegister, Heap::kEmptyFixedArrayRootIndex);
2797 Factory::empty_fixed_array()); 2798 __ movq(FieldOperand(rax, JSObject::kPropertiesOffset), kScratchRegister);
2798 2799
2799 // Set elements to point to FixedArray allocated right after the JSArray. 2800 // Set elements to point to FixedArray allocated right after the JSArray.
2800 __ lea(rcx, Operand(rax, JSRegExpResult::kSize)); 2801 __ lea(rcx, Operand(rax, JSRegExpResult::kSize));
2801 __ movq(FieldOperand(rax, JSObject::kElementsOffset), rcx); 2802 __ movq(FieldOperand(rax, JSObject::kElementsOffset), rcx);
2802 2803
2803 // Set input, index and length fields from arguments. 2804 // Set input, index and length fields from arguments.
2804 __ movq(r8, Operand(rsp, kPointerSize * 1)); 2805 __ movq(r8, Operand(rsp, kPointerSize * 1));
2805 __ movq(FieldOperand(rax, JSRegExpResult::kInputOffset), r8); 2806 __ movq(FieldOperand(rax, JSRegExpResult::kInputOffset), r8);
2806 __ movq(r8, Operand(rsp, kPointerSize * 2)); 2807 __ movq(r8, Operand(rsp, kPointerSize * 2));
2807 __ movq(FieldOperand(rax, JSRegExpResult::kIndexOffset), r8); 2808 __ movq(FieldOperand(rax, JSRegExpResult::kIndexOffset), r8);
2808 __ movq(r8, Operand(rsp, kPointerSize * 3)); 2809 __ movq(r8, Operand(rsp, kPointerSize * 3));
2809 __ movq(FieldOperand(rax, JSArray::kLengthOffset), r8); 2810 __ movq(FieldOperand(rax, JSArray::kLengthOffset), r8);
2810 2811
2811 // Fill out the elements FixedArray. 2812 // Fill out the elements FixedArray.
2812 // rax: JSArray. 2813 // rax: JSArray.
2813 // rcx: FixedArray. 2814 // rcx: FixedArray.
2814 // rbx: Number of elements in array as int32. 2815 // rbx: Number of elements in array as int32.
2815 2816
2816 // Set map. 2817 // Set map.
2817 __ Move(FieldOperand(rcx, HeapObject::kMapOffset), 2818 __ LoadRoot(kScratchRegister, Heap::kFixedArrayMapRootIndex);
2818 Factory::fixed_array_map()); 2819 __ movq(FieldOperand(rcx, HeapObject::kMapOffset), kScratchRegister);
2819 // Set length. 2820 // Set length.
2820 __ Integer32ToSmi(rdx, rbx); 2821 __ Integer32ToSmi(rdx, rbx);
2821 __ movq(FieldOperand(rcx, FixedArray::kLengthOffset), rdx); 2822 __ movq(FieldOperand(rcx, FixedArray::kLengthOffset), rdx);
2822 // Fill contents of fixed-array with the-hole. 2823 // Fill contents of fixed-array with the-hole.
2823 __ Move(rdx, Factory::the_hole_value()); 2824 __ LoadRoot(rdx, Heap::kTheHoleValueRootIndex);
2824 __ lea(rcx, FieldOperand(rcx, FixedArray::kHeaderSize)); 2825 __ lea(rcx, FieldOperand(rcx, FixedArray::kHeaderSize));
2825 // Fill fixed array elements with hole. 2826 // Fill fixed array elements with hole.
2826 // rax: JSArray. 2827 // rax: JSArray.
2827 // rbx: Number of elements in array that remains to be filled, as int32. 2828 // rbx: Number of elements in array that remains to be filled, as int32.
2828 // rcx: Start of elements in FixedArray. 2829 // rcx: Start of elements in FixedArray.
2829 // rdx: the hole. 2830 // rdx: the hole.
2830 Label loop; 2831 Label loop;
2831 __ testl(rbx, rbx); 2832 __ testl(rbx, rbx);
2832 __ bind(&loop); 2833 __ bind(&loop);
2833 __ j(less_equal, &done); // Jump if rcx is negative or zero. 2834 __ j(less_equal, &done); // Jump if rcx is negative or zero.
(...skipping 2104 matching lines...) Expand 10 before | Expand all | Expand 10 after
4938 &need_conversion, 4939 &need_conversion,
4939 &need_conversion, 4940 &need_conversion,
4940 &index_out_of_range, 4941 &index_out_of_range,
4941 STRING_INDEX_IS_NUMBER); 4942 STRING_INDEX_IS_NUMBER);
4942 generator.GenerateFast(masm); 4943 generator.GenerateFast(masm);
4943 __ jmp(&done); 4944 __ jmp(&done);
4944 4945
4945 __ bind(&index_out_of_range); 4946 __ bind(&index_out_of_range);
4946 // When the index is out of range, the spec requires us to return 4947 // When the index is out of range, the spec requires us to return
4947 // the empty string. 4948 // the empty string.
4948 __ Move(result, Factory::empty_string()); 4949 __ LoadRoot(result, Heap::kEmptyStringRootIndex);
4949 __ jmp(&done); 4950 __ jmp(&done);
4950 4951
4951 __ bind(&need_conversion); 4952 __ bind(&need_conversion);
4952 // Move smi zero into the result register, which will trigger 4953 // Move smi zero into the result register, which will trigger
4953 // conversion. 4954 // conversion.
4954 __ Move(result, Smi::FromInt(0)); 4955 __ Move(result, Smi::FromInt(0));
4955 __ jmp(&done); 4956 __ jmp(&done);
4956 4957
4957 StubRuntimeCallHelper call_helper; 4958 StubRuntimeCallHelper call_helper;
4958 generator.GenerateSlow(masm, call_helper); 4959 generator.GenerateSlow(masm, call_helper);
(...skipping 118 matching lines...) Expand 10 before | Expand all | Expand 10 after
5077 // Do a tail call to the rewritten stub. 5078 // Do a tail call to the rewritten stub.
5078 __ jmp(rdi); 5079 __ jmp(rdi);
5079 } 5080 }
5080 5081
5081 5082
5082 #undef __ 5083 #undef __
5083 5084
5084 } } // namespace v8::internal 5085 } } // namespace v8::internal
5085 5086
5086 #endif // V8_TARGET_ARCH_X64 5087 #endif // V8_TARGET_ARCH_X64
OLDNEW
« no previous file with comments | « no previous file | src/x64/lithium-codegen-x64.cc » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698