Chromium Code Reviews

Unified Diff: src/x64/macro-assembler-x64.cc

Issue 8404030: Version 3.7.1 (Closed) Base URL: http://v8.googlecode.com/svn/trunk/
Patch Set: Created 9 years, 1 month ago
 // Copyright 2011 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 37 matching lines...)
       root_array_available_(true) {
   if (isolate() != NULL) {
     code_object_ = Handle<Object>(isolate()->heap()->undefined_value(),
                                   isolate());
   }
 }


 static intptr_t RootRegisterDelta(ExternalReference other, Isolate* isolate) {
   Address roots_register_value = kRootRegisterBias +
-      reinterpret_cast<Address>(isolate->heap()->roots_address());
+      reinterpret_cast<Address>(isolate->heap()->roots_array_start());
   intptr_t delta = other.address() - roots_register_value;
   return delta;
 }


 Operand MacroAssembler::ExternalOperand(ExternalReference target,
                                         Register scratch) {
   if (root_array_available_ && !Serializer::enabled()) {
     intptr_t delta = RootRegisterDelta(target, isolate());
     if (is_int32(delta)) {
(...skipping 250 matching lines...)
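The is_int32(delta) test above asks whether the external address can be reached from the root register with the signed 32-bit displacement that an x64 memory operand encodes; if not, the address has to go through the scratch register instead. A minimal standalone sketch of that check, not part of the patch (the addresses and the IsInt32 helper are made up for illustration):

#include <cassert>
#include <cstdint>

// Mirrors the idea of is_int32(): does the value survive a round trip
// through a signed 32-bit integer?
static bool IsInt32(int64_t value) {
  return value == static_cast<int32_t>(value);
}

int main() {
  int64_t root_register = 0x00007f5a12340000;               // hypothetical root-array address
  int64_t near_target = root_register + 0x1000;             // fits in a signed 32-bit displacement
  int64_t far_target = root_register + (int64_t{1} << 35);  // does not fit

  assert(IsInt32(near_target - root_register));   // can address relative to the root register
  assert(!IsInt32(far_target - root_register));   // must load the address into a scratch register
  return 0;
}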
 
   // Clobber clobbered input registers when running with the debug-code flag
   // turned on to provoke errors.
   if (emit_debug_code()) {
     movq(value, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
     movq(dst, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
   }
 }


+void MacroAssembler::RecordWriteArray(Register object,
+                                      Register value,
+                                      Register index,
+                                      SaveFPRegsMode save_fp,
+                                      RememberedSetAction remembered_set_action,
+                                      SmiCheck smi_check) {
+  // First, check if a write barrier is even needed. The tests below
+  // catch stores of Smis.
+  Label done;
+
+  // Skip barrier if writing a smi.
+  if (smi_check == INLINE_SMI_CHECK) {
+    JumpIfSmi(value, &done);
+  }
+
+  // Array access: calculate the destination address. Index is not a smi.
+  Register dst = index;
+  lea(dst, Operand(object, index, times_pointer_size,
+                   FixedArray::kHeaderSize - kHeapObjectTag));
+
+  RecordWrite(
+      object, dst, value, save_fp, remembered_set_action, OMIT_SMI_CHECK);
+
+  bind(&done);
+
+  // Clobber clobbered input registers when running with the debug-code flag
+  // turned on to provoke errors.
+  if (emit_debug_code()) {
+    movq(value, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
+    movq(index, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
+  }
+}
+
+
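The lea in the new RecordWriteArray computes the untagged slot address, object + index * pointer size + FixedArray header size - heap-object tag, before handing it to RecordWrite. A standalone sketch of that arithmetic; the constants are assumptions mirroring V8's x64 layout (8-byte pointers, 1-byte heap-object tag, two-word FixedArray header) and this is not code from the patch:

#include <cassert>
#include <cstdint>

int main() {
  const int64_t kPointerSize = 8;
  const int64_t kHeapObjectTag = 1;                          // tagged pointers are offset by 1
  const int64_t kFixedArrayHeaderSize = 2 * kPointerSize;    // map word + length word

  int64_t object = 0x2000 + kHeapObjectTag;                  // hypothetical tagged FixedArray pointer
  int64_t index = 3;

  // What the lea computes: the real (untagged) address of element `index`.
  int64_t dst = object + index * kPointerSize + kFixedArrayHeaderSize - kHeapObjectTag;
  assert(dst == 0x2000 + kFixedArrayHeaderSize + 3 * kPointerSize);
  return 0;
}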
 void MacroAssembler::RecordWrite(Register object,
                                  Register address,
                                  Register value,
                                  SaveFPRegsMode fp_mode,
                                  RememberedSetAction remembered_set_action,
                                  SmiCheck smi_check) {
   // The compiled code assumes that record write doesn't change the
   // context register, so we check that none of the clobbered
   // registers are rsi.
   ASSERT(!value.is(rsi) && !address.is(rsi));
(...skipping 1971 matching lines...)
     addq(rsp, Immediate(stack_elements * kPointerSize));
   }
 }


 void MacroAssembler::Test(const Operand& src, Smi* source) {
   testl(Operand(src, kIntSize), Immediate(source->value()));
 }


+void MacroAssembler::TestBit(const Operand& src, int bits) {
+  int byte_offset = bits / kBitsPerByte;
+  int bit_in_byte = bits & (kBitsPerByte - 1);
+  testb(Operand(src, byte_offset), Immediate(1 << bit_in_byte));
+}
+
+
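The new TestBit avoids loading the whole word: it loads only the byte that contains the requested bit (byte bits / 8) and tests bit bits mod 8 within it, which gives the same answer on a little-endian target such as x64. A standalone check of that equivalence, illustrative only and not part of the patch:

#include <cassert>
#include <cstdint>
#include <cstring>

// Test bit `bits` against the full 64-bit value.
static bool TestBitWide(uint64_t field, int bits) {
  return (field >> bits) & 1;
}

// Test the same bit by loading only the byte that contains it,
// the way TestBit does with a one-byte testb.
static bool TestBitByteWise(uint64_t field, int bits) {
  const int kBitsPerByte = 8;                  // mirrors the V8 constant of the same name
  int byte_offset = bits / kBitsPerByte;       // which byte holds the bit (little-endian)
  int bit_in_byte = bits & (kBitsPerByte - 1); // position of the bit inside that byte
  unsigned char byte;
  std::memcpy(&byte, reinterpret_cast<const unsigned char*>(&field) + byte_offset, 1);
  return (byte >> bit_in_byte) & 1;
}

int main() {
  for (int bit = 0; bit < 64; ++bit) {
    uint64_t field = uint64_t{1} << bit;
    assert(TestBitWide(field, bit) == TestBitByteWise(field, bit));
  }
  return 0;
}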
 void MacroAssembler::Jump(ExternalReference ext) {
   LoadAddress(kScratchRegister, ext);
   jmp(kScratchRegister);
 }


 void MacroAssembler::Jump(Address destination, RelocInfo::Mode rmode) {
   movq(kScratchRegister, destination, rmode);
   jmp(kScratchRegister);
 }
(...skipping 346 matching lines...)
   STATIC_ASSERT(FAST_SMI_ONLY_ELEMENTS == 0);
   cmpb(FieldOperand(map, Map::kBitField2Offset),
        Immediate(Map::kMaximumBitField2FastSmiOnlyElementValue));
   j(above, fail, distance);
 }


 void MacroAssembler::StoreNumberToDoubleElements(
     Register maybe_number,
     Register elements,
-    Register key,
+    Register index,
     XMMRegister xmm_scratch,
     Label* fail) {
   Label smi_value, is_nan, maybe_nan, not_nan, have_double_value, done;
 
   JumpIfSmi(maybe_number, &smi_value, Label::kNear);
 
   CheckMap(maybe_number,
            isolate()->factory()->heap_number_map(),
            fail,
            DONT_DO_SMI_CHECK);
 
   // Double value, canonicalize NaN.
   uint32_t offset = HeapNumber::kValueOffset + sizeof(kHoleNanLower32);
   cmpl(FieldOperand(maybe_number, offset),
        Immediate(kNaNOrInfinityLowerBoundUpper32));
   j(greater_equal, &maybe_nan, Label::kNear);
 
   bind(&not_nan);
   movsd(xmm_scratch, FieldOperand(maybe_number, HeapNumber::kValueOffset));
   bind(&have_double_value);
-  movsd(FieldOperand(elements, key, times_8, FixedDoubleArray::kHeaderSize),
+  movsd(FieldOperand(elements, index, times_8, FixedDoubleArray::kHeaderSize),
         xmm_scratch);
   jmp(&done);
 
   bind(&maybe_nan);
   // Could be NaN or Infinity. If fraction is not zero, it's NaN, otherwise
   // it's an Infinity, and the non-NaN code path applies.
   j(greater, &is_nan, Label::kNear);
   cmpl(FieldOperand(maybe_number, HeapNumber::kValueOffset), Immediate(0));
   j(zero, &not_nan);
   bind(&is_nan);
   // Convert all NaNs to the same canonical NaN value when they are stored in
   // the double array.
   Set(kScratchRegister, BitCast<uint64_t>(
       FixedDoubleArray::canonical_not_the_hole_nan_as_double()));
   movq(xmm_scratch, kScratchRegister);
   jmp(&have_double_value, Label::kNear);
 
   bind(&smi_value);
   // Value is a smi. convert to a double and store.
   // Preserve original value.
   SmiToInteger32(kScratchRegister, maybe_number);
   cvtlsi2sd(xmm_scratch, kScratchRegister);
-  movsd(FieldOperand(elements, key, times_8, FixedDoubleArray::kHeaderSize),
+  movsd(FieldOperand(elements, index, times_8, FixedDoubleArray::kHeaderSize),
         xmm_scratch);
   bind(&done);
 }


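The NaN handling above relies on the IEEE-754 encoding: a double whose upper 32 bits reach the all-ones exponent pattern is either NaN or an Infinity, and it is NaN exactly when some fraction bit is also set. The following is a standalone sketch of that classification, not a line-for-line port of the assembly; the 0x7FF00000 constant is assumed to correspond to kNaNOrInfinityLowerBoundUpper32:

#include <cassert>
#include <cstdint>
#include <cstring>
#include <limits>

// NaN or +/-Infinity: exponent bits are all ones (sign bit ignored here).
static bool IsNaNOrInfinity(double d) {
  uint64_t bits;
  std::memcpy(&bits, &d, sizeof bits);
  uint32_t upper = static_cast<uint32_t>(bits >> 32);
  return (upper & 0x7FFFFFFF) >= 0x7FF00000;
}

// NaN: exponent all ones and a non-zero fraction (in the upper or lower word).
static bool IsNaNBitwise(double d) {
  uint64_t bits;
  std::memcpy(&bits, &d, sizeof bits);
  uint32_t upper = static_cast<uint32_t>(bits >> 32);
  uint32_t lower = static_cast<uint32_t>(bits);
  return (upper & 0x7FFFFFFF) > 0x7FF00000 ||
         ((upper & 0x7FFFFFFF) == 0x7FF00000 && lower != 0);
}

int main() {
  assert(IsNaNBitwise(std::numeric_limits<double>::quiet_NaN()));
  assert(!IsNaNBitwise(std::numeric_limits<double>::infinity()));
  assert(IsNaNOrInfinity(std::numeric_limits<double>::infinity()));
  assert(!IsNaNOrInfinity(1.0));
  return 0;
}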
 void MacroAssembler::CheckMap(Register obj,
                               Handle<Map> map,
                               Label* fail,
                               SmiCheckType smi_check_type) {
   if (smi_check_type == DO_SMI_CHECK) {
(...skipping 118 matching lines...)
   movq(map, FieldOperand(heap_object, HeapObject::kMapOffset));
   movzxbl(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
   STATIC_ASSERT(kNotStringTag != 0);
   testb(instance_type, Immediate(kIsNotStringMask));
   return zero;
 }


 void MacroAssembler::TryGetFunctionPrototype(Register function,
                                              Register result,
-                                             Label* miss) {
+                                             Label* miss,
+                                             bool miss_on_bound_function) {
   // Check that the receiver isn't a smi.
   testl(function, Immediate(kSmiTagMask));
   j(zero, miss);
 
   // Check that the function really is a function.
   CmpObjectType(function, JS_FUNCTION_TYPE, result);
   j(not_equal, miss);
 
+  if (miss_on_bound_function) {
+    movq(kScratchRegister,
+         FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
+    // It's not smi-tagged (stored in the top half of a smi-tagged 8-byte
+    // field).
+    TestBit(FieldOperand(kScratchRegister,
+                         SharedFunctionInfo::kCompilerHintsOffset),
+            SharedFunctionInfo::kBoundFunction);
+    j(not_zero, miss);
+  }
+
   // Make sure that the function has an instance prototype.
   Label non_instance;
   testb(FieldOperand(result, Map::kBitFieldOffset),
         Immediate(1 << Map::kHasNonInstancePrototype));
   j(not_zero, &non_instance, Label::kNear);
 
   // Get the prototype or initial map from the function.
   movq(result,
        FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
 
(...skipping 172 matching lines...)
 
 
 void MacroAssembler::InvokeFunction(JSFunction* function,
                                     const ParameterCount& actual,
                                     InvokeFlag flag,
                                     const CallWrapper& call_wrapper,
                                     CallKind call_kind) {
   // You can't call a function without a valid frame.
   ASSERT(flag == JUMP_FUNCTION || has_frame());
 
-  ASSERT(function->is_compiled());
   // Get the function and setup the context.
   Move(rdi, Handle<JSFunction>(function));
   movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
 
-  if (V8::UseCrankshaft()) {
-    // Since Crankshaft can recompile a function, we need to load
-    // the Code object every time we call the function.
-    movq(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
-    ParameterCount expected(function->shared()->formal_parameter_count());
-    InvokeCode(rdx, expected, actual, flag, call_wrapper, call_kind);
-  } else {
-    // Invoke the cached code.
-    Handle<Code> code(function->code());
-    ParameterCount expected(function->shared()->formal_parameter_count());
-    InvokeCode(code,
-               expected,
-               actual,
-               RelocInfo::CODE_TARGET,
-               flag,
-               call_wrapper,
-               call_kind);
-  }
+  // We call indirectly through the code field in the function to
+  // allow recompilation to take effect without changing any of the
+  // call sites.
+  movq(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
+  ParameterCount expected(function->shared()->formal_parameter_count());
+  InvokeCode(rdx, expected, actual, flag, call_wrapper, call_kind);
 }


 void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                     const ParameterCount& actual,
                                     Handle<Code> code_constant,
                                     Register code_register,
                                     Label* done,
                                     InvokeFlag flag,
                                     Label::Distance near_jump,
(...skipping 1171 matching lines...)
 
   and_(bitmap_scratch, Immediate(~Page::kPageAlignmentMask));
   addl(Operand(bitmap_scratch, MemoryChunk::kLiveBytesOffset), length);
 
   bind(&done);
 }
 
 } }  // namespace v8::internal
 
 #endif  // V8_TARGET_ARCH_X64