Chromium Code Reviews

Diff: src/arm/macro-assembler-arm.cc

Issue 146213004: A64: Synchronize with r16849. (Closed)
Base URL: https://v8.googlecode.com/svn/branches/experimental/a64
Patch Set: Created 6 years, 10 months ago
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 715 matching lines...)
   // If needed, restore wanted bits of FPSCR.
   Label fpscr_done;
   vmrs(scratch);
   tst(scratch, Operand(kVFPDefaultNaNModeControlBit));
   b(ne, &fpscr_done);
   orr(scratch, scratch, Operand(kVFPDefaultNaNModeControlBit));
   vmsr(scratch);
   bind(&fpscr_done);
 }

-void MacroAssembler::VFPCanonicalizeNaN(const DwVfpRegister value,
+
+void MacroAssembler::VFPCanonicalizeNaN(const DwVfpRegister dst,
+                                        const DwVfpRegister src,
                                         const Condition cond) {
-  vsub(value, value, kDoubleRegZero, cond);
+  vsub(dst, src, kDoubleRegZero, cond);
 }


 void MacroAssembler::VFPCompareAndSetFlags(const DwVfpRegister src1,
                                            const DwVfpRegister src2,
                                            const Condition cond) {
   // Compare and move FPSCR flags to the normal condition flags.
   VFPCompareAndLoadFlags(src1, src2, pc, cond);
 }

(...skipping 264 matching lines...)
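A note on the VFPCanonicalizeNaN change above: only the register interface changes (separate dst and src instead of canonicalizing in place); the underlying trick is untouched. The FPSCR code at the top of this hunk sets kVFPDefaultNaNModeControlBit if it is not already set, and in default-NaN mode any VFP arithmetic involving a NaN produces the canonical quiet NaN, so subtracting kDoubleRegZero canonicalizes NaNs while leaving every other value unchanged. A minimal host-side C++ sketch of the observable effect (illustrative only, not V8 code):

    #include <cmath>
    #include <limits>

    // With default-NaN mode enabled, "src - 0.0" maps any NaN, regardless of
    // payload or sign, to the single canonical quiet NaN and leaves all other
    // values, including infinities and signed zeros, unchanged.
    double CanonicalizeNaN(double src) {
      return std::isnan(src) ? std::numeric_limits<double>::quiet_NaN() : src;
    }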
   // If we are using the simulator then we should always align to the expected
   // alignment. As the simulator is used to generate snapshots we do not know
   // if the target platform will need alignment, so this is controlled from a
   // flag.
   return FLAG_sim_stack_alignment;
 #endif  // V8_HOST_ARCH_ARM
 }


 void MacroAssembler::LeaveExitFrame(bool save_doubles,
-                                    Register argument_count) {
+                                    Register argument_count,
+                                    bool restore_context) {
   // Optionally restore all double registers.
   if (save_doubles) {
     // Calculate the stack location of the saved doubles and restore them.
     const int offset = 2 * kPointerSize;
     sub(r3, fp,
         Operand(offset + DwVfpRegister::kMaxNumRegisters * kDoubleSize));
     RestoreFPRegs(r3, ip);
   }

   // Clear top frame.
   mov(r3, Operand::Zero());
   mov(ip, Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate())));
   str(r3, MemOperand(ip));

+
   // Restore current context from top and clear it in debug mode.
+  if (restore_context) {
+    mov(ip, Operand(ExternalReference(Isolate::kContextAddress, isolate())));
+    ldr(cp, MemOperand(ip));
+  }
+#ifdef DEBUG
   mov(ip, Operand(ExternalReference(Isolate::kContextAddress, isolate())));
-  ldr(cp, MemOperand(ip));
-#ifdef DEBUG
   str(r3, MemOperand(ip));
 #endif

   // Tear down the exit frame, pop the arguments, and return.
   mov(sp, Operand(fp));
   ldm(ia_w, sp, fp.bit() | lr.bit());
   if (argument_count.is_valid()) {
     add(sp, sp, Operand(argument_count, LSL, kPointerSizeLog2));
   }
 }
(...skipping 1221 matching lines...)
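The new restore_context flag lets a caller skip reloading cp from the isolate's context slot, which matters when the caller (see the CallApiFunctionAndReturn change further down) has already restored the context itself; in debug builds the slot is still cleared either way. A hedged host-side C++ model of just that decision (names are illustrative, this is not the generated ARM code):

    #include <cstdint>

    struct IsolateSlots { uintptr_t context; };  // stands in for Isolate::kContextAddress

    // Returns the value cp ends up with after LeaveExitFrame.
    uintptr_t LeaveExitFrameContext(IsolateSlots* isolate, uintptr_t cp,
                                    bool restore_context, bool debug_build) {
      if (restore_context) cp = isolate->context;  // ldr(cp, MemOperand(ip))
      if (debug_build) isolate->context = 0;       // str(r3, ...) under #ifdef DEBUG (r3 is zero here)
      return cp;
    }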
          stub->CompilingCallsToThisStubIsGCSafe(isolate()));
   Jump(stub->GetCode(isolate()), RelocInfo::CODE_TARGET, cond);
 }


 static int AddressOffset(ExternalReference ref0, ExternalReference ref1) {
   return ref0.address() - ref1.address();
 }


-void MacroAssembler::CallApiFunctionAndReturn(ExternalReference function,
-                                              Address function_address,
-                                              ExternalReference thunk_ref,
-                                              Register thunk_last_arg,
-                                              int stack_space,
-                                              int return_value_offset) {
+void MacroAssembler::CallApiFunctionAndReturn(
+    ExternalReference function,
+    Address function_address,
+    ExternalReference thunk_ref,
+    Register thunk_last_arg,
+    int stack_space,
+    MemOperand return_value_operand,
+    MemOperand* context_restore_operand) {
   ExternalReference next_address =
       ExternalReference::handle_scope_next_address(isolate());
   const int kNextOffset = 0;
   const int kLimitOffset = AddressOffset(
       ExternalReference::handle_scope_limit_address(isolate()),
       next_address);
   const int kLevelOffset = AddressOffset(
       ExternalReference::handle_scope_level_address(isolate()),
       next_address);

(...skipping 43 matching lines...)
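The signature change above replaces the old int return_value_offset with a caller-supplied MemOperand and adds an optional MemOperand for restoring the context; passing NULL for context_restore_operand keeps the previous behaviour of letting LeaveExitFrame reload cp from the isolate. A hypothetical call-site shape (the frame offsets, the register and the constant below are illustrative only, not taken from this CL):

    MemOperand return_value_operand(fp, 3 * kPointerSize);     // illustrative frame slot
    MemOperand context_restore_operand(fp, 2 * kPointerSize);  // illustrative frame slot
    masm->CallApiFunctionAndReturn(function,
                                   function_address,
                                   thunk_ref,
                                   r0,                  // thunk_last_arg, illustrative
                                   kStackUnwindSpace,   // illustrative unwind-space constant
                                   return_value_operand,
                                   &context_restore_operand);  // or NULL to skip restoring cp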
   if (FLAG_log_timer_events) {
     FrameScope frame(this, StackFrame::MANUAL);
     PushSafepointRegisters();
     PrepareCallCFunction(1, r0);
     mov(r0, Operand(ExternalReference::isolate_address(isolate())));
     CallCFunction(ExternalReference::log_leave_external_function(isolate()), 1);
     PopSafepointRegisters();
   }

   Label promote_scheduled_exception;
+  Label exception_handled;
   Label delete_allocated_handles;
   Label leave_exit_frame;
   Label return_value_loaded;

   // load value from ReturnValue
-  ldr(r0, MemOperand(fp, return_value_offset*kPointerSize));
+  ldr(r0, return_value_operand);
   bind(&return_value_loaded);
   // No more valid handles (the result handle was the last one). Restore
   // previous handle scope.
   str(r4, MemOperand(r7, kNextOffset));
   if (emit_debug_code()) {
     ldr(r1, MemOperand(r7, kLevelOffset));
     cmp(r1, r6);
     Check(eq, kUnexpectedLevelAfterReturnFromApiCall);
   }
   sub(r6, r6, Operand(1));
   str(r6, MemOperand(r7, kLevelOffset));
   ldr(ip, MemOperand(r7, kLimitOffset));
   cmp(r5, ip);
   b(ne, &delete_allocated_handles);

   // Check if the function scheduled an exception.
   bind(&leave_exit_frame);
   LoadRoot(r4, Heap::kTheHoleValueRootIndex);
   mov(ip, Operand(ExternalReference::scheduled_exception_address(isolate())));
   ldr(r5, MemOperand(ip));
   cmp(r4, r5);
   b(ne, &promote_scheduled_exception);
+  bind(&exception_handled);

+  bool restore_context = context_restore_operand != NULL;
+  if (restore_context) {
+    ldr(cp, *context_restore_operand);
+  }
   // LeaveExitFrame expects unwind space to be in a register.
   mov(r4, Operand(stack_space));
-  LeaveExitFrame(false, r4);
+  LeaveExitFrame(false, r4, !restore_context);
   mov(pc, lr);

   bind(&promote_scheduled_exception);
-  TailCallExternalReference(
-      ExternalReference(Runtime::kPromoteScheduledException, isolate()),
-      0,
-      1);
+  {
+    FrameScope frame(this, StackFrame::INTERNAL);
+    CallExternalReference(
+        ExternalReference(Runtime::kPromoteScheduledException, isolate()),
+        0);
+  }
+  jmp(&exception_handled);

   // HandleScope limit has changed. Delete allocated extensions.
   bind(&delete_allocated_handles);
   str(r5, MemOperand(r7, kLimitOffset));
   mov(r4, r0);
   PrepareCallCFunction(1, r5);
   mov(r0, Operand(ExternalReference::isolate_address(isolate())));
   CallCFunction(
       ExternalReference::delete_handle_scope_extensions(isolate()), 1);
   mov(r0, r4);
(...skipping 671 matching lines...)
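For readers new to this code, the unchanged handle-scope bookkeeping above works as follows: on return from the API call the saved "next" pointer (r4) is written back, the nesting level (r6) is checked and decremented, and if the "limit" pointer moved while the callback ran (r5 no longer matches), the extra handle blocks are released via delete_handle_scope_extensions before leaving the exit frame. A hedged host-side C++ model of that sequence (field names are illustrative):

    #include <cstdint>

    struct HandleScopeData {   // models the block at handle_scope_next_address
      uintptr_t* next;
      uintptr_t* limit;
      int level;
    };

    // Returns true when the "delete_allocated_handles" path must run.
    bool RestoreHandleScope(HandleScopeData* data, uintptr_t* saved_next,
                            uintptr_t* saved_limit, int entry_level) {
      data->next = saved_next;            // str(r4, MemOperand(r7, kNextOffset))
      data->level = entry_level - 1;      // sub(r6, r6, Operand(1)); str(r6, ...)
      return data->limit != saved_limit;  // cmp(r5, ip); b(ne, &delete_allocated_handles)
    }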
                                          Register heap_number_map,
                                          Register scratch,
                                          Label* on_not_heap_number) {
   ldr(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
   AssertIsRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);
   cmp(scratch, heap_number_map);
   b(ne, on_not_heap_number);
 }


+void MacroAssembler::LookupNumberStringCache(Register object,
+                                             Register result,
+                                             Register scratch1,
+                                             Register scratch2,
+                                             Register scratch3,
+                                             Label* not_found) {
+  // Use of registers. Register result is used as a temporary.
+  Register number_string_cache = result;
+  Register mask = scratch3;
+
+  // Load the number string cache.
+  LoadRoot(number_string_cache, Heap::kNumberStringCacheRootIndex);
+
+  // Make the hash mask from the length of the number string cache. It
+  // contains two elements (number and string) for each cache entry.
+  ldr(mask, FieldMemOperand(number_string_cache, FixedArray::kLengthOffset));
+  // Divide length by two (length is a smi).
+  mov(mask, Operand(mask, ASR, kSmiTagSize + 1));
+  sub(mask, mask, Operand(1));  // Make mask.
+
+  // Calculate the entry in the number string cache. The hash value in the
+  // number string cache for smis is just the smi value, and the hash for
+  // doubles is the xor of the upper and lower words. See
+  // Heap::GetNumberStringCache.
+  Label is_smi;
+  Label load_result_from_cache;
+  JumpIfSmi(object, &is_smi);
+  CheckMap(object,
+           scratch1,
+           Heap::kHeapNumberMapRootIndex,
+           not_found,
+           DONT_DO_SMI_CHECK);
+
+  STATIC_ASSERT(8 == kDoubleSize);
+  add(scratch1,
+      object,
+      Operand(HeapNumber::kValueOffset - kHeapObjectTag));
+  ldm(ia, scratch1, scratch1.bit() | scratch2.bit());
+  eor(scratch1, scratch1, Operand(scratch2));
+  and_(scratch1, scratch1, Operand(mask));
+
+  // Calculate address of entry in string cache: each entry consists
+  // of two pointer sized fields.
+  add(scratch1,
+      number_string_cache,
+      Operand(scratch1, LSL, kPointerSizeLog2 + 1));
+
+  Register probe = mask;
+  ldr(probe, FieldMemOperand(scratch1, FixedArray::kHeaderSize));
+  JumpIfSmi(probe, not_found);
+  sub(scratch2, object, Operand(kHeapObjectTag));
+  vldr(d0, scratch2, HeapNumber::kValueOffset);
+  sub(probe, probe, Operand(kHeapObjectTag));
+  vldr(d1, probe, HeapNumber::kValueOffset);
+  VFPCompareAndSetFlags(d0, d1);
+  b(ne, not_found);  // The cache did not contain this value.
+  b(&load_result_from_cache);
+
+  bind(&is_smi);
+  Register scratch = scratch1;
+  and_(scratch, mask, Operand(object, ASR, 1));
+  // Calculate address of entry in string cache: each entry consists
+  // of two pointer sized fields.
+  add(scratch,
+      number_string_cache,
+      Operand(scratch, LSL, kPointerSizeLog2 + 1));
+
+  // Check if the entry is the smi we are looking for.
+  ldr(probe, FieldMemOperand(scratch, FixedArray::kHeaderSize));
+  cmp(object, probe);
+  b(ne, not_found);
+
+  // Get the result from the cache.
+  bind(&load_result_from_cache);
+  ldr(result, FieldMemOperand(scratch, FixedArray::kHeaderSize + kPointerSize));
+  IncrementCounter(isolate()->counters()->number_to_string_native(),
+                   1,
+                   scratch1,
+                   scratch2);
+}
+
+
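The hash computed by the new LookupNumberStringCache above mirrors Heap::GetNumberStringCache: a smi hashes to its untagged value, a heap number to the xor of the two 32-bit halves of its IEEE-754 representation, and both are masked to the number of cache entries (the cache stores a number/string pair per entry, so the mask is length/2 - 1). A host-side C++ sketch of that hash (illustrative, not V8 code):

    #include <cstdint>
    #include <cstring>

    // mask is (cache_length / 2) - 1, with cache_length a power of two.
    uint32_t SmiHash(int32_t untagged_smi, uint32_t mask) {
      return static_cast<uint32_t>(untagged_smi) & mask;   // and_(scratch, mask, Operand(object, ASR, 1))
    }

    uint32_t HeapNumberHash(double value, uint32_t mask) {
      uint64_t bits;
      std::memcpy(&bits, &value, sizeof(bits));            // ldm(ia, ...) loads both words
      uint32_t lo = static_cast<uint32_t>(bits);
      uint32_t hi = static_cast<uint32_t>(bits >> 32);
      return (lo ^ hi) & mask;                             // eor(...); and_(..., mask)
    }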
 void MacroAssembler::JumpIfNonSmisNotBothSequentialAsciiStrings(
     Register first,
     Register second,
     Register scratch1,
     Register scratch2,
     Label* failure) {
   // Test that both first and second are sequential ASCII strings.
   // Assume that they are non-smis.
   ldr(scratch1, FieldMemOperand(first, HeapObject::kMapOffset));
   ldr(scratch2, FieldMemOperand(second, HeapObject::kMapOffset));
(...skipping 792 matching lines...)
 void CodePatcher::EmitCondition(Condition cond) {
   Instr instr = Assembler::instr_at(masm_.pc_);
   instr = (instr & ~kCondMask) | cond;
   masm_.emit(instr);
 }
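EmitCondition rewrites only the predicate of an already-encoded instruction: on ARM the condition code occupies the top four bits of each instruction word, so clearing kCondMask and OR-ing in the new condition re-predicates the instruction in place. A small standalone illustration (kCondMask is assumed to be 0xF0000000 here, and the encoding used is an ordinary ARM MOV-immediate):

    #include <cstdint>
    #include <cstdio>

    int main() {
      const uint32_t kCondMask = 0xF0000000u;  // assumed: condition field, bits 31..28
      const uint32_t kEq = 0x0u << 28;
      const uint32_t kNe = 0x1u << 28;
      uint32_t instr = kEq | 0x03A00001u;      // MOVEQ r0, #1
      instr = (instr & ~kCondMask) | kNe;      // same operation, now predicated on NE
      std::printf("0x%08X\n", instr);          // prints 0x13A00001, i.e. MOVNE r0, #1
      return 0;
    }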


 } }  // namespace v8::internal

 #endif  // V8_TARGET_ARCH_ARM