OLD | NEW |
---|---|
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 3168 matching lines...)
3179 | 3179 |
3180 int remain = field_count % (DwVfpRegister::kSizeInBytes / kPointerSize); | 3180 int remain = field_count % (DwVfpRegister::kSizeInBytes / kPointerSize); |
3181 if (remain != 0) { | 3181 if (remain != 0) { |
3182 vldr(double_scratch.low(), | 3182 vldr(double_scratch.low(), |
3183 FieldMemOperand(src, (field_count - 1) * kPointerSize)); | 3183 FieldMemOperand(src, (field_count - 1) * kPointerSize)); |
3184 vstr(double_scratch.low(), | 3184 vstr(double_scratch.low(), |
3185 FieldMemOperand(dst, (field_count - 1) * kPointerSize)); | 3185 FieldMemOperand(dst, (field_count - 1) * kPointerSize)); |
3186 } | 3186 } |
3187 } | 3187 } |
3188 | 3188 |
3189 | |
3190 void MacroAssembler::CopyBytes(Register src, | 3189 void MacroAssembler::CopyBytes(Register src, |
3191 Register dst, | 3190 Register dst, |
3192 Register length, | 3191 Register length, |
3193 Register scratch) { | 3192 Register scratch) { |
3194 Label align_loop, align_loop_1, word_loop, byte_loop, byte_loop_1, done; | 3193 Label word_loop, byte_loop, byte_loop_1, done; |
3194 | |
Benedikt Meurer
2013/09/03 11:54:35
Please add ASSERTs that src, dst, length and scratch are all different registers.
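A minimal sketch of the requested guards, assuming V8's ASSERT macro and the Register::is() predicate from the same era, placed at the top of CopyBytes:

    // Sketch only: pairwise distinctness checks for the four registers.
    // Assumes the V8-internal ASSERT macro and Register::is().
    ASSERT(!src.is(dst) && !src.is(length) && !src.is(scratch));
    ASSERT(!dst.is(length) && !dst.is(scratch));
    ASSERT(!length.is(scratch));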
3195 cmp(length, Operand(8)); | |
3196 b(lt, &byte_loop); | |
3195 | 3197 |
3196 // Align src before copying in word size chunks. | 3198 // Align src before copying in word size chunks. |
3197 bind(&align_loop); | |
3198 cmp(length, Operand::Zero()); | |
3199 b(eq, &done); | |
3200 bind(&align_loop_1); | |
3201 tst(src, Operand(kPointerSize - 1)); | 3199 tst(src, Operand(kPointerSize - 1)); |
Rodolph Perfetta
2013/09/03 14:47:07
This code could be improved further, see the tail e
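One illustrative shape such an improvement could take (a sketch only, not necessarily what the reviewer had in mind): replace the tst/branch chain with ARM conditional execution, assuming kPointerSize == 4 and that length >= 8 is already guaranteed by the cmp/b(lt) above:

    // Illustrative branchless alignment of src. Flags from each tst
    // gate the following conditional loads, stores and subtracts.
    tst(src, Operand(1));                              // src odd?
    ldrb(scratch, MemOperand(src, 1, PostIndex), ne);  // copy one byte
    strb(scratch, MemOperand(dst, 1, PostIndex), ne);
    sub(length, length, Operand(1), LeaveCC, ne);
    tst(src, Operand(2));                              // 2-byte aligned?
    ldrh(scratch, MemOperand(src, 2, PostIndex), ne);  // copy a halfword
    strh(scratch, MemOperand(dst, 2, PostIndex), ne);
    sub(length, length, Operand(2), LeaveCC, ne);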
3202 b(eq, &word_loop); | 3200 b(eq, &word_loop); |
3201 | |
3203 ldrb(scratch, MemOperand(src, 1, PostIndex)); | 3202 ldrb(scratch, MemOperand(src, 1, PostIndex)); |
3203 sub(length, length, Operand(1)); | |
3204 tst(src, Operand(kPointerSize - 1)); | |
3204 strb(scratch, MemOperand(dst, 1, PostIndex)); | 3205 strb(scratch, MemOperand(dst, 1, PostIndex)); |
3205 sub(length, length, Operand(1), SetCC); | 3206 b(eq, &word_loop); |
3206 b(ne, &byte_loop_1); | 3207 |
3208 ldrb(scratch, MemOperand(src, 1, PostIndex)); | |
3209 sub(length, length, Operand(1)); | |
3210 tst(src, Operand(kPointerSize - 1)); | |
3211 strb(scratch, MemOperand(dst, 1, PostIndex)); | |
3212 b(eq, &word_loop); | |
3213 | |
3214 ldrb(scratch, MemOperand(src, 1, PostIndex)); | |
3215 sub(length, length, Operand(1)); | |
3216 strb(scratch, MemOperand(dst, 1, PostIndex)); | |
3207 | 3217 |
3208 // Copy bytes in word size chunks. | 3218 // Copy bytes in word size chunks. |
3209 bind(&word_loop); | 3219 bind(&word_loop); |
3210 if (emit_debug_code()) { | 3220 |
3211 tst(src, Operand(kPointerSize - 1)); | |
3212 Assert(eq, kExpectingAlignmentForCopyBytes); | |
3213 } | |
3214 cmp(length, Operand(kPointerSize)); | |
3215 b(lt, &byte_loop); | |
3216 ldr(scratch, MemOperand(src, kPointerSize, PostIndex)); | |
3217 if (CpuFeatures::IsSupported(UNALIGNED_ACCESSES)) { | 3221 if (CpuFeatures::IsSupported(UNALIGNED_ACCESSES)) { |
3222 ldr(scratch, MemOperand(src, kPointerSize, PostIndex)); | |
3223 sub(length, length, Operand(kPointerSize)); | |
3224 cmp(length, Operand(kPointerSize)); | |
3218 str(scratch, MemOperand(dst, kPointerSize, PostIndex)); | 3225 str(scratch, MemOperand(dst, kPointerSize, PostIndex)); |
3219 } else { | 3226 } else { |
3227 if (emit_debug_code()) { | |
3228 tst(src, Operand(kPointerSize - 1)); | |
3229 Assert(eq, kExpectingAlignmentForCopyBytes); | |
3230 } | |
3231 cmp(length, Operand(kPointerSize)); | |
3232 b(lt, &byte_loop); | |
3233 ldr(scratch, MemOperand(src, kPointerSize, PostIndex)); | |
3234 sub(length, length, Operand(kPointerSize)); | |
3220 strb(scratch, MemOperand(dst, 1, PostIndex)); | 3235 strb(scratch, MemOperand(dst, 1, PostIndex)); |
3221 mov(scratch, Operand(scratch, LSR, 8)); | 3236 mov(scratch, Operand(scratch, LSR, 8)); |
3222 strb(scratch, MemOperand(dst, 1, PostIndex)); | 3237 strb(scratch, MemOperand(dst, 1, PostIndex)); |
3223 mov(scratch, Operand(scratch, LSR, 8)); | 3238 mov(scratch, Operand(scratch, LSR, 8)); |
3224 strb(scratch, MemOperand(dst, 1, PostIndex)); | 3239 strb(scratch, MemOperand(dst, 1, PostIndex)); |
3225 mov(scratch, Operand(scratch, LSR, 8)); | 3240 mov(scratch, Operand(scratch, LSR, 8)); |
3226 strb(scratch, MemOperand(dst, 1, PostIndex)); | 3241 strb(scratch, MemOperand(dst, 1, PostIndex)); |
3227 } | 3242 } |
3228 sub(length, length, Operand(kPointerSize)); | 3243 |
3229 b(&word_loop); | 3244 b(ge, &word_loop); |
3230 | 3245 |
3231 // Copy the last bytes if any left. | 3246 // Copy the last bytes if any left. |
3232 bind(&byte_loop); | 3247 bind(&byte_loop); |
Rodolph Perfetta
2013/09/03 14:47:07
Here too you could avoid the small loop.
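One hypothetical way to unroll this tail (a sketch, not necessarily the reviewer's concrete proposal): at most seven bytes can remain here, so the low bits of length can gate conditional copies directly. This assumes kPointerSize == 4 and UNALIGNED_ACCESSES, since the word and halfword copies may be misaligned:

    // Illustrative unrolled tail for 0-7 remaining bytes.
    tst(length, Operand(4));
    ldr(scratch, MemOperand(src, 4, PostIndex), ne);
    str(scratch, MemOperand(dst, 4, PostIndex), ne);
    tst(length, Operand(2));
    ldrh(scratch, MemOperand(src, 2, PostIndex), ne);
    strh(scratch, MemOperand(dst, 2, PostIndex), ne);
    tst(length, Operand(1));
    ldrb(scratch, MemOperand(src, 1, PostIndex), ne);
    strb(scratch, MemOperand(dst, 1, PostIndex), ne);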
3233 cmp(length, Operand::Zero()); | 3248 cmp(length, Operand::Zero()); |
3234 b(eq, &done); | 3249 b(eq, &done); |
3235 bind(&byte_loop_1); | 3250 bind(&byte_loop_1); |
3236 ldrb(scratch, MemOperand(src, 1, PostIndex)); | 3251 ldrb(scratch, MemOperand(src, 1, PostIndex)); |
3252 sub(length, length, Operand(1), SetCC); | |
3237 strb(scratch, MemOperand(dst, 1, PostIndex)); | 3253 strb(scratch, MemOperand(dst, 1, PostIndex)); |
3238 sub(length, length, Operand(1), SetCC); | |
3239 b(ne, &byte_loop_1); | 3254 b(ne, &byte_loop_1); |
3240 bind(&done); | 3255 bind(&done); |
3241 } | 3256 } |
3242 | 3257 |
3243 | |
3244 void MacroAssembler::InitializeFieldsWithFiller(Register start_offset, | 3258 void MacroAssembler::InitializeFieldsWithFiller(Register start_offset, |
3245 Register end_offset, | 3259 Register end_offset, |
3246 Register filler) { | 3260 Register filler) { |
3247 Label loop, entry; | 3261 Label loop, entry; |
3248 b(&entry); | 3262 b(&entry); |
3249 bind(&loop); | 3263 bind(&loop); |
3250 str(filler, MemOperand(start_offset, kPointerSize, PostIndex)); | 3264 str(filler, MemOperand(start_offset, kPointerSize, PostIndex)); |
3251 bind(&entry); | 3265 bind(&entry); |
3252 cmp(start_offset, end_offset); | 3266 cmp(start_offset, end_offset); |
3253 b(lt, &loop); | 3267 b(lt, &loop); |
(...skipping 625 matching lines...)
3879 void CodePatcher::EmitCondition(Condition cond) { | 3893 void CodePatcher::EmitCondition(Condition cond) { |
3880 Instr instr = Assembler::instr_at(masm_.pc_); | 3894 Instr instr = Assembler::instr_at(masm_.pc_); |
3881 instr = (instr & ~kCondMask) | cond; | 3895 instr = (instr & ~kCondMask) | cond; |
3882 masm_.emit(instr); | 3896 masm_.emit(instr); |
3883 } | 3897 } |
3884 | 3898 |
3885 | 3899 |
3886 } } // namespace v8::internal | 3900 } } // namespace v8::internal |
3887 | 3901 |
3888 #endif // V8_TARGET_ARCH_ARM | 3902 #endif // V8_TARGET_ARCH_ARM |