OLD | NEW |
1 // Copyright 2006-2009 the V8 project authors. All rights reserved. | 1 // Copyright 2006-2009 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 1203 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1214 } | 1214 } |
1215 // The context may be an intermediate context, not a function context. | 1215 // The context may be an intermediate context, not a function context. |
1216 ldr(dst, MemOperand(dst, Context::SlotOffset(Context::FCONTEXT_INDEX))); | 1216 ldr(dst, MemOperand(dst, Context::SlotOffset(Context::FCONTEXT_INDEX))); |
1217 } else { // Slot is in the current function context. | 1217 } else { // Slot is in the current function context. |
1218 // The context may be an intermediate context, not a function context. | 1218 // The context may be an intermediate context, not a function context. |
1219 ldr(dst, MemOperand(cp, Context::SlotOffset(Context::FCONTEXT_INDEX))); | 1219 ldr(dst, MemOperand(cp, Context::SlotOffset(Context::FCONTEXT_INDEX))); |
1220 } | 1220 } |
1221 } | 1221 } |
1222 | 1222 |
1223 | 1223 |
// Branches to |failure| unless both |first| and |second| hold flat
// (sequential) ASCII strings.  Callers must already have ruled out smis in
// both registers.  Clobbers scratch1 and scratch2; falls through on success.
void MacroAssembler::JumpIfNonSmisNotBothSequentialAsciiStrings(
    Register first,
    Register second,
    Register scratch1,
    Register scratch2,
    Label* failure) {
  // Test that both first and second are sequential ASCII strings.
  // Assume that they are non-smis.
  // Load each object's map, then the 8-bit instance type from the map.
  ldr(scratch1, FieldMemOperand(first, HeapObject::kMapOffset));
  ldr(scratch2, FieldMemOperand(second, HeapObject::kMapOffset));
  ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
  ldrb(scratch2, FieldMemOperand(scratch2, Map::kInstanceTypeOffset));
  // A flat ASCII string is identified by the string bit, the encoding bits
  // and the representation bits together; mask out all other type bits
  // before comparing against ASCII_STRING_TYPE.
  int kFlatAsciiStringMask =
      kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask;
  int kFlatAsciiStringTag = ASCII_STRING_TYPE;
  and_(scratch1, scratch1, Operand(kFlatAsciiStringMask));
  and_(scratch2, scratch2, Operand(kFlatAsciiStringMask));
  cmp(scratch1, Operand(kFlatAsciiStringTag));
  // Ignore second test if first test failed: the second cmp executes only
  // when the first comparison set eq, so "ne" below means either test failed.
  cmp(scratch2, Operand(kFlatAsciiStringTag), eq);
  b(ne, failure);
}
| 1246 |
// Branches to |failure| unless both |first| and |second| are non-smi, flat
// (sequential) ASCII strings.  Performs the smi check itself, then delegates
// the string-type check.  Clobbers scratch1 and scratch2.
void MacroAssembler::JumpIfNotBothSequentialAsciiStrings(Register first,
                                                         Register second,
                                                         Register scratch1,
                                                         Register scratch2,
                                                         Label* failure) {
  // Check that neither is a smi.  The smi tag is 0 (asserted below), so if
  // either value is a smi the AND of the two values has a clear tag bit and
  // the tst sets eq.
  ASSERT_EQ(0, kSmiTag);
  and_(scratch1, first, Operand(second));
  tst(scratch1, Operand(kSmiTagMask));
  b(eq, failure);
  // Both are heap objects; now verify they are sequential ASCII strings.
  JumpIfNonSmisNotBothSequentialAsciiStrings(first,
                                             second,
                                             scratch1,
                                             scratch2,
                                             failure);
}
| 1263 |
1224 | 1264 |
1225 #ifdef ENABLE_DEBUGGER_SUPPORT | 1265 #ifdef ENABLE_DEBUGGER_SUPPORT |
1226 CodePatcher::CodePatcher(byte* address, int instructions) | 1266 CodePatcher::CodePatcher(byte* address, int instructions) |
1227 : address_(address), | 1267 : address_(address), |
1228 instructions_(instructions), | 1268 instructions_(instructions), |
1229 size_(instructions * Assembler::kInstrSize), | 1269 size_(instructions * Assembler::kInstrSize), |
1230 masm_(address, size_ + Assembler::kGap) { | 1270 masm_(address, size_ + Assembler::kGap) { |
1231 // Create a new macro assembler pointing to the address of the code to patch. | 1271 // Create a new macro assembler pointing to the address of the code to patch. |
1232 // The size is adjusted with kGap in order for the assembler to generate size | 1272 // The size is adjusted with kGap in order for the assembler to generate size |
1233 // bytes of instructions without failing with buffer size constraints. | 1273 // bytes of instructions without failing with buffer size constraints. |
(...skipping 16 matching lines...) Expand all Loading... |
1250 } | 1290 } |
1251 | 1291 |
1252 | 1292 |
1253 void CodePatcher::Emit(Address addr) { | 1293 void CodePatcher::Emit(Address addr) { |
1254 masm()->emit(reinterpret_cast<Instr>(addr)); | 1294 masm()->emit(reinterpret_cast<Instr>(addr)); |
1255 } | 1295 } |
1256 #endif // ENABLE_DEBUGGER_SUPPORT | 1296 #endif // ENABLE_DEBUGGER_SUPPORT |
1257 | 1297 |
1258 | 1298 |
1259 } } // namespace v8::internal | 1299 } } // namespace v8::internal |
OLD | NEW |