OLD | NEW |
---|---|
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 1170 matching lines...) | |
1181 Immediate(PropertyDetails::TypeField::kMask << kSmiTagSize)); | 1181 Immediate(PropertyDetails::TypeField::kMask << kSmiTagSize)); |
1182 j(not_zero, miss); | 1182 j(not_zero, miss); |
1183 | 1183 |
1184 // Get the value at the masked, scaled index. | 1184 // Get the value at the masked, scaled index. |
1185 const int kValueOffset = | 1185 const int kValueOffset = |
1186 SeededNumberDictionary::kElementsStartOffset + kPointerSize; | 1186 SeededNumberDictionary::kElementsStartOffset + kPointerSize; |
1187 mov(result, FieldOperand(elements, r2, times_pointer_size, kValueOffset)); | 1187 mov(result, FieldOperand(elements, r2, times_pointer_size, kValueOffset)); |
1188 } | 1188 } |
1189 | 1189 |
1190 | 1190 |
1191 ExternalReference MacroAssembler::GetTopAddress(AllocationTarget target) { | |
1192 if (target == NEW_SPACE) { | |
1193 return ExternalReference::new_space_allocation_top_address(isolate()); | |
1194 } else { | |
1195 return ExternalReference::old_pointer_space_allocation_top_address( | |
1196 isolate()); | |
1197 } | |
1198 } | |
1199 | |
1200 | |
1201 ExternalReference MacroAssembler::GetLimitAddress(AllocationTarget target) { | |
1202 if (target == NEW_SPACE) { | |
1203 return ExternalReference::new_space_allocation_limit_address(isolate()); | |
1204 } else { | |
1205 return ExternalReference::old_pointer_space_allocation_limit_address( | |
1206 isolate()); | |
1207 } | |
1208 } | |
1209 | |
1210 | |
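For orientation, a minimal sketch (not taken from the patch) of the bump-pointer fast path that these two address getters parameterize; the register names follow the helper signatures below, and object_size, gc_required, flags, and target stand in for caller-supplied values:

  // result <- current allocation top of the chosen space.
  LoadAllocationTopHelper(result, scratch, flags, target);
  // Bump by the requested size; bail to the slow path on overflow.
  mov(result_end, result);
  add(result_end, Immediate(object_size));
  j(carry, gc_required);
  // Bail out if the bump crosses the same space's limit.
  cmp(result_end, Operand::StaticVariable(GetLimitAddress(target)));
  j(above, gc_required);
  // Publish the new top for that space.
  UpdateAllocationTopHelper(result_end, scratch, target);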
1191 void MacroAssembler::LoadAllocationTopHelper(Register result, | 1211 void MacroAssembler::LoadAllocationTopHelper(Register result, |
1192 Register scratch, | 1212 Register scratch, |
1193 AllocationFlags flags) { | 1213 AllocationFlags flags, |
1194 ExternalReference new_space_allocation_top = | 1214 AllocationTarget target) { |
1195 ExternalReference::new_space_allocation_top_address(isolate()); | 1215 ExternalReference allocation_top = GetTopAddress(target); |
1196 | 1216 |
1197 // Just return if allocation top is already known. | 1217 // Just return if allocation top is already known. |
1198 if ((flags & RESULT_CONTAINS_TOP) != 0) { | 1218 if ((flags & RESULT_CONTAINS_TOP) != 0) { |
1199 // No use of scratch if allocation top is provided. | 1219 // No use of scratch if allocation top is provided. |
1200 ASSERT(scratch.is(no_reg)); | 1220 ASSERT(scratch.is(no_reg)); |
1201 #ifdef DEBUG | 1221 #ifdef DEBUG |
1202 // Assert that result actually contains top on entry. | 1222 // Assert that result actually contains top on entry. |
1203 cmp(result, Operand::StaticVariable(new_space_allocation_top)); | 1223 cmp(result, Operand::StaticVariable(allocation_top)); |
1204 Check(equal, "Unexpected allocation top"); | 1224 Check(equal, "Unexpected allocation top"); |
1205 #endif | 1225 #endif |
1206 return; | 1226 return; |
1207 } | 1227 } |
1208 | 1228 |
1209 // Move address of new object to result. Use scratch register if available. | 1229 // Move address of new object to result. Use scratch register if available. |
1210 if (scratch.is(no_reg)) { | 1230 if (scratch.is(no_reg)) { |
1211 mov(result, Operand::StaticVariable(new_space_allocation_top)); | 1231 mov(result, Operand::StaticVariable(allocation_top)); |
1212 } else { | 1232 } else { |
1213 mov(scratch, Immediate(new_space_allocation_top)); | 1233 mov(scratch, Immediate(allocation_top)); |
1214 mov(result, Operand(scratch, 0)); | 1234 mov(result, Operand(scratch, 0)); |
1215 } | 1235 } |
1216 } | 1236 } |
1217 | 1237 |
1218 | 1238 |
1219 void MacroAssembler::UpdateAllocationTopHelper(Register result_end, | 1239 void MacroAssembler::UpdateAllocationTopHelper(Register result_end, |
1220 Register scratch) { | 1240 Register scratch, |
1241 AllocationTarget target) { | |
1221 if (emit_debug_code()) { | 1242 if (emit_debug_code()) { |
1222 test(result_end, Immediate(kObjectAlignmentMask)); | 1243 test(result_end, Immediate(kObjectAlignmentMask)); |
1223 Check(zero, "Unaligned allocation in new space"); | 1244 Check(zero, "Unaligned allocation in new space"); |
1224 } | 1245 } |
1225 | 1246 |
1226 ExternalReference new_space_allocation_top = | 1247 ExternalReference allocation_top = GetTopAddress(target); |
1227 ExternalReference::new_space_allocation_top_address(isolate()); | |
1228 | 1248 |
1229 // Update new top. Use scratch if available. | 1249 // Update new top. Use scratch if available. |
1230 if (scratch.is(no_reg)) { | 1250 if (scratch.is(no_reg)) { |
1231 mov(Operand::StaticVariable(new_space_allocation_top), result_end); | 1251 mov(Operand::StaticVariable(allocation_top), result_end); |
1232 } else { | 1252 } else { |
1233 mov(Operand(scratch, 0), result_end); | 1253 mov(Operand(scratch, 0), result_end); |
1234 } | 1254 } |
1235 } | 1255 } |
1236 | 1256 |
1237 | 1257 |
1238 void MacroAssembler::AllocateInNewSpace(int object_size, | |
1239 Register result, | |
1240 Register result_end, | |
1241 Register scratch, | |
1242 Label* gc_required, | |
1243 AllocationFlags flags) { | |
1244 ASSERT((flags & (RESULT_CONTAINS_TOP | SIZE_IN_WORDS)) == 0); | |
1245 if (!FLAG_inline_new) { | |
1246 if (emit_debug_code()) { | |
1247 // Trash the registers to simulate an allocation failure. | |
1248 mov(result, Immediate(0x7091)); | |
1249 if (result_end.is_valid()) { | |
1250 mov(result_end, Immediate(0x7191)); | |
1251 } | |
1252 if (scratch.is_valid()) { | |
1253 mov(scratch, Immediate(0x7291)); | |
1254 } | |
1255 } | |
1256 jmp(gc_required); | |
1257 return; | |
1258 } | |
1259 ASSERT(!result.is(result_end)); | |
1260 | |
1261 // Load address of new object into result. | |
1262 LoadAllocationTopHelper(result, scratch, flags); | |
1263 | |
1264 // Align the next allocation. Storing the filler map without checking top is | |
1265 // always safe because the limit of the heap is always aligned. | |
1266 if ((flags & DOUBLE_ALIGNMENT) != 0) { | |
1267 ASSERT(kPointerAlignment * 2 == kDoubleAlignment); | |
1268 Label aligned; | |
1269 test(result, Immediate(kDoubleAlignmentMask)); | |
1270 j(zero, &aligned, Label::kNear); | |
1271 mov(Operand(result, 0), | |
1272 Immediate(isolate()->factory()->one_pointer_filler_map())); | |
1273 add(result, Immediate(kDoubleSize / 2)); | |
1274 bind(&aligned); | |
1275 } | |
1276 | |
1277 Register top_reg = result_end.is_valid() ? result_end : result; | |
1278 | |
1279 // Calculate new top and bail out if new space is exhausted. | |
1280 ExternalReference new_space_allocation_limit = | |
1281 ExternalReference::new_space_allocation_limit_address(isolate()); | |
1282 | |
1283 if (!top_reg.is(result)) { | |
1284 mov(top_reg, result); | |
1285 } | |
1286 add(top_reg, Immediate(object_size)); | |
1287 j(carry, gc_required); | |
1288 cmp(top_reg, Operand::StaticVariable(new_space_allocation_limit)); | |
1289 j(above, gc_required); | |
1290 | |
1291 // Update allocation top. | |
1292 UpdateAllocationTopHelper(top_reg, scratch); | |
1293 | |
1294 // Tag result if requested. | |
1295 bool tag_result = (flags & TAG_OBJECT) != 0; | |
1296 if (top_reg.is(result)) { | |
1297 if (tag_result) { | |
1298 sub(result, Immediate(object_size - kHeapObjectTag)); | |
1299 } else { | |
1300 sub(result, Immediate(object_size)); | |
1301 } | |
1302 } else if (tag_result) { | |
1303 ASSERT(kHeapObjectTag == 1); | |
1304 inc(result); | |
1305 } | |
1306 } | |
1307 | |
1308 | |
1309 void MacroAssembler::AllocateInNewSpace( | 1258 void MacroAssembler::AllocateInNewSpace( |
1310 int header_size, | 1259 int header_size, |
1311 ScaleFactor element_size, | 1260 ScaleFactor element_size, |
1312 Register element_count, | 1261 Register element_count, |
1313 RegisterValueType element_count_type, | 1262 RegisterValueType element_count_type, |
1314 Register result, | 1263 Register result, |
1315 Register result_end, | 1264 Register result_end, |
1316 Register scratch, | 1265 Register scratch, |
1317 Label* gc_required, | 1266 Label* gc_required, |
1318 AllocationFlags flags) { | 1267 AllocationFlags flags) { |
1319 ASSERT((flags & SIZE_IN_WORDS) == 0); | 1268 ASSERT((flags & SIZE_IN_WORDS) == 0); |
1320 if (!FLAG_inline_new) { | 1269 if (!FLAG_inline_new) { |
1321 if (emit_debug_code()) { | 1270 if (emit_debug_code()) { |
1322 // Trash the registers to simulate an allocation failure. | 1271 // Trash the registers to simulate an allocation failure. |
1323 mov(result, Immediate(0x7091)); | 1272 mov(result, Immediate(0x7091)); |
1324 mov(result_end, Immediate(0x7191)); | 1273 mov(result_end, Immediate(0x7191)); |
1325 if (scratch.is_valid()) { | 1274 if (scratch.is_valid()) { |
1326 mov(scratch, Immediate(0x7291)); | 1275 mov(scratch, Immediate(0x7291)); |
1327 } | 1276 } |
1328 // Register element_count is not modified by the function. | 1277 // Register element_count is not modified by the function. |
1329 } | 1278 } |
1330 jmp(gc_required); | 1279 jmp(gc_required); |
1331 return; | 1280 return; |
1332 } | 1281 } |
1333 ASSERT(!result.is(result_end)); | 1282 ASSERT(!result.is(result_end)); |
1334 | 1283 |
1335 // Load address of new object into result. | 1284 // Load address of new object into result. |
1336 LoadAllocationTopHelper(result, scratch, flags); | 1285 LoadAllocationTopHelper(result, scratch, flags, NEW_SPACE); |
1337 | 1286 |
1338 // Align the next allocation. Storing the filler map without checking top is | 1287 // Align the next allocation. Storing the filler map without checking top is |
1339 // always safe because the limit of the heap is always aligned. | 1288 // always safe because the limit of the heap is always aligned. |
1340 if ((flags & DOUBLE_ALIGNMENT) != 0) { | 1289 if ((flags & DOUBLE_ALIGNMENT) != 0) { |
1341 ASSERT(kPointerAlignment * 2 == kDoubleAlignment); | 1290 ASSERT(kPointerAlignment * 2 == kDoubleAlignment); |
1342 Label aligned; | 1291 Label aligned; |
1343 test(result, Immediate(kDoubleAlignmentMask)); | 1292 test(result, Immediate(kDoubleAlignmentMask)); |
1344 j(zero, &aligned, Label::kNear); | 1293 j(zero, &aligned, Label::kNear); |
1345 mov(Operand(result, 0), | 1294 mov(Operand(result, 0), |
1346 Immediate(isolate()->factory()->one_pointer_filler_map())); | 1295 Immediate(isolate()->factory()->one_pointer_filler_map())); |
(...skipping 22 matching lines...) | |
1369 j(carry, gc_required); | 1318 j(carry, gc_required); |
1370 cmp(result_end, Operand::StaticVariable(new_space_allocation_limit)); | 1319 cmp(result_end, Operand::StaticVariable(new_space_allocation_limit)); |
1371 j(above, gc_required); | 1320 j(above, gc_required); |
1372 | 1321 |
1373 if ((flags & TAG_OBJECT) != 0) { | 1322 if ((flags & TAG_OBJECT) != 0) { |
1374 ASSERT(kHeapObjectTag == 1); | 1323 ASSERT(kHeapObjectTag == 1); |
1375 inc(result); | 1324 inc(result); |
1376 } | 1325 } |
1377 | 1326 |
1378 // Update allocation top. | 1327 // Update allocation top. |
1379 UpdateAllocationTopHelper(result_end, scratch); | 1328 UpdateAllocationTopHelper(result_end, scratch, NEW_SPACE); |
1380 } | 1329 } |
1381 | 1330 |
1382 | 1331 |
1383 void MacroAssembler::AllocateInNewSpace(Register object_size, | 1332 void MacroAssembler::AllocateInNewSpace(Register object_size, |
1384 Register result, | 1333 Register result, |
1385 Register result_end, | 1334 Register result_end, |
1386 Register scratch, | 1335 Register scratch, |
1387 Label* gc_required, | 1336 Label* gc_required, |
1388 AllocationFlags flags) { | 1337 AllocationFlags flags) { |
1389 ASSERT((flags & (RESULT_CONTAINS_TOP | SIZE_IN_WORDS)) == 0); | 1338 ASSERT((flags & (RESULT_CONTAINS_TOP | SIZE_IN_WORDS)) == 0); |
1390 if (!FLAG_inline_new) { | 1339 if (!FLAG_inline_new) { |
1391 if (emit_debug_code()) { | 1340 if (emit_debug_code()) { |
1392 // Trash the registers to simulate an allocation failure. | 1341 // Trash the registers to simulate an allocation failure. |
1393 mov(result, Immediate(0x7091)); | 1342 mov(result, Immediate(0x7091)); |
1394 mov(result_end, Immediate(0x7191)); | 1343 mov(result_end, Immediate(0x7191)); |
1395 if (scratch.is_valid()) { | 1344 if (scratch.is_valid()) { |
1396 mov(scratch, Immediate(0x7291)); | 1345 mov(scratch, Immediate(0x7291)); |
1397 } | 1346 } |
1398 // object_size is left unchanged by this function. | 1347 // object_size is left unchanged by this function. |
1399 } | 1348 } |
1400 jmp(gc_required); | 1349 jmp(gc_required); |
1401 return; | 1350 return; |
1402 } | 1351 } |
1403 ASSERT(!result.is(result_end)); | 1352 ASSERT(!result.is(result_end)); |
1404 | 1353 |
1405 // Load address of new object into result. | 1354 // Load address of new object into result. |
1406 LoadAllocationTopHelper(result, scratch, flags); | 1355 LoadAllocationTopHelper(result, scratch, flags, NEW_SPACE); |
1407 | 1356 |
1408 // Align the next allocation. Storing the filler map without checking top is | 1357 // Align the next allocation. Storing the filler map without checking top is |
1409 // always safe because the limit of the heap is always aligned. | 1358 // always safe because the limit of the heap is always aligned. |
1410 if ((flags & DOUBLE_ALIGNMENT) != 0) { | 1359 if ((flags & DOUBLE_ALIGNMENT) != 0) { |
1411 ASSERT(kPointerAlignment * 2 == kDoubleAlignment); | 1360 ASSERT(kPointerAlignment * 2 == kDoubleAlignment); |
1412 Label aligned; | 1361 Label aligned; |
1413 test(result, Immediate(kDoubleAlignmentMask)); | 1362 test(result, Immediate(kDoubleAlignmentMask)); |
1414 j(zero, &aligned, Label::kNear); | 1363 j(zero, &aligned, Label::kNear); |
1415 mov(Operand(result, 0), | 1364 mov(Operand(result, 0), |
1416 Immediate(isolate()->factory()->one_pointer_filler_map())); | 1365 Immediate(isolate()->factory()->one_pointer_filler_map())); |
(...skipping 12 matching lines...) | |
1429 cmp(result_end, Operand::StaticVariable(new_space_allocation_limit)); | 1378 cmp(result_end, Operand::StaticVariable(new_space_allocation_limit)); |
1430 j(above, gc_required); | 1379 j(above, gc_required); |
1431 | 1380 |
1432 // Tag result if requested. | 1381 // Tag result if requested. |
1433 if ((flags & TAG_OBJECT) != 0) { | 1382 if ((flags & TAG_OBJECT) != 0) { |
1434 ASSERT(kHeapObjectTag == 1); | 1383 ASSERT(kHeapObjectTag == 1); |
1435 inc(result); | 1384 inc(result); |
1436 } | 1385 } |
1437 | 1386 |
1438 // Update allocation top. | 1387 // Update allocation top. |
1439 UpdateAllocationTopHelper(result_end, scratch); | 1388 UpdateAllocationTopHelper(result_end, scratch, NEW_SPACE); |
1440 } | 1389 } |
1441 | 1390 |
1442 | 1391 |
1392 void MacroAssembler::Allocate(int object_size, | |
1393 Register result, | |
1394 Register result_end, | |
1395 Register scratch, | |
1396 Label* gc_required, | |
1397 AllocationFlags flags, | |
1398 AllocationTarget target) { | |
1399 ASSERT((flags & (RESULT_CONTAINS_TOP | SIZE_IN_WORDS)) == 0); | |
1400 if (!FLAG_inline_new) { | |
1401 if (emit_debug_code()) { | |
1402 // Trash the registers to simulate an allocation failure. | |
1403 mov(result, Immediate(0x7091)); | |
1404 if (result_end.is_valid()) { | |
1405 mov(result_end, Immediate(0x7191)); | |
1406 } | |
1407 if (scratch.is_valid()) { | |
1408 mov(scratch, Immediate(0x7291)); | |
1409 } | |
1410 } | |
1411 jmp(gc_required); | |
1412 return; | |
1413 } | |
1414 ASSERT(!result.is(result_end)); | |
1415 | |
1416 // Load address of new object into result. | |
1417 LoadAllocationTopHelper(result, scratch, flags, target); | |
1418 | |
1419 // Align the next allocation. Storing the filler map without checking top is | |
1420 // always safe because the limit of the heap is always aligned. | |
danno, 2013/03/05 12:18:18: Is the limit of the heap also still aligned in the
Hannes Payer (out of office), 2013/03/11 17:16:31: Done.
 | |
1421 if ((flags & DOUBLE_ALIGNMENT) != 0) { | |
1422 ASSERT(kPointerAlignment * 2 == kDoubleAlignment); | |
1423 Label aligned; | |
1424 test(result, Immediate(kDoubleAlignmentMask)); | |
1425 j(zero, &aligned, Label::kNear); | |
1426 mov(Operand(result, 0), | |
1427 Immediate(isolate()->factory()->one_pointer_filler_map())); | |
1428 add(result, Immediate(kDoubleSize / 2)); | |
1429 bind(&aligned); | |
1430 } | |
1431 | |
1432 Register top_reg = result_end.is_valid() ? result_end : result; | |
1433 | |
1434 // Calculate new top and bail out if space is exhausted. | |
1435 ExternalReference allocation_limit = GetLimitAddress(target); | |
1436 | |
1437 if (!top_reg.is(result)) { | |
1438 mov(top_reg, result); | |
1439 } | |
1440 add(top_reg, Immediate(object_size)); | |
1441 j(carry, gc_required); | |
1442 cmp(top_reg, Operand::StaticVariable(allocation_limit)); | |
1443 j(above, gc_required); | |
1444 | |
1445 // Update allocation top. | |
1446 UpdateAllocationTopHelper(top_reg, scratch, target); | |
1447 | |
1448 // Tag result if requested. | |
1449 bool tag_result = (flags & TAG_OBJECT) != 0; | |
1450 if (top_reg.is(result)) { | |
1451 if (tag_result) { | |
1452 sub(result, Immediate(object_size - kHeapObjectTag)); | |
1453 } else { | |
1454 sub(result, Immediate(object_size)); | |
1455 } | |
1456 } else if (tag_result) { | |
1457 ASSERT(kHeapObjectTag == 1); | |
1458 inc(result); | |
1459 } | |
1460 } | |
danno, 2013/03/05 12:18:18: nit: Can you move this up to where AllocationInNew
Hannes Payer (out of office), 2013/03/11 17:16:31: Done.
 | |
1461 | |
1462 | |
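For illustration only, a hypothetical caller-side use of the new target parameter; it assumes the non-new-space enumerator is named OLD_POINTER_SPACE (as the old_pointer_space_* external references above suggest) and uses arbitrary registers:

  Label gc_required;
  // Request a tagged, HeapNumber-sized chunk directly from old pointer space;
  // the slow path at gc_required would be bound by the surrounding stub.
  Allocate(HeapNumber::kSize, eax, ebx, no_reg, &gc_required,
           TAG_OBJECT, OLD_POINTER_SPACE);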
1443 void MacroAssembler::UndoAllocationInNewSpace(Register object) { | 1463 void MacroAssembler::UndoAllocationInNewSpace(Register object) { |
1444 ExternalReference new_space_allocation_top = | 1464 ExternalReference new_space_allocation_top = |
1445 ExternalReference::new_space_allocation_top_address(isolate()); | 1465 ExternalReference::new_space_allocation_top_address(isolate()); |
1446 | 1466 |
1447 // Make sure the object has no tag before resetting top. | 1467 // Make sure the object has no tag before resetting top. |
1448 and_(object, Immediate(~kHeapObjectTagMask)); | 1468 and_(object, Immediate(~kHeapObjectTagMask)); |
1449 #ifdef DEBUG | 1469 #ifdef DEBUG |
1450 cmp(object, Operand::StaticVariable(new_space_allocation_top)); | 1470 cmp(object, Operand::StaticVariable(new_space_allocation_top)); |
1451 Check(below, "Undo allocation of non allocated memory"); | 1471 Check(below, "Undo allocation of non allocated memory"); |
1452 #endif | 1472 #endif |
1453 mov(Operand::StaticVariable(new_space_allocation_top), object); | 1473 mov(Operand::StaticVariable(new_space_allocation_top), object); |
1454 } | 1474 } |
1455 | 1475 |
1456 | 1476 |
1457 void MacroAssembler::AllocateHeapNumber(Register result, | 1477 void MacroAssembler::AllocateHeapNumber(Register result, |
1458 Register scratch1, | 1478 Register scratch1, |
1459 Register scratch2, | 1479 Register scratch2, |
1460 Label* gc_required) { | 1480 Label* gc_required) { |
1461 // Allocate heap number in new space. | 1481 // Allocate heap number in new space. |
1462 AllocateInNewSpace(HeapNumber::kSize, | 1482 Allocate(HeapNumber::kSize, result, scratch1, scratch2, gc_required, |
1463 result, | 1483 TAG_OBJECT, NEW_SPACE); |
1464 scratch1, | |
1465 scratch2, | |
1466 gc_required, | |
1467 TAG_OBJECT); | |
1468 | 1484 |
1469 // Set the map. | 1485 // Set the map. |
1470 mov(FieldOperand(result, HeapObject::kMapOffset), | 1486 mov(FieldOperand(result, HeapObject::kMapOffset), |
1471 Immediate(isolate()->factory()->heap_number_map())); | 1487 Immediate(isolate()->factory()->heap_number_map())); |
1472 } | 1488 } |
1473 | 1489 |
1474 | 1490 |
1475 void MacroAssembler::AllocateTwoByteString(Register result, | 1491 void MacroAssembler::AllocateTwoByteString(Register result, |
1476 Register length, | 1492 Register length, |
1477 Register scratch1, | 1493 Register scratch1, |
(...skipping 67 matching lines...) | |
1545 | 1561 |
1546 | 1562 |
1547 void MacroAssembler::AllocateAsciiString(Register result, | 1563 void MacroAssembler::AllocateAsciiString(Register result, |
1548 int length, | 1564 int length, |
1549 Register scratch1, | 1565 Register scratch1, |
1550 Register scratch2, | 1566 Register scratch2, |
1551 Label* gc_required) { | 1567 Label* gc_required) { |
1552 ASSERT(length > 0); | 1568 ASSERT(length > 0); |
1553 | 1569 |
1554 // Allocate ASCII string in new space. | 1570 // Allocate ASCII string in new space. |
1555 AllocateInNewSpace(SeqOneByteString::SizeFor(length), | 1571 Allocate(SeqOneByteString::SizeFor(length), result, scratch1, scratch2, |
1556 result, | 1572 gc_required, TAG_OBJECT, NEW_SPACE); |
1557 scratch1, | |
1558 scratch2, | |
1559 gc_required, | |
1560 TAG_OBJECT); | |
1561 | 1573 |
1562 // Set the map, length and hash field. | 1574 // Set the map, length and hash field. |
1563 mov(FieldOperand(result, HeapObject::kMapOffset), | 1575 mov(FieldOperand(result, HeapObject::kMapOffset), |
1564 Immediate(isolate()->factory()->ascii_string_map())); | 1576 Immediate(isolate()->factory()->ascii_string_map())); |
1565 mov(FieldOperand(result, String::kLengthOffset), | 1577 mov(FieldOperand(result, String::kLengthOffset), |
1566 Immediate(Smi::FromInt(length))); | 1578 Immediate(Smi::FromInt(length))); |
1567 mov(FieldOperand(result, String::kHashFieldOffset), | 1579 mov(FieldOperand(result, String::kHashFieldOffset), |
1568 Immediate(String::kEmptyHashField)); | 1580 Immediate(String::kEmptyHashField)); |
1569 } | 1581 } |
1570 | 1582 |
1571 | 1583 |
1572 void MacroAssembler::AllocateTwoByteConsString(Register result, | 1584 void MacroAssembler::AllocateTwoByteConsString(Register result, |
1573 Register scratch1, | 1585 Register scratch1, |
1574 Register scratch2, | 1586 Register scratch2, |
1575 Label* gc_required) { | 1587 Label* gc_required) { |
1576 // Allocate two-byte cons string in new space. | 1588 // Allocate two-byte cons string in new space. |
1577 AllocateInNewSpace(ConsString::kSize, | 1589 Allocate(ConsString::kSize, result, scratch1, scratch2, gc_required, |
1578 result, | 1590 TAG_OBJECT, NEW_SPACE); |
1579 scratch1, | |
1580 scratch2, | |
1581 gc_required, | |
1582 TAG_OBJECT); | |
1583 | 1591 |
1584 // Set the map. The other fields are left uninitialized. | 1592 // Set the map. The other fields are left uninitialized. |
1585 mov(FieldOperand(result, HeapObject::kMapOffset), | 1593 mov(FieldOperand(result, HeapObject::kMapOffset), |
1586 Immediate(isolate()->factory()->cons_string_map())); | 1594 Immediate(isolate()->factory()->cons_string_map())); |
1587 } | 1595 } |
1588 | 1596 |
1589 | 1597 |
1590 void MacroAssembler::AllocateAsciiConsString(Register result, | 1598 void MacroAssembler::AllocateAsciiConsString(Register result, |
1591 Register scratch1, | 1599 Register scratch1, |
1592 Register scratch2, | 1600 Register scratch2, |
1593 Label* gc_required) { | 1601 Label* gc_required) { |
1594 // Allocate ASCII cons string in new space. | 1602 // Allocate ASCII cons string in new space. |
1595 AllocateInNewSpace(ConsString::kSize, | 1603 Allocate(ConsString::kSize, result, scratch1, scratch2, gc_required, |
1596 result, | 1604 TAG_OBJECT, NEW_SPACE); |
1597 scratch1, | |
1598 scratch2, | |
1599 gc_required, | |
1600 TAG_OBJECT); | |
1601 | 1605 |
1602 // Set the map. The other fields are left uninitialized. | 1606 // Set the map. The other fields are left uninitialized. |
1603 mov(FieldOperand(result, HeapObject::kMapOffset), | 1607 mov(FieldOperand(result, HeapObject::kMapOffset), |
1604 Immediate(isolate()->factory()->cons_ascii_string_map())); | 1608 Immediate(isolate()->factory()->cons_ascii_string_map())); |
1605 } | 1609 } |
1606 | 1610 |
1607 | 1611 |
1608 void MacroAssembler::AllocateTwoByteSlicedString(Register result, | 1612 void MacroAssembler::AllocateTwoByteSlicedString(Register result, |
1609 Register scratch1, | 1613 Register scratch1, |
1610 Register scratch2, | 1614 Register scratch2, |
1611 Label* gc_required) { | 1615 Label* gc_required) { |
1612 // Allocate two-byte sliced string in new space. | 1616 // Allocate two-byte sliced string in new space. |
1613 AllocateInNewSpace(SlicedString::kSize, | 1617 Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required, |
1614 result, | 1618 TAG_OBJECT, NEW_SPACE); |
1615 scratch1, | |
1616 scratch2, | |
1617 gc_required, | |
1618 TAG_OBJECT); | |
1619 | 1619 |
1620 // Set the map. The other fields are left uninitialized. | 1620 // Set the map. The other fields are left uninitialized. |
1621 mov(FieldOperand(result, HeapObject::kMapOffset), | 1621 mov(FieldOperand(result, HeapObject::kMapOffset), |
1622 Immediate(isolate()->factory()->sliced_string_map())); | 1622 Immediate(isolate()->factory()->sliced_string_map())); |
1623 } | 1623 } |
1624 | 1624 |
1625 | 1625 |
1626 void MacroAssembler::AllocateAsciiSlicedString(Register result, | 1626 void MacroAssembler::AllocateAsciiSlicedString(Register result, |
1627 Register scratch1, | 1627 Register scratch1, |
1628 Register scratch2, | 1628 Register scratch2, |
1629 Label* gc_required) { | 1629 Label* gc_required) { |
1630 // Allocate ASCII sliced string in new space. | 1630 // Allocate ASCII sliced string in new space. |
1631 AllocateInNewSpace(SlicedString::kSize, | 1631 Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required, |
1632 result, | 1632 TAG_OBJECT, NEW_SPACE); |
1633 scratch1, | |
1634 scratch2, | |
1635 gc_required, | |
1636 TAG_OBJECT); | |
1637 | 1633 |
1638 // Set the map. The other fields are left uninitialized. | 1634 // Set the map. The other fields are left uninitialized. |
1639 mov(FieldOperand(result, HeapObject::kMapOffset), | 1635 mov(FieldOperand(result, HeapObject::kMapOffset), |
1640 Immediate(isolate()->factory()->sliced_ascii_string_map())); | 1636 Immediate(isolate()->factory()->sliced_ascii_string_map())); |
1641 } | 1637 } |
1642 | 1638 |
1643 | 1639 |
1644 // Copy memory, byte-by-byte, from source to destination. Not optimized for | 1640 // Copy memory, byte-by-byte, from source to destination. Not optimized for |
1645 // long or aligned copies. The contents of scratch and length are destroyed. | 1641 // long or aligned copies. The contents of scratch and length are destroyed. |
1646 // Source and destination are incremented by length. | 1642 // Source and destination are incremented by length. |
(...skipping 1434 matching lines...) | |
3081 j(greater, &no_info_available); | 3077 j(greater, &no_info_available); |
3082 cmp(MemOperand(scratch_reg, -AllocationSiteInfo::kSize), | 3078 cmp(MemOperand(scratch_reg, -AllocationSiteInfo::kSize), |
3083 Immediate(Handle<Map>(isolate()->heap()->allocation_site_info_map()))); | 3079 Immediate(Handle<Map>(isolate()->heap()->allocation_site_info_map()))); |
3084 bind(&no_info_available); | 3080 bind(&no_info_available); |
3085 } | 3081 } |
3086 | 3082 |
3087 | 3083 |
3088 } } // namespace v8::internal | 3084 } } // namespace v8::internal |
3089 | 3085 |
3090 #endif // V8_TARGET_ARCH_IA32 | 3086 #endif // V8_TARGET_ARCH_IA32 |