| OLD | NEW |
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 1320 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1331 } | 1331 } |
| 1332 | 1332 |
| 1333 | 1333 |
| 1334 void LCodeGen::DoArithmeticT(LArithmeticT* instr) { | 1334 void LCodeGen::DoArithmeticT(LArithmeticT* instr) { |
| 1335 ASSERT(ToRegister(instr->InputAt(0)).is(rdx)); | 1335 ASSERT(ToRegister(instr->InputAt(0)).is(rdx)); |
| 1336 ASSERT(ToRegister(instr->InputAt(1)).is(rax)); | 1336 ASSERT(ToRegister(instr->InputAt(1)).is(rax)); |
| 1337 ASSERT(ToRegister(instr->result()).is(rax)); | 1337 ASSERT(ToRegister(instr->result()).is(rax)); |
| 1338 | 1338 |
| 1339 BinaryOpStub stub(instr->op(), NO_OVERWRITE); | 1339 BinaryOpStub stub(instr->op(), NO_OVERWRITE); |
| 1340 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); | 1340 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); |
| 1341 __ nop(); // Signals no inlined code. |
| 1341 } | 1342 } |
| 1342 | 1343 |
| 1343 | 1344 |
| 1344 int LCodeGen::GetNextEmittedBlock(int block) { | 1345 int LCodeGen::GetNextEmittedBlock(int block) { |
| 1345 for (int i = block + 1; i < graph()->blocks()->length(); ++i) { | 1346 for (int i = block + 1; i < graph()->blocks()->length(); ++i) { |
| 1346 LLabel* label = chunk_->GetLabel(i); | 1347 LLabel* label = chunk_->GetLabel(i); |
| 1347 if (!label->HasReplacement()) return i; | 1348 if (!label->HasReplacement()) return i; |
| 1348 } | 1349 } |
| 1349 return -1; | 1350 return -1; |
| 1350 } | 1351 } |
| (...skipping 40 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1391 if (type.IsBoolean()) { | 1392 if (type.IsBoolean()) { |
| 1392 __ CompareRoot(reg, Heap::kTrueValueRootIndex); | 1393 __ CompareRoot(reg, Heap::kTrueValueRootIndex); |
| 1393 EmitBranch(true_block, false_block, equal); | 1394 EmitBranch(true_block, false_block, equal); |
| 1394 } else if (type.IsSmi()) { | 1395 } else if (type.IsSmi()) { |
| 1395 __ SmiCompare(reg, Smi::FromInt(0)); | 1396 __ SmiCompare(reg, Smi::FromInt(0)); |
| 1396 EmitBranch(true_block, false_block, not_equal); | 1397 EmitBranch(true_block, false_block, not_equal); |
| 1397 } else { | 1398 } else { |
| 1398 Label* true_label = chunk_->GetAssemblyLabel(true_block); | 1399 Label* true_label = chunk_->GetAssemblyLabel(true_block); |
| 1399 Label* false_label = chunk_->GetAssemblyLabel(false_block); | 1400 Label* false_label = chunk_->GetAssemblyLabel(false_block); |
| 1400 | 1401 |
| 1401 __ CompareRoot(reg, Heap::kUndefinedValueRootIndex); | 1402 ToBooleanStub::Types expected = instr->hydrogen()->expected_input_types(); |
| 1402 __ j(equal, false_label); | 1403 // Avoid deopts in the case where we've never executed this path before. |
| 1403 __ CompareRoot(reg, Heap::kTrueValueRootIndex); | 1404 if (expected.IsEmpty()) expected = ToBooleanStub::all_types(); |
| 1404 __ j(equal, true_label); | |
| 1405 __ CompareRoot(reg, Heap::kFalseValueRootIndex); | |
| 1406 __ j(equal, false_label); | |
| 1407 __ Cmp(reg, Smi::FromInt(0)); | |
| 1408 __ j(equal, false_label); | |
| 1409 __ JumpIfSmi(reg, true_label); | |
| 1410 | 1405 |
| 1411 // Test for double values. Plus/minus zero and NaN are false. | 1406 if (expected.Contains(ToBooleanStub::UNDEFINED)) { |
| 1412 Label call_stub; | 1407 // undefined -> false. |
| 1413 __ CompareRoot(FieldOperand(reg, HeapObject::kMapOffset), | 1408 __ CompareRoot(reg, Heap::kUndefinedValueRootIndex); |
| 1414 Heap::kHeapNumberMapRootIndex); | 1409 __ j(equal, false_label); |
| 1415 __ j(not_equal, &call_stub, Label::kNear); | 1410 } else if (expected.Contains(ToBooleanStub::INTERNAL_OBJECT)) { |
| 1411 // We've seen undefined for the first time -> deopt. |
| 1412 __ CompareRoot(reg, Heap::kUndefinedValueRootIndex); |
| 1413 DeoptimizeIf(equal, instr->environment()); |
| 1414 } |
| 1416 | 1415 |
| 1417 // HeapNumber => false iff +0, -0, or NaN. These three cases set the | 1416 if (expected.Contains(ToBooleanStub::BOOLEAN)) { |
| 1418 // zero flag when compared to zero using ucomisd. | 1417 // true -> true. |
| 1419 __ xorps(xmm0, xmm0); | 1418 __ CompareRoot(reg, Heap::kTrueValueRootIndex); |
| 1420 __ ucomisd(xmm0, FieldOperand(reg, HeapNumber::kValueOffset)); | 1419 __ j(equal, true_label); |
| 1421 __ j(zero, false_label); | 1420 } else if (expected.Contains(ToBooleanStub::INTERNAL_OBJECT)) { |
| 1422 __ jmp(true_label); | 1421 // We've seen a boolean for the first time -> deopt. |
| 1422 __ CompareRoot(reg, Heap::kTrueValueRootIndex); |
| 1423 DeoptimizeIf(equal, instr->environment()); |
| 1424 } |
| 1423 | 1425 |
| 1424 // The conversion stub doesn't cause garbage collections so it's | 1426 if (expected.Contains(ToBooleanStub::BOOLEAN)) { |
| 1425 // safe to not record a safepoint after the call. | 1427 // false -> false. |
| 1426 __ bind(&call_stub); | 1428 __ CompareRoot(reg, Heap::kFalseValueRootIndex); |
| 1427 ToBooleanStub stub(rax); | 1429 __ j(equal, false_label); |
| 1428 __ Pushad(); | 1430 } else if (expected.Contains(ToBooleanStub::INTERNAL_OBJECT)) { |
| 1429 __ push(reg); | 1431 // We've seen a boolean for the first time -> deopt. |
| 1430 __ CallStub(&stub); | 1432 __ CompareRoot(reg, Heap::kFalseValueRootIndex); |
| 1431 __ testq(rax, rax); | 1433 DeoptimizeIf(equal, instr->environment()); |
| 1432 __ Popad(); | 1434 } |
| 1433 EmitBranch(true_block, false_block, not_zero); | 1435 |
| 1436 if (expected.Contains(ToBooleanStub::NULL_TYPE)) { |
| 1437 // 'null' -> false. |
| 1438 __ CompareRoot(reg, Heap::kNullValueRootIndex); |
| 1439 __ j(equal, false_label); |
| 1440 } else if (expected.Contains(ToBooleanStub::INTERNAL_OBJECT)) { |
| 1441 // We've seen null for the first time -> deopt. |
| 1442 __ CompareRoot(reg, Heap::kNullValueRootIndex); |
| 1443 DeoptimizeIf(equal, instr->environment()); |
| 1444 } |
| 1445 |
| 1446 if (expected.Contains(ToBooleanStub::SMI)) { |
| 1447 // Smis: 0 -> false, all other -> true. |
| 1448 __ Cmp(reg, Smi::FromInt(0)); |
| 1449 __ j(equal, false_label); |
| 1450 __ JumpIfSmi(reg, true_label); |
| 1451 } else if (expected.NeedsMap()) { |
| 1452 // If we need a map later and have a Smi -> deopt. |
| 1453 __ testb(reg, Immediate(kSmiTagMask)); |
| 1454 DeoptimizeIf(zero, instr->environment()); |
| 1455 } |
| 1456 |
| 1457 const Register map = kScratchRegister; |
| 1458 if (expected.NeedsMap()) { |
| 1459 __ movq(map, FieldOperand(reg, HeapObject::kMapOffset)); |
| 1460 // Everything with a map could be undetectable, so check this now. |
| 1461 __ testb(FieldOperand(map, Map::kBitFieldOffset), |
| 1462 Immediate(1 << Map::kIsUndetectable)); |
| 1463 // Undetectable -> false. |
| 1464 __ j(not_zero, false_label); |
| 1465 } |
| 1466 |
| 1467 if (expected.Contains(ToBooleanStub::SPEC_OBJECT)) { |
| 1468 // spec object -> true. |
| 1469 __ CmpInstanceType(map, FIRST_SPEC_OBJECT_TYPE); |
| 1470 __ j(above_equal, true_label); |
| 1471 } else if (expected.Contains(ToBooleanStub::INTERNAL_OBJECT)) { |
| 1472 // We've seen a spec object for the first time -> deopt. |
| 1473 __ CmpInstanceType(map, FIRST_SPEC_OBJECT_TYPE); |
| 1474 DeoptimizeIf(above_equal, instr->environment()); |
| 1475 } |
| 1476 |
| 1477 if (expected.Contains(ToBooleanStub::STRING)) { |
| 1478 // String value -> false iff empty. |
| 1479 Label not_string; |
| 1480 __ CmpInstanceType(map, FIRST_NONSTRING_TYPE); |
| 1481 __ j(above_equal, &not_string, Label::kNear); |
| 1482 __ cmpq(FieldOperand(reg, String::kLengthOffset), Immediate(0)); |
| 1483 __ j(not_zero, true_label); |
| 1484 __ jmp(false_label); |
| 1485 __ bind(&not_string); |
| 1486 } else if (expected.Contains(ToBooleanStub::INTERNAL_OBJECT)) { |
| 1487 // We've seen a string for the first time -> deopt |
| 1488 __ CmpInstanceType(map, FIRST_NONSTRING_TYPE); |
| 1489 DeoptimizeIf(below, instr->environment()); |
| 1490 } |
| 1491 |
| 1492 if (expected.Contains(ToBooleanStub::HEAP_NUMBER)) { |
| 1493 // heap number -> false iff +0, -0, or NaN. |
| 1494 Label not_heap_number; |
| 1495 __ CompareRoot(map, Heap::kHeapNumberMapRootIndex); |
| 1496 __ j(not_equal, &not_heap_number, Label::kNear); |
| 1497 __ xorps(xmm0, xmm0); |
| 1498 __ ucomisd(xmm0, FieldOperand(reg, HeapNumber::kValueOffset)); |
| 1499 __ j(zero, false_label); |
| 1500 __ jmp(true_label); |
| 1501 __ bind(&not_heap_number); |
| 1502 } else if (expected.Contains(ToBooleanStub::INTERNAL_OBJECT)) { |
| 1503 // We've seen a heap number for the first time -> deopt. |
| 1504 __ CompareRoot(map, Heap::kHeapNumberMapRootIndex); |
| 1505 DeoptimizeIf(equal, instr->environment()); |
| 1506 } |
| 1507 |
| 1508 if (expected.Contains(ToBooleanStub::INTERNAL_OBJECT)) { |
| 1509 // internal objects -> true |
| 1510 __ jmp(true_label); |
| 1511 } else { |
| 1512 // We've seen something for the first time -> deopt. |
| 1513 DeoptimizeIf(no_condition, instr->environment()); |
| 1514 } |
| 1434 } | 1515 } |
| 1435 } | 1516 } |
| 1436 } | 1517 } |
| 1437 | 1518 |
| 1438 | 1519 |
| 1439 void LCodeGen::EmitGoto(int block) { | 1520 void LCodeGen::EmitGoto(int block) { |
| 1440 block = chunk_->LookupDestination(block); | 1521 block = chunk_->LookupDestination(block); |
| 1441 int next_block = GetNextEmittedBlock(current_block_); | 1522 int next_block = GetNextEmittedBlock(current_block_); |
| 1442 if (block != next_block) { | 1523 if (block != next_block) { |
| 1443 __ jmp(chunk_->GetAssemblyLabel(block)); | 1524 __ jmp(chunk_->GetAssemblyLabel(block)); |
| (...skipping 428 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1872 Label* map_check) { | 1953 Label* map_check) { |
| 1873 { | 1954 { |
| 1874 PushSafepointRegistersScope scope(this); | 1955 PushSafepointRegistersScope scope(this); |
| 1875 InstanceofStub::Flags flags = static_cast<InstanceofStub::Flags>( | 1956 InstanceofStub::Flags flags = static_cast<InstanceofStub::Flags>( |
| 1876 InstanceofStub::kNoFlags | InstanceofStub::kCallSiteInlineCheck); | 1957 InstanceofStub::kNoFlags | InstanceofStub::kCallSiteInlineCheck); |
| 1877 InstanceofStub stub(flags); | 1958 InstanceofStub stub(flags); |
| 1878 | 1959 |
| 1879 __ push(ToRegister(instr->InputAt(0))); | 1960 __ push(ToRegister(instr->InputAt(0))); |
| 1880 __ Push(instr->function()); | 1961 __ Push(instr->function()); |
| 1881 | 1962 |
| 1882 Register temp = ToRegister(instr->TempAt(0)); | |
| 1883 static const int kAdditionalDelta = 10; | 1963 static const int kAdditionalDelta = 10; |
| 1884 int delta = | 1964 int delta = |
| 1885 masm_->SizeOfCodeGeneratedSince(map_check) + kAdditionalDelta; | 1965 masm_->SizeOfCodeGeneratedSince(map_check) + kAdditionalDelta; |
| 1886 ASSERT(delta >= 0); | 1966 ASSERT(delta >= 0); |
| 1887 __ push_imm32(delta); | 1967 __ push_imm32(delta); |
| 1888 | 1968 |
| 1889 // We are pushing three values on the stack but recording a | 1969 // We are pushing three values on the stack but recording a |
| 1890 // safepoint with two arguments because stub is going to | 1970 // safepoint with two arguments because stub is going to |
| 1891 // remove the third argument from the stack before jumping | 1971 // remove the third argument from the stack before jumping |
| 1892 // to instanceof builtin on the slow path. | 1972 // to instanceof builtin on the slow path. |
| (...skipping 355 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2248 FixedArray::kHeaderSize)); | 2328 FixedArray::kHeaderSize)); |
| 2249 | 2329 |
| 2250 // Check for the hole value. | 2330 // Check for the hole value. |
| 2251 if (instr->hydrogen()->RequiresHoleCheck()) { | 2331 if (instr->hydrogen()->RequiresHoleCheck()) { |
| 2252 __ CompareRoot(result, Heap::kTheHoleValueRootIndex); | 2332 __ CompareRoot(result, Heap::kTheHoleValueRootIndex); |
| 2253 DeoptimizeIf(equal, instr->environment()); | 2333 DeoptimizeIf(equal, instr->environment()); |
| 2254 } | 2334 } |
| 2255 } | 2335 } |
| 2256 | 2336 |
| 2257 | 2337 |
| 2258 Operand LCodeGen::BuildExternalArrayOperand( | 2338 void LCodeGen::DoLoadKeyedFastDoubleElement( |
| 2339 LLoadKeyedFastDoubleElement* instr) { |
| 2340 XMMRegister result(ToDoubleRegister(instr->result())); |
| 2341 |
| 2342 if (instr->hydrogen()->RequiresHoleCheck()) { |
| 2343 int offset = FixedDoubleArray::kHeaderSize - kHeapObjectTag + |
| 2344 sizeof(kHoleNanLower32); |
| 2345 Operand hole_check_operand = BuildFastArrayOperand( |
| 2346 instr->elements(), |
| 2347 instr->key(), |
| 2348 JSObject::FAST_DOUBLE_ELEMENTS, |
| 2349 offset); |
| 2350 __ cmpl(hole_check_operand, Immediate(kHoleNanUpper32)); |
| 2351 DeoptimizeIf(equal, instr->environment()); |
| 2352 } |
| 2353 |
| 2354 Operand double_load_operand = BuildFastArrayOperand( |
| 2355 instr->elements(), instr->key(), JSObject::FAST_DOUBLE_ELEMENTS, |
| 2356 FixedDoubleArray::kHeaderSize - kHeapObjectTag); |
| 2357 __ movsd(result, double_load_operand); |
| 2358 } |
| 2359 |
| 2360 |
| 2361 Operand LCodeGen::BuildFastArrayOperand( |
| 2259 LOperand* external_pointer, | 2362 LOperand* external_pointer, |
| 2260 LOperand* key, | 2363 LOperand* key, |
| 2261 JSObject::ElementsKind elements_kind) { | 2364 JSObject::ElementsKind elements_kind, |
| 2365 uint32_t offset) { |
| 2262 Register external_pointer_reg = ToRegister(external_pointer); | 2366 Register external_pointer_reg = ToRegister(external_pointer); |
| 2263 int shift_size = ElementsKindToShiftSize(elements_kind); | 2367 int shift_size = ElementsKindToShiftSize(elements_kind); |
| 2264 if (key->IsConstantOperand()) { | 2368 if (key->IsConstantOperand()) { |
| 2265 int constant_value = ToInteger32(LConstantOperand::cast(key)); | 2369 int constant_value = ToInteger32(LConstantOperand::cast(key)); |
| 2266 if (constant_value & 0xF0000000) { | 2370 if (constant_value & 0xF0000000) { |
| 2267 Abort("array index constant value too big"); | 2371 Abort("array index constant value too big"); |
| 2268 } | 2372 } |
| 2269 return Operand(external_pointer_reg, constant_value * (1 << shift_size)); | 2373 return Operand(external_pointer_reg, |
| 2374 constant_value * (1 << shift_size) + offset); |
| 2270 } else { | 2375 } else { |
| 2271 ScaleFactor scale_factor = static_cast<ScaleFactor>(shift_size); | 2376 ScaleFactor scale_factor = static_cast<ScaleFactor>(shift_size); |
| 2272 return Operand(external_pointer_reg, ToRegister(key), scale_factor, 0); | 2377 return Operand(external_pointer_reg, ToRegister(key), |
| 2378 scale_factor, offset); |
| 2273 } | 2379 } |
| 2274 } | 2380 } |
| 2275 | 2381 |
| 2276 | 2382 |
| 2277 void LCodeGen::DoLoadKeyedSpecializedArrayElement( | 2383 void LCodeGen::DoLoadKeyedSpecializedArrayElement( |
| 2278 LLoadKeyedSpecializedArrayElement* instr) { | 2384 LLoadKeyedSpecializedArrayElement* instr) { |
| 2279 JSObject::ElementsKind elements_kind = instr->elements_kind(); | 2385 JSObject::ElementsKind elements_kind = instr->elements_kind(); |
| 2280 Operand operand(BuildExternalArrayOperand(instr->external_pointer(), | 2386 Operand operand(BuildFastArrayOperand(instr->external_pointer(), |
| 2281 instr->key(), elements_kind)); | 2387 instr->key(), elements_kind, 0)); |
| 2282 if (elements_kind == JSObject::EXTERNAL_FLOAT_ELEMENTS) { | 2388 if (elements_kind == JSObject::EXTERNAL_FLOAT_ELEMENTS) { |
| 2283 XMMRegister result(ToDoubleRegister(instr->result())); | 2389 XMMRegister result(ToDoubleRegister(instr->result())); |
| 2284 __ movss(result, operand); | 2390 __ movss(result, operand); |
| 2285 __ cvtss2sd(result, result); | 2391 __ cvtss2sd(result, result); |
| 2286 } else if (elements_kind == JSObject::EXTERNAL_DOUBLE_ELEMENTS) { | 2392 } else if (elements_kind == JSObject::EXTERNAL_DOUBLE_ELEMENTS) { |
| 2287 __ movsd(ToDoubleRegister(instr->result()), operand); | 2393 __ movsd(ToDoubleRegister(instr->result()), operand); |
| 2288 } else { | 2394 } else { |
| 2289 Register result(ToRegister(instr->result())); | 2395 Register result(ToRegister(instr->result())); |
| 2290 switch (elements_kind) { | 2396 switch (elements_kind) { |
| 2291 case JSObject::EXTERNAL_BYTE_ELEMENTS: | 2397 case JSObject::EXTERNAL_BYTE_ELEMENTS: |
| (...skipping 705 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2997 Handle<Code> ic = instr->strict_mode() | 3103 Handle<Code> ic = instr->strict_mode() |
| 2998 ? isolate()->builtins()->StoreIC_Initialize_Strict() | 3104 ? isolate()->builtins()->StoreIC_Initialize_Strict() |
| 2999 : isolate()->builtins()->StoreIC_Initialize(); | 3105 : isolate()->builtins()->StoreIC_Initialize(); |
| 3000 CallCode(ic, RelocInfo::CODE_TARGET, instr); | 3106 CallCode(ic, RelocInfo::CODE_TARGET, instr); |
| 3001 } | 3107 } |
| 3002 | 3108 |
| 3003 | 3109 |
| 3004 void LCodeGen::DoStoreKeyedSpecializedArrayElement( | 3110 void LCodeGen::DoStoreKeyedSpecializedArrayElement( |
| 3005 LStoreKeyedSpecializedArrayElement* instr) { | 3111 LStoreKeyedSpecializedArrayElement* instr) { |
| 3006 JSObject::ElementsKind elements_kind = instr->elements_kind(); | 3112 JSObject::ElementsKind elements_kind = instr->elements_kind(); |
| 3007 Operand operand(BuildExternalArrayOperand(instr->external_pointer(), | 3113 Operand operand(BuildFastArrayOperand(instr->external_pointer(), |
| 3008 instr->key(), elements_kind)); | 3114 instr->key(), elements_kind, 0)); |
| 3009 if (elements_kind == JSObject::EXTERNAL_FLOAT_ELEMENTS) { | 3115 if (elements_kind == JSObject::EXTERNAL_FLOAT_ELEMENTS) { |
| 3010 XMMRegister value(ToDoubleRegister(instr->value())); | 3116 XMMRegister value(ToDoubleRegister(instr->value())); |
| 3011 __ cvtsd2ss(value, value); | 3117 __ cvtsd2ss(value, value); |
| 3012 __ movss(operand, value); | 3118 __ movss(operand, value); |
| 3013 } else if (elements_kind == JSObject::EXTERNAL_DOUBLE_ELEMENTS) { | 3119 } else if (elements_kind == JSObject::EXTERNAL_DOUBLE_ELEMENTS) { |
| 3014 __ movsd(operand, ToDoubleRegister(instr->value())); | 3120 __ movsd(operand, ToDoubleRegister(instr->value())); |
| 3015 } else { | 3121 } else { |
| 3016 Register value(ToRegister(instr->value())); | 3122 Register value(ToRegister(instr->value())); |
| 3017 switch (elements_kind) { | 3123 switch (elements_kind) { |
| 3018 case JSObject::EXTERNAL_PIXEL_ELEMENTS: | 3124 case JSObject::EXTERNAL_PIXEL_ELEMENTS: |
| (...skipping 56 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 3075 // Compute address of modified element and store it into key register. | 3181 // Compute address of modified element and store it into key register. |
| 3076 __ lea(key, FieldOperand(elements, | 3182 __ lea(key, FieldOperand(elements, |
| 3077 key, | 3183 key, |
| 3078 times_pointer_size, | 3184 times_pointer_size, |
| 3079 FixedArray::kHeaderSize)); | 3185 FixedArray::kHeaderSize)); |
| 3080 __ RecordWrite(elements, key, value, kSaveFPRegs); | 3186 __ RecordWrite(elements, key, value, kSaveFPRegs); |
| 3081 } | 3187 } |
| 3082 } | 3188 } |
| 3083 | 3189 |
| 3084 | 3190 |
| 3191 void LCodeGen::DoStoreKeyedFastDoubleElement( |
| 3192 LStoreKeyedFastDoubleElement* instr) { |
| 3193 XMMRegister value = ToDoubleRegister(instr->value()); |
| 3194 Label have_value; |
| 3195 |
| 3196 __ ucomisd(value, value); |
| 3197 __ j(parity_odd, &have_value); // NaN. |
| 3198 |
| 3199 __ Set(kScratchRegister, BitCast<uint64_t>( |
| 3200 FixedDoubleArray::canonical_not_the_hole_nan_as_double())); |
| 3201 __ movq(value, kScratchRegister); |
| 3202 |
| 3203 __ bind(&have_value); |
| 3204 Operand double_store_operand = BuildFastArrayOperand( |
| 3205 instr->elements(), instr->key(), JSObject::FAST_DOUBLE_ELEMENTS, |
| 3206 FixedDoubleArray::kHeaderSize - kHeapObjectTag); |
| 3207 __ movsd(double_store_operand, value); |
| 3208 } |
| 3209 |
| 3085 void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) { | 3210 void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) { |
| 3086 ASSERT(ToRegister(instr->object()).is(rdx)); | 3211 ASSERT(ToRegister(instr->object()).is(rdx)); |
| 3087 ASSERT(ToRegister(instr->key()).is(rcx)); | 3212 ASSERT(ToRegister(instr->key()).is(rcx)); |
| 3088 ASSERT(ToRegister(instr->value()).is(rax)); | 3213 ASSERT(ToRegister(instr->value()).is(rax)); |
| 3089 | 3214 |
| 3090 Handle<Code> ic = instr->strict_mode() | 3215 Handle<Code> ic = instr->strict_mode() |
| 3091 ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict() | 3216 ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict() |
| 3092 : isolate()->builtins()->KeyedStoreIC_Initialize(); | 3217 : isolate()->builtins()->KeyedStoreIC_Initialize(); |
| 3093 CallCode(ic, RelocInfo::CODE_TARGET, instr); | 3218 CallCode(ic, RelocInfo::CODE_TARGET, instr); |
| 3094 } | 3219 } |
| (...skipping 975 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 4070 RegisterEnvironmentForDeoptimization(environment); | 4195 RegisterEnvironmentForDeoptimization(environment); |
| 4071 ASSERT(osr_pc_offset_ == -1); | 4196 ASSERT(osr_pc_offset_ == -1); |
| 4072 osr_pc_offset_ = masm()->pc_offset(); | 4197 osr_pc_offset_ = masm()->pc_offset(); |
| 4073 } | 4198 } |
| 4074 | 4199 |
| 4075 #undef __ | 4200 #undef __ |
| 4076 | 4201 |
| 4077 } } // namespace v8::internal | 4202 } } // namespace v8::internal |
| 4078 | 4203 |
| 4079 #endif // V8_TARGET_ARCH_X64 | 4204 #endif // V8_TARGET_ARCH_X64 |
| OLD | NEW |