OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/v8.h" | 5 #include "src/v8.h" |
6 | 6 |
7 #if V8_TARGET_ARCH_MIPS64 | 7 #if V8_TARGET_ARCH_MIPS64 |
8 | 8 |
9 #include "src/bootstrapper.h" | 9 #include "src/bootstrapper.h" |
10 #include "src/code-stubs.h" | 10 #include "src/code-stubs.h" |
(...skipping 269 matching lines...) |
280 __ Branch(&not_identical, ne, a0, Operand(a1)); | 280 __ Branch(&not_identical, ne, a0, Operand(a1)); |
281 | 281 |
282 __ li(exp_mask_reg, Operand(HeapNumber::kExponentMask)); | 282 __ li(exp_mask_reg, Operand(HeapNumber::kExponentMask)); |
283 | 283 |
284 // Test for NaN. Sadly, we can't just compare to Factory::nan_value(), | 284 // Test for NaN. Sadly, we can't just compare to Factory::nan_value(), |
285 // so we do the second best thing - test it ourselves. | 285 // so we do the second best thing - test it ourselves. |
286 // They are both equal and they are not both Smis so both of them are not | 286 // They are both equal and they are not both Smis so both of them are not |
287 // Smis. If it's not a heap number, then return equal. | 287 // Smis. If it's not a heap number, then return equal. |
288 __ GetObjectType(a0, t0, t0); | 288 __ GetObjectType(a0, t0, t0); |
289 if (cc == less || cc == greater) { | 289 if (cc == less || cc == greater) { |
| 290 Label not_simd; |
290 // Call runtime on identical JSObjects. | 291 // Call runtime on identical JSObjects. |
291 __ Branch(slow, greater, t0, Operand(FIRST_SPEC_OBJECT_TYPE)); | 292 __ Branch(slow, greater, t0, Operand(FIRST_SPEC_OBJECT_TYPE)); |
292 // Call runtime on identical symbols since we need to throw a TypeError. | 293 // Call runtime on identical symbols since we need to throw a TypeError. |
293 __ Branch(slow, eq, t0, Operand(SYMBOL_TYPE)); | 294 __ Branch(slow, eq, t0, Operand(SYMBOL_TYPE)); |
294 // Call runtime on identical SIMD values since we must throw a TypeError. | 295 // Call runtime on identical SIMD values since we must throw a TypeError. |
295 __ Branch(slow, eq, t0, Operand(FLOAT32X4_TYPE)); | 296 __ Branch(&not_simd, lt, t0, Operand(FIRST_SIMD_VALUE_TYPE)); |
| 297 __ Branch(slow, le, t0, Operand(LAST_SIMD_VALUE_TYPE)); |
| 298 __ bind(&not_simd); |
296 if (is_strong(strength)) { | 299 if (is_strong(strength)) { |
297 // Call the runtime on anything that is converted in the semantics, since | 300 // Call the runtime on anything that is converted in the semantics, since |
298 // we need to throw a TypeError. Smis have already been ruled out. | 301 // we need to throw a TypeError. Smis have already been ruled out. |
299 __ Branch(&return_equal, eq, t0, Operand(HEAP_NUMBER_TYPE)); | 302 __ Branch(&return_equal, eq, t0, Operand(HEAP_NUMBER_TYPE)); |
300 __ And(t0, t0, Operand(kIsNotStringMask)); | 303 __ And(t0, t0, Operand(kIsNotStringMask)); |
301 __ Branch(slow, ne, t0, Operand(zero_reg)); | 304 __ Branch(slow, ne, t0, Operand(zero_reg)); |
302 } | 305 } |
303 } else { | 306 } else { |
| 307 Label not_simd; |
304 __ Branch(&heap_number, eq, t0, Operand(HEAP_NUMBER_TYPE)); | 308 __ Branch(&heap_number, eq, t0, Operand(HEAP_NUMBER_TYPE)); |
305 // Comparing JS objects with <=, >= is complicated. | 309 // Comparing JS objects with <=, >= is complicated. |
306 if (cc != eq) { | 310 if (cc != eq) { |
307 __ Branch(slow, greater, t0, Operand(FIRST_SPEC_OBJECT_TYPE)); | 311 __ Branch(slow, greater, t0, Operand(FIRST_SPEC_OBJECT_TYPE)); |
308 // Call runtime on identical symbols since we need to throw a TypeError. | 312 // Call runtime on identical symbols since we need to throw a TypeError. |
309 __ Branch(slow, eq, t0, Operand(SYMBOL_TYPE)); | 313 __ Branch(slow, eq, t0, Operand(SYMBOL_TYPE)); |
310 // Call runtime on identical SIMD values since we must throw a TypeError. | 314 // Call runtime on identical SIMD values since we must throw a TypeError. |
311 __ Branch(slow, eq, t0, Operand(FLOAT32X4_TYPE)); | 315 __ Branch(&not_simd, lt, t0, Operand(FIRST_SIMD_VALUE_TYPE)); |
| 316 __ Branch(slow, le, t0, Operand(LAST_SIMD_VALUE_TYPE)); |
| 317 __ bind(&not_simd); |
312 if (is_strong(strength)) { | 318 if (is_strong(strength)) { |
313 // Call the runtime on anything that is converted in the semantics, | 319 // Call the runtime on anything that is converted in the semantics, |
314 // since we need to throw a TypeError. Smis and heap numbers have | 320 // since we need to throw a TypeError. Smis and heap numbers have |
315 // already been ruled out. | 321 // already been ruled out. |
316 __ And(t0, t0, Operand(kIsNotStringMask)); | 322 __ And(t0, t0, Operand(kIsNotStringMask)); |
317 __ Branch(slow, ne, t0, Operand(zero_reg)); | 323 __ Branch(slow, ne, t0, Operand(zero_reg)); |
318 } | 324 } |
319 // Normally here we fall through to return_equal, but undefined is | 325 // Normally here we fall through to return_equal, but undefined is |
320 // special: (undefined == undefined) == true, but | 326 // special: (undefined == undefined) == true, but |
321 // (undefined <= undefined) == false! See ECMAScript 11.8.5. | 327 // (undefined <= undefined) == false! See ECMAScript 11.8.5. |
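
For orientation, a minimal C++ sketch of the fast-path decision this hunk implements for identical operands, written against assumed stand-ins (the enum ordering, Outcome, and IdenticalCompare below are illustrative, not V8 declarations). The functional change is that the single FLOAT32X4_TYPE equality branch becomes a range test over all SIMD value types:

#include <cstdint>

// Illustrative instance-type constants; the real ordering lives in
// src/objects.h and is only assumed here.
enum InstanceType : uint16_t {
  HEAP_NUMBER_TYPE,
  SYMBOL_TYPE,
  FIRST_SIMD_VALUE_TYPE,  // Float32x4, Int32x4, Bool8x16, ...
  LAST_SIMD_VALUE_TYPE,
  FIRST_SPEC_OBJECT_TYPE  // JS receivers start here
};

enum class Outcome { kEqual, kCallRuntime };

// cc_is_ordering stands for "cc == less || cc == greater" in the stub.
Outcome IdenticalCompare(InstanceType type, bool cc_is_ordering,
                         bool strong_mode, bool is_string) {
  if (cc_is_ordering && type > FIRST_SPEC_OBJECT_TYPE)
    return Outcome::kCallRuntime;                 // identical JS objects
  if (type == SYMBOL_TYPE) return Outcome::kCallRuntime;
  // The patch widens the single FLOAT32X4_TYPE test to the whole SIMD
  // value range; identical SIMD values must throw a TypeError.
  if (type >= FIRST_SIMD_VALUE_TYPE && type <= LAST_SIMD_VALUE_TYPE)
    return Outcome::kCallRuntime;
  if (strong_mode && type != HEAP_NUMBER_TYPE && !is_string)
    return Outcome::kCallRuntime;                 // strong semantics throw
  // NaN and the (undefined <= undefined) == false case are handled in the
  // parts of the stub the diff elides.
  return Outcome::kEqual;
}

The interval test is split into two MIPS branches because each Branch compares against a single operand: `lt` against FIRST_SIMD_VALUE_TYPE skips past the check via the new not_simd label, and `le` against LAST_SIMD_VALUE_TYPE sends anything still in range to the slow path.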
(...skipping 4975 matching lines...) |
5297 GenerateCase(masm, FAST_HOLEY_ELEMENTS); | 5303 GenerateCase(masm, FAST_HOLEY_ELEMENTS); |
5298 | 5304 |
5299 __ bind(&fast_elements_case); | 5305 __ bind(&fast_elements_case); |
5300 GenerateCase(masm, FAST_ELEMENTS); | 5306 GenerateCase(masm, FAST_ELEMENTS); |
5301 } | 5307 } |
5302 | 5308 |
5303 | 5309 |
5304 void LoadGlobalViaContextStub::Generate(MacroAssembler* masm) { | 5310 void LoadGlobalViaContextStub::Generate(MacroAssembler* masm) { |
5305 Register context_reg = cp; | 5311 Register context_reg = cp; |
5306 Register slot_reg = a2; | 5312 Register slot_reg = a2; |
| 5313 Register name_reg = a3; |
5307 Register result_reg = v0; | 5314 Register result_reg = v0; |
5308 Label slow_case; | 5315 Label slow_case; |
5309 | 5316 |
5310 // Go up context chain to the script context. | 5317 // Go up context chain to the script context. |
5311 for (int i = 0; i < depth(); ++i) { | 5318 for (int i = 0; i < depth(); ++i) { |
5312 __ ld(result_reg, ContextOperand(context_reg, Context::PREVIOUS_INDEX)); | 5319 __ ld(result_reg, ContextOperand(context_reg, Context::PREVIOUS_INDEX)); |
5313 context_reg = result_reg; | 5320 context_reg = result_reg; |
5314 } | 5321 } |
5315 | 5322 |
5316 // Load the PropertyCell value at the specified slot. | 5323 // Load the PropertyCell value at the specified slot. |
5317 __ dsll(at, slot_reg, kPointerSizeLog2); | 5324 __ dsll(at, slot_reg, kPointerSizeLog2); |
5318 __ Daddu(at, at, Operand(context_reg)); | 5325 __ Daddu(at, at, Operand(context_reg)); |
5319 __ ld(result_reg, ContextOperand(at, 0)); | 5326 __ Daddu(at, at, Context::SlotOffset(0)); |
| 5327 __ ld(result_reg, MemOperand(at)); |
5320 __ ld(result_reg, FieldMemOperand(result_reg, PropertyCell::kValueOffset)); | 5328 __ ld(result_reg, FieldMemOperand(result_reg, PropertyCell::kValueOffset)); |
5321 | 5329 |
5322 // Check that value is not the_hole. | 5330 // Check that value is not the_hole. |
5323 __ LoadRoot(at, Heap::kTheHoleValueRootIndex); | 5331 __ LoadRoot(at, Heap::kTheHoleValueRootIndex); |
5324 __ Branch(&slow_case, eq, result_reg, Operand(at)); | 5332 __ Branch(&slow_case, eq, result_reg, Operand(at)); |
5325 __ Ret(); | 5333 __ Ret(); |
5326 | 5334 |
5327 // Fallback to the runtime. | 5335 // Fallback to the runtime. |
5328 __ bind(&slow_case); | 5336 __ bind(&slow_case); |
5329 __ SmiTag(slot_reg); | 5337 __ SmiTag(slot_reg); |
5330 __ Push(slot_reg); | 5338 __ Push(slot_reg, name_reg); |
5331 __ TailCallRuntime(Runtime::kLoadGlobalViaContext, 1, 1); | 5339 __ TailCallRuntime(Runtime::kLoadGlobalViaContext, 2, 1); |
5332 } | 5340 } |
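
For orientation, a hedged C++ sketch of what LoadGlobalViaContextStub computes, using stand-in types rather than V8's real declarations: walk depth() PREVIOUS links up to the script context, load the PropertyCell at the requested slot, and return its value unless it is the hole, in which case the runtime fallback (which after this patch also receives the name) is taken.

// Illustrative model of the load stub's fast path; Object, Context,
// PropertyCell and the runtime fallback are stand-ins, not V8's classes.
#include <vector>

struct Object {};
Object the_hole_value;                        // stands in for the_hole root
Object* const the_hole = &the_hole_value;

struct PropertyCell { Object* value; };

struct Context {
  Context* previous;                          // Context::PREVIOUS_INDEX
  std::vector<PropertyCell*> slots;           // script context slots
};

// Stand-in for the (now two-argument) Runtime::kLoadGlobalViaContext call.
Object* LoadGlobalFromRuntime(int /*slot*/, Object* /*name*/) { return nullptr; }

Object* LoadGlobalViaContext(Context* context, int depth, int slot,
                             Object* name) {
  // Go up the context chain to the script context (the stub's loop).
  for (int i = 0; i < depth; ++i) context = context->previous;
  // Load the PropertyCell value at the given slot.
  Object* value = context->slots[slot]->value;
  if (value != the_hole) return value;        // fast path: not the hole
  return LoadGlobalFromRuntime(slot, name);   // slow_case: push slot + name
}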
5333 | 5341 |
5334 | 5342 |
5335 void StoreGlobalViaContextStub::Generate(MacroAssembler* masm) { | 5343 void StoreGlobalViaContextStub::Generate(MacroAssembler* masm) { |
5336 Register context_reg = cp; | 5344 Register context_reg = cp; |
5337 Register slot_reg = a2; | 5345 Register slot_reg = a2; |
| 5346 Register name_reg = a3; |
5338 Register value_reg = a0; | 5347 Register value_reg = a0; |
5339 Register cell_reg = a4; | 5348 Register cell_reg = a4; |
5340 Register cell_value_reg = a5; | 5349 Register cell_details_reg = a5; |
5341 Register cell_details_reg = a6; | |
5342 Label fast_heapobject_case, fast_smi_case, slow_case; | 5350 Label fast_heapobject_case, fast_smi_case, slow_case; |
5343 | 5351 |
5344 if (FLAG_debug_code) { | 5352 if (FLAG_debug_code) { |
5345 __ LoadRoot(at, Heap::kTheHoleValueRootIndex); | 5353 __ LoadRoot(at, Heap::kTheHoleValueRootIndex); |
5346 __ Check(ne, kUnexpectedValue, value_reg, Operand(at)); | 5354 __ Check(ne, kUnexpectedValue, value_reg, Operand(at)); |
| 5355 __ AssertName(name_reg); |
5347 } | 5356 } |
5348 | 5357 |
5349 // Go up context chain to the script context. | 5358 // Go up context chain to the script context. |
5350 for (int i = 0; i < depth(); ++i) { | 5359 for (int i = 0; i < depth(); ++i) { |
5351 __ ld(cell_reg, ContextOperand(context_reg, Context::PREVIOUS_INDEX)); | 5360 __ ld(cell_reg, ContextOperand(context_reg, Context::PREVIOUS_INDEX)); |
5352 context_reg = cell_reg; | 5361 context_reg = cell_reg; |
5353 } | 5362 } |
5354 | 5363 |
5355 // Load the PropertyCell at the specified slot. | 5364 // Load the PropertyCell at the specified slot. |
5356 __ dsll(at, slot_reg, kPointerSizeLog2); | 5365 __ dsll(at, slot_reg, kPointerSizeLog2); |
5357 __ Daddu(at, at, Operand(context_reg)); | 5366 __ Daddu(at, at, Operand(context_reg)); |
5358 __ ld(cell_reg, ContextOperand(at, 0)); | 5367 __ Daddu(at, at, Context::SlotOffset(0)); |
| 5368 __ ld(cell_reg, MemOperand(at)); |
5359 | 5369 |
5360 // Load PropertyDetails for the cell (actually only the cell_type and kind). | 5370 // Load PropertyDetails for the cell (actually only the cell_type and kind). |
5361 __ ld(cell_details_reg, | 5371 __ ld(cell_details_reg, |
5362 FieldMemOperand(cell_reg, PropertyCell::kDetailsOffset)); | 5372 FieldMemOperand(cell_reg, PropertyCell::kDetailsOffset)); |
5363 __ SmiUntag(cell_details_reg); | 5373 __ SmiUntag(cell_details_reg); |
5364 __ And(cell_details_reg, cell_details_reg, | 5374 __ And(cell_details_reg, cell_details_reg, |
5365 PropertyDetails::PropertyCellTypeField::kMask | | 5375 PropertyDetails::PropertyCellTypeField::kMask | |
5366 PropertyDetails::KindField::kMask | | 5376 PropertyDetails::KindField::kMask); |
5367 PropertyDetails::kAttributesReadOnlyMask); | |
5368 | 5377 |
5369 // Check if PropertyCell holds mutable data. | 5378 // Check if PropertyCell holds mutable data. |
5370 Label not_mutable_data; | 5379 Label not_mutable_data; |
5371 __ Branch(&not_mutable_data, ne, cell_details_reg, | 5380 __ Branch(&not_mutable_data, ne, cell_details_reg, |
5372 Operand(PropertyDetails::PropertyCellTypeField::encode( | 5381 Operand(PropertyDetails::PropertyCellTypeField::encode( |
5373 PropertyCellType::kMutable) | | 5382 PropertyCellType::kMutable) | |
5374 PropertyDetails::KindField::encode(kData))); | 5383 PropertyDetails::KindField::encode(kData))); |
5375 __ JumpIfSmi(value_reg, &fast_smi_case); | 5384 __ JumpIfSmi(value_reg, &fast_smi_case); |
5376 __ bind(&fast_heapobject_case); | 5385 __ bind(&fast_heapobject_case); |
5377 __ sd(value_reg, FieldMemOperand(cell_reg, PropertyCell::kValueOffset)); | 5386 __ sd(value_reg, FieldMemOperand(cell_reg, PropertyCell::kValueOffset)); |
5378 __ RecordWriteField(cell_reg, PropertyCell::kValueOffset, value_reg, | 5387 __ RecordWriteField(cell_reg, PropertyCell::kValueOffset, value_reg, |
5379 cell_details_reg, kRAHasNotBeenSaved, kDontSaveFPRegs, | 5388 cell_details_reg, kRAHasNotBeenSaved, kDontSaveFPRegs, |
5380 EMIT_REMEMBERED_SET, OMIT_SMI_CHECK); | 5389 EMIT_REMEMBERED_SET, OMIT_SMI_CHECK); |
5381 // RecordWriteField clobbers the value register, so we need to reload. | 5390 // RecordWriteField clobbers the value register, so we need to reload. |
5382 __ Ret(USE_DELAY_SLOT); | 5391 __ Ret(USE_DELAY_SLOT); |
5383 __ ld(value_reg, FieldMemOperand(cell_reg, PropertyCell::kValueOffset)); | 5392 __ ld(value_reg, FieldMemOperand(cell_reg, PropertyCell::kValueOffset)); |
5384 __ bind(&not_mutable_data); | 5393 __ bind(&not_mutable_data); |
5385 | 5394 |
5386 // Check if PropertyCell value matches the new value (relevant for Constant, | 5395 // Check if PropertyCell value matches the new value (relevant for Constant, |
5387 // ConstantType and Undefined cells). | 5396 // ConstantType and Undefined cells). |
5388 Label not_same_value; | 5397 Label not_same_value; |
5389 __ ld(cell_value_reg, FieldMemOperand(cell_reg, PropertyCell::kValueOffset)); | 5398 __ ld(at, FieldMemOperand(cell_reg, PropertyCell::kValueOffset)); |
5390 __ Branch(&not_same_value, ne, value_reg, Operand(cell_value_reg)); | 5399 __ Branch(&not_same_value, ne, value_reg, Operand(at)); |
5391 // Make sure the PropertyCell is not marked READ_ONLY. | |
5392 __ And(at, cell_details_reg, PropertyDetails::kAttributesReadOnlyMask); | |
5393 __ Branch(&slow_case, ne, at, Operand(zero_reg)); | |
5394 if (FLAG_debug_code) { | 5400 if (FLAG_debug_code) { |
5395 Label done; | 5401 Label done; |
5396 // This can only be true for Constant, ConstantType and Undefined cells, | 5402 // This can only be true for Constant, ConstantType and Undefined cells, |
5397 // because we never store the_hole via this stub. | 5403 // because we never store the_hole via this stub. |
5398 __ Branch(&done, eq, cell_details_reg, | 5404 __ Branch(&done, eq, cell_details_reg, |
5399 Operand(PropertyDetails::PropertyCellTypeField::encode( | 5405 Operand(PropertyDetails::PropertyCellTypeField::encode( |
5400 PropertyCellType::kConstant) | | 5406 PropertyCellType::kConstant) | |
5401 PropertyDetails::KindField::encode(kData))); | 5407 PropertyDetails::KindField::encode(kData))); |
5402 __ Branch(&done, eq, cell_details_reg, | 5408 __ Branch(&done, eq, cell_details_reg, |
5403 Operand(PropertyDetails::PropertyCellTypeField::encode( | 5409 Operand(PropertyDetails::PropertyCellTypeField::encode( |
5404 PropertyCellType::kConstantType) | | 5410 PropertyCellType::kConstantType) | |
5405 PropertyDetails::KindField::encode(kData))); | 5411 PropertyDetails::KindField::encode(kData))); |
5406 __ Check(eq, kUnexpectedValue, cell_details_reg, | 5412 __ Check(eq, kUnexpectedValue, cell_details_reg, |
5407 Operand(PropertyDetails::PropertyCellTypeField::encode( | 5413 Operand(PropertyDetails::PropertyCellTypeField::encode( |
5408 PropertyCellType::kUndefined) | | 5414 PropertyCellType::kUndefined) | |
5409 PropertyDetails::KindField::encode(kData))); | 5415 PropertyDetails::KindField::encode(kData))); |
5410 __ bind(&done); | 5416 __ bind(&done); |
5411 } | 5417 } |
5412 __ Ret(); | 5418 __ Ret(); |
5413 __ bind(&not_same_value); | 5419 __ bind(&not_same_value); |
5414 | 5420 |
5415 // Check if PropertyCell contains data with constant type (and is not | 5421 // Check if PropertyCell contains data with constant type. |
5416 // READ_ONLY). | |
5417 __ Branch(&slow_case, ne, cell_details_reg, | 5422 __ Branch(&slow_case, ne, cell_details_reg, |
5418 Operand(PropertyDetails::PropertyCellTypeField::encode( | 5423 Operand(PropertyDetails::PropertyCellTypeField::encode( |
5419 PropertyCellType::kConstantType) | | 5424 PropertyCellType::kConstantType) | |
5420 PropertyDetails::KindField::encode(kData))); | 5425 PropertyDetails::KindField::encode(kData))); |
5421 | 5426 |
5422 // Now either both old and new values must be SMIs or both must be heap | 5427 // Now either both old and new values must be SMIs or both must be heap |
5423 // objects with same map. | 5428 // objects with same map. |
5424 Label value_is_heap_object; | 5429 Label value_is_heap_object; |
| 5430 Register cell_value_reg = cell_details_reg; |
| 5431 __ ld(cell_value_reg, FieldMemOperand(cell_reg, PropertyCell::kValueOffset)); |
5425 __ JumpIfNotSmi(value_reg, &value_is_heap_object); | 5432 __ JumpIfNotSmi(value_reg, &value_is_heap_object); |
5426 __ JumpIfNotSmi(cell_value_reg, &slow_case); | 5433 __ JumpIfNotSmi(cell_value_reg, &slow_case); |
5427 // Old and new values are SMIs, no need for a write barrier here. | 5434 // Old and new values are SMIs, no need for a write barrier here. |
5428 __ bind(&fast_smi_case); | 5435 __ bind(&fast_smi_case); |
5429 __ Ret(USE_DELAY_SLOT); | 5436 __ Ret(USE_DELAY_SLOT); |
5430 __ sd(value_reg, FieldMemOperand(cell_reg, PropertyCell::kValueOffset)); | 5437 __ sd(value_reg, FieldMemOperand(cell_reg, PropertyCell::kValueOffset)); |
5431 __ bind(&value_is_heap_object); | 5438 __ bind(&value_is_heap_object); |
5432 __ JumpIfSmi(cell_value_reg, &slow_case); | 5439 __ JumpIfSmi(cell_value_reg, &slow_case); |
5433 Register cell_value_map_reg = cell_value_reg; | 5440 Register cell_value_map_reg = cell_value_reg; |
5434 __ ld(cell_value_map_reg, | 5441 __ ld(cell_value_map_reg, |
5435 FieldMemOperand(cell_value_reg, HeapObject::kMapOffset)); | 5442 FieldMemOperand(cell_value_reg, HeapObject::kMapOffset)); |
5436 __ Branch(&fast_heapobject_case, eq, cell_value_map_reg, | 5443 __ Branch(&fast_heapobject_case, eq, cell_value_map_reg, |
5437 FieldMemOperand(value_reg, HeapObject::kMapOffset)); | 5444 FieldMemOperand(value_reg, HeapObject::kMapOffset)); |
5438 | 5445 |
5439 // Fallback to the runtime. | 5446 // Fallback to the runtime. |
5440 __ bind(&slow_case); | 5447 __ bind(&slow_case); |
5441 __ SmiTag(slot_reg); | 5448 __ SmiTag(slot_reg); |
5442 __ Push(slot_reg, value_reg); | 5449 __ Push(slot_reg, name_reg, value_reg); |
5443 __ TailCallRuntime(is_strict(language_mode()) | 5450 __ TailCallRuntime(is_strict(language_mode()) |
5444 ? Runtime::kStoreGlobalViaContext_Strict | 5451 ? Runtime::kStoreGlobalViaContext_Strict |
5445 : Runtime::kStoreGlobalViaContext_Sloppy, | 5452 : Runtime::kStoreGlobalViaContext_Sloppy, |
5446 2, 1); | 5453 3, 1); |
5447 } | 5454 } |
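
Similarly, a hedged sketch of the store stub's decision tree, again with placeholder types (CellType, Write, and StoreGlobalInRuntime are assumptions, not V8 API): mutable data cells take the direct-store path, storing the value a cell already holds is a no-op, constant-type cells accept the store only when old and new value are both Smis or share a map, and everything else tail-calls the runtime, which after this patch also receives the property name. The patch also removes the stub's READ_ONLY attribute mask and check.

// Illustrative decision tree for the store stub's fast paths; all types and
// helpers here are placeholders, not V8's object model.
struct Map {};
struct Object { bool is_smi; Map* map; };

enum class CellType { kMutable, kConstant, kConstantType, kUndefined };
struct PropertyCell { CellType type; Object* value; };

// Stand-in for the three-argument runtime call (slot, name, value).
bool StoreGlobalInRuntime(int /*slot*/, Object* /*name*/, Object* /*value*/) {
  return false;
}

// Store plus, for heap objects, the write barrier (RecordWriteField above).
void Write(PropertyCell* cell, Object* value) { cell->value = value; }

bool StoreGlobalViaContext(PropertyCell* cell, int slot, Object* name,
                           Object* value) {
  if (cell->type == CellType::kMutable) {      // fast_heapobject/fast_smi_case
    Write(cell, value);
    return true;
  }
  if (cell->value == value) return true;       // storing the value it holds
  if (cell->type != CellType::kConstantType)   // Constant/Undefined cells
    return StoreGlobalInRuntime(slot, name, value);
  // kConstantType: old and new must both be Smis or share the same map.
  Object* old = cell->value;
  bool both_smi = value->is_smi && old->is_smi;
  bool same_map = !value->is_smi && !old->is_smi && value->map == old->map;
  if (both_smi || same_map) {
    Write(cell, value);
    return true;
  }
  return StoreGlobalInRuntime(slot, name, value);  // slow_case
}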
5448 | 5455 |
5449 | 5456 |
5450 static int AddressOffset(ExternalReference ref0, ExternalReference ref1) { | 5457 static int AddressOffset(ExternalReference ref0, ExternalReference ref1) { |
5451 int64_t offset = (ref0.address() - ref1.address()); | 5458 int64_t offset = (ref0.address() - ref1.address()); |
5452 DCHECK(static_cast<int>(offset) == offset); | 5459 DCHECK(static_cast<int>(offset) == offset); |
5453 return static_cast<int>(offset); | 5460 return static_cast<int>(offset); |
5454 } | 5461 } |
5455 | 5462 |
5456 | 5463 |
(...skipping 293 matching lines...) |
5750 MemOperand(fp, 6 * kPointerSize), NULL); | 5757 MemOperand(fp, 6 * kPointerSize), NULL); |
5751 } | 5758 } |
5752 | 5759 |
5753 | 5760 |
5754 #undef __ | 5761 #undef __ |
5755 | 5762 |
5756 } // namespace internal | 5763 } // namespace internal |
5757 } // namespace v8 | 5764 } // namespace v8 |
5758 | 5765 |
5759 #endif // V8_TARGET_ARCH_MIPS64 | 5766 #endif // V8_TARGET_ARCH_MIPS64 |