OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/v8.h" | 5 #include "src/v8.h" |
6 | 6 |
7 #if V8_TARGET_ARCH_MIPS | 7 #if V8_TARGET_ARCH_MIPS |
8 | 8 |
9 #include "src/base/bits.h" | 9 #include "src/base/bits.h" |
10 #include "src/bootstrapper.h" | 10 #include "src/bootstrapper.h" |
(...skipping 273 matching lines...) |
284 __ Branch(&not_identical, ne, a0, Operand(a1)); | 284 __ Branch(&not_identical, ne, a0, Operand(a1)); |
285 | 285 |
286 __ li(exp_mask_reg, Operand(HeapNumber::kExponentMask)); | 286 __ li(exp_mask_reg, Operand(HeapNumber::kExponentMask)); |
287 | 287 |
288 // Test for NaN. Sadly, we can't just compare to Factory::nan_value(), | 288 // Test for NaN. Sadly, we can't just compare to Factory::nan_value(), |
289 // so we do the second best thing - test it ourselves. | 289 // so we do the second best thing - test it ourselves. |
290 // They are both equal and they are not both Smis, so neither of them is a | 290 // They are both equal and they are not both Smis, so neither of them is a |
291 // Smi. If it's not a heap number, then return equal. | 291 // Smi. If it's not a heap number, then return equal. |
292 __ GetObjectType(a0, t4, t4); | 292 __ GetObjectType(a0, t4, t4); |
293 if (cc == less || cc == greater) { | 293 if (cc == less || cc == greater) { |
| 294 Label not_simd; |
294 // Call runtime on identical JSObjects. | 295 // Call runtime on identical JSObjects. |
295 __ Branch(slow, greater, t4, Operand(FIRST_SPEC_OBJECT_TYPE)); | 296 __ Branch(slow, greater, t4, Operand(FIRST_SPEC_OBJECT_TYPE)); |
296 // Call runtime on identical symbols since we need to throw a TypeError. | 297 // Call runtime on identical symbols since we need to throw a TypeError. |
297 __ Branch(slow, eq, t4, Operand(SYMBOL_TYPE)); | 298 __ Branch(slow, eq, t4, Operand(SYMBOL_TYPE)); |
298 // Call runtime on identical SIMD values since we must throw a TypeError. | 299 // Call runtime on identical SIMD values since we must throw a TypeError. |
299 __ Branch(slow, eq, t4, Operand(FLOAT32X4_TYPE)); | 300 __ Branch(&not_simd, lt, t4, Operand(FIRST_SIMD_VALUE_TYPE)); |
| 301 __ Branch(slow, le, t4, Operand(LAST_SIMD_VALUE_TYPE)); |
| 302 __ bind(&not_simd); |
300 if (is_strong(strength)) { | 303 if (is_strong(strength)) { |
301 // Call the runtime on anything that is converted in the semantics, since | 304 // Call the runtime on anything that is converted in the semantics, since |
302 // we need to throw a TypeError. Smis have already been ruled out. | 305 // we need to throw a TypeError. Smis have already been ruled out. |
303 __ Branch(&return_equal, eq, t4, Operand(HEAP_NUMBER_TYPE)); | 306 __ Branch(&return_equal, eq, t4, Operand(HEAP_NUMBER_TYPE)); |
304 __ And(t4, t4, Operand(kIsNotStringMask)); | 307 __ And(t4, t4, Operand(kIsNotStringMask)); |
305 __ Branch(slow, ne, t4, Operand(zero_reg)); | 308 __ Branch(slow, ne, t4, Operand(zero_reg)); |
306 } | 309 } |
307 } else { | 310 } else { |
| 311 Label not_simd; |
308 __ Branch(&heap_number, eq, t4, Operand(HEAP_NUMBER_TYPE)); | 312 __ Branch(&heap_number, eq, t4, Operand(HEAP_NUMBER_TYPE)); |
309 // Comparing JS objects with <=, >= is complicated. | 313 // Comparing JS objects with <=, >= is complicated. |
310 if (cc != eq) { | 314 if (cc != eq) { |
311 __ Branch(slow, greater, t4, Operand(FIRST_SPEC_OBJECT_TYPE)); | 315 __ Branch(slow, greater, t4, Operand(FIRST_SPEC_OBJECT_TYPE)); |
312 // Call runtime on identical symbols since we need to throw a TypeError. | 316 // Call runtime on identical symbols since we need to throw a TypeError. |
313 __ Branch(slow, eq, t4, Operand(SYMBOL_TYPE)); | 317 __ Branch(slow, eq, t4, Operand(SYMBOL_TYPE)); |
314 // Call runtime on identical SIMD values since we must throw a TypeError. | 318 // Call runtime on identical SIMD values since we must throw a TypeError. |
315 __ Branch(slow, eq, t4, Operand(FLOAT32X4_TYPE)); | 319 __ Branch(&not_simd, lt, t4, Operand(FIRST_SIMD_VALUE_TYPE)); |
| 320 __ Branch(slow, le, t4, Operand(LAST_SIMD_VALUE_TYPE)); |
| 321 __ bind(&not_simd); |
316 if (is_strong(strength)) { | 322 if (is_strong(strength)) { |
317 // Call the runtime on anything that is converted in the semantics, | 323 // Call the runtime on anything that is converted in the semantics, |
318 // since we need to throw a TypeError. Smis and heap numbers have | 324 // since we need to throw a TypeError. Smis and heap numbers have |
319 // already been ruled out. | 325 // already been ruled out. |
320 __ And(t4, t4, Operand(kIsNotStringMask)); | 326 __ And(t4, t4, Operand(kIsNotStringMask)); |
321 __ Branch(slow, ne, t4, Operand(zero_reg)); | 327 __ Branch(slow, ne, t4, Operand(zero_reg)); |
322 } | 328 } |
323 // Normally here we fall through to return_equal, but undefined is | 329 // Normally here we fall through to return_equal, but undefined is |
324 // special: (undefined == undefined) == true, but | 330 // special: (undefined == undefined) == true, but |
325 // (undefined <= undefined) == false! See ECMAScript 11.8.5. | 331 // (undefined <= undefined) == false! See ECMAScript 11.8.5. |
(...skipping 4940 matching lines...) |
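Side note on the SIMD check in the comparison hunk above: the new code replaces the single equality test against FLOAT32X4_TYPE with a two-branch range test over the block of SIMD value instance types. A minimal C++ sketch (not part of the CL) of the predicate those two branches implement, assuming, as the new code relies on, that FIRST_SIMD_VALUE_TYPE and LAST_SIMD_VALUE_TYPE bound one contiguous run in the InstanceType enum:

    // Sketch only: what the Branch pair above computes.
    static bool IsSimdValueType(InstanceType type) {
      // Branch(&not_simd, lt, ...) skips everything below the range;
      // Branch(slow, le, ...) then sends everything inside it to the slow path.
      return type >= FIRST_SIMD_VALUE_TYPE && type <= LAST_SIMD_VALUE_TYPE;
    }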
5266 GenerateCase(masm, FAST_HOLEY_ELEMENTS); | 5272 GenerateCase(masm, FAST_HOLEY_ELEMENTS); |
5267 | 5273 |
5268 __ bind(&fast_elements_case); | 5274 __ bind(&fast_elements_case); |
5269 GenerateCase(masm, FAST_ELEMENTS); | 5275 GenerateCase(masm, FAST_ELEMENTS); |
5270 } | 5276 } |
5271 | 5277 |
5272 | 5278 |
5273 void LoadGlobalViaContextStub::Generate(MacroAssembler* masm) { | 5279 void LoadGlobalViaContextStub::Generate(MacroAssembler* masm) { |
5274 Register context_reg = cp; | 5280 Register context_reg = cp; |
5275 Register slot_reg = a2; | 5281 Register slot_reg = a2; |
| 5282 Register name_reg = a3; |
5276 Register result_reg = v0; | 5283 Register result_reg = v0; |
5277 Label slow_case; | 5284 Label slow_case; |
5278 | 5285 |
5279 // Go up context chain to the script context. | 5286 // Go up context chain to the script context. |
5280 for (int i = 0; i < depth(); ++i) { | 5287 for (int i = 0; i < depth(); ++i) { |
5281 __ lw(result_reg, ContextOperand(context_reg, Context::PREVIOUS_INDEX)); | 5288 __ lw(result_reg, ContextOperand(context_reg, Context::PREVIOUS_INDEX)); |
5282 context_reg = result_reg; | 5289 context_reg = result_reg; |
5283 } | 5290 } |
5284 | 5291 |
5285 // Load the PropertyCell value at the specified slot. | 5292 // Load the PropertyCell value at the specified slot. |
5286 __ sll(at, slot_reg, kPointerSizeLog2); | 5293 __ sll(at, slot_reg, kPointerSizeLog2); |
5287 __ Addu(at, at, Operand(context_reg)); | 5294 __ Addu(at, at, Operand(context_reg)); |
5288 __ lw(result_reg, ContextOperand(at, 0)); | 5295 __ Addu(at, at, Context::SlotOffset(0)); |
| 5296 __ lw(result_reg, MemOperand(at)); |
5289 __ lw(result_reg, FieldMemOperand(result_reg, PropertyCell::kValueOffset)); | 5297 __ lw(result_reg, FieldMemOperand(result_reg, PropertyCell::kValueOffset)); |
5290 | 5298 |
5291 // Check that value is not the_hole. | 5299 // Check that value is not the_hole. |
5292 __ LoadRoot(at, Heap::kTheHoleValueRootIndex); | 5300 __ LoadRoot(at, Heap::kTheHoleValueRootIndex); |
5293 __ Branch(&slow_case, eq, result_reg, Operand(at)); | 5301 __ Branch(&slow_case, eq, result_reg, Operand(at)); |
5294 __ Ret(); | 5302 __ Ret(); |
5295 | 5303 |
5296 // Fallback to the runtime. | 5304 // Fallback to the runtime. |
5297 __ bind(&slow_case); | 5305 __ bind(&slow_case); |
5298 __ SmiTag(slot_reg); | 5306 __ SmiTag(slot_reg); |
5299 __ Push(slot_reg); | 5307 __ Push(slot_reg, name_reg); |
5300 __ TailCallRuntime(Runtime::kLoadGlobalViaContext, 1, 1); | 5308 __ TailCallRuntime(Runtime::kLoadGlobalViaContext, 2, 1); |
5301 } | 5309 } |
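The slot addressing above changed from a ContextOperand(at, 0) load to an explicit add of Context::SlotOffset(0) followed by a plain MemOperand load; assuming ContextOperand(reg, index) expands to MemOperand(reg, Context::SlotOffset(index)) as in the MIPS macro assembler, both forms address the same cell. A rough, hypothetical C-level restatement of the three-instruction sequence (helper name is illustrative, not from the CL):

    // Sketch: effective address of script-context slot `slot`.
    static intptr_t ContextSlotAddress(intptr_t context, int slot) {
      return (static_cast<intptr_t>(slot) << kPointerSizeLog2)  // sll(at, slot_reg, kPointerSizeLog2)
             + context                                          // Addu(at, at, Operand(context_reg))
             + Context::SlotOffset(0);                          // Addu(at, at, Context::SlotOffset(0))
    }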
5302 | 5310 |
5303 | 5311 |
5304 void StoreGlobalViaContextStub::Generate(MacroAssembler* masm) { | 5312 void StoreGlobalViaContextStub::Generate(MacroAssembler* masm) { |
5305 Register context_reg = cp; | 5313 Register context_reg = cp; |
5306 Register slot_reg = a2; | 5314 Register slot_reg = a2; |
| 5315 Register name_reg = a3; |
5307 Register value_reg = a0; | 5316 Register value_reg = a0; |
5308 Register cell_reg = t0; | 5317 Register cell_reg = t0; |
5309 Register cell_value_reg = t1; | 5318 Register cell_details_reg = t1; |
5310 Register cell_details_reg = t2; | |
5311 Label fast_heapobject_case, fast_smi_case, slow_case; | 5319 Label fast_heapobject_case, fast_smi_case, slow_case; |
5312 | 5320 |
5313 if (FLAG_debug_code) { | 5321 if (FLAG_debug_code) { |
5314 __ LoadRoot(at, Heap::kTheHoleValueRootIndex); | 5322 __ LoadRoot(at, Heap::kTheHoleValueRootIndex); |
5315 __ Check(ne, kUnexpectedValue, value_reg, Operand(at)); | 5323 __ Check(ne, kUnexpectedValue, value_reg, Operand(at)); |
| 5324 __ AssertName(name_reg); |
5316 } | 5325 } |
5317 | 5326 |
5318 // Go up context chain to the script context. | 5327 // Go up context chain to the script context. |
5319 for (int i = 0; i < depth(); ++i) { | 5328 for (int i = 0; i < depth(); ++i) { |
5320 __ lw(cell_reg, ContextOperand(context_reg, Context::PREVIOUS_INDEX)); | 5329 __ lw(cell_reg, ContextOperand(context_reg, Context::PREVIOUS_INDEX)); |
5321 context_reg = cell_reg; | 5330 context_reg = cell_reg; |
5322 } | 5331 } |
5323 | 5332 |
5324 // Load the PropertyCell at the specified slot. | 5333 // Load the PropertyCell at the specified slot. |
5325 __ sll(at, slot_reg, kPointerSizeLog2); | 5334 __ sll(at, slot_reg, kPointerSizeLog2); |
5326 __ Addu(at, at, Operand(context_reg)); | 5335 __ Addu(at, at, Operand(context_reg)); |
5327 __ lw(cell_reg, ContextOperand(at, 0)); | 5336 __ Addu(at, at, Context::SlotOffset(0)); |
| 5337 __ lw(cell_reg, MemOperand(at)); |
5328 | 5338 |
5329 // Load PropertyDetails for the cell (actually only the cell_type and kind). | 5339 // Load PropertyDetails for the cell (actually only the cell_type and kind). |
5330 __ lw(cell_details_reg, | 5340 __ lw(cell_details_reg, |
5331 FieldMemOperand(cell_reg, PropertyCell::kDetailsOffset)); | 5341 FieldMemOperand(cell_reg, PropertyCell::kDetailsOffset)); |
5332 __ SmiUntag(cell_details_reg); | 5342 __ SmiUntag(cell_details_reg); |
5333 __ And(cell_details_reg, cell_details_reg, | 5343 __ And(cell_details_reg, cell_details_reg, |
5334 PropertyDetails::PropertyCellTypeField::kMask | | 5344 PropertyDetails::PropertyCellTypeField::kMask | |
5335 PropertyDetails::KindField::kMask | | 5345 PropertyDetails::KindField::kMask); |
5336 PropertyDetails::kAttributesReadOnlyMask); | |
5337 | 5346 |
5338 // Check if PropertyCell holds mutable data. | 5347 // Check if PropertyCell holds mutable data. |
5339 Label not_mutable_data; | 5348 Label not_mutable_data; |
5340 __ Branch(&not_mutable_data, ne, cell_details_reg, | 5349 __ Branch(&not_mutable_data, ne, cell_details_reg, |
5341 Operand(PropertyDetails::PropertyCellTypeField::encode( | 5350 Operand(PropertyDetails::PropertyCellTypeField::encode( |
5342 PropertyCellType::kMutable) | | 5351 PropertyCellType::kMutable) | |
5343 PropertyDetails::KindField::encode(kData))); | 5352 PropertyDetails::KindField::encode(kData))); |
5344 __ JumpIfSmi(value_reg, &fast_smi_case); | 5353 __ JumpIfSmi(value_reg, &fast_smi_case); |
5345 __ bind(&fast_heapobject_case); | 5354 __ bind(&fast_heapobject_case); |
5346 __ sw(value_reg, FieldMemOperand(cell_reg, PropertyCell::kValueOffset)); | 5355 __ sw(value_reg, FieldMemOperand(cell_reg, PropertyCell::kValueOffset)); |
5347 __ RecordWriteField(cell_reg, PropertyCell::kValueOffset, value_reg, | 5356 __ RecordWriteField(cell_reg, PropertyCell::kValueOffset, value_reg, |
5348 cell_details_reg, kRAHasNotBeenSaved, kDontSaveFPRegs, | 5357 cell_details_reg, kRAHasNotBeenSaved, kDontSaveFPRegs, |
5349 EMIT_REMEMBERED_SET, OMIT_SMI_CHECK); | 5358 EMIT_REMEMBERED_SET, OMIT_SMI_CHECK); |
5350 // RecordWriteField clobbers the value register, so we need to reload. | 5359 // RecordWriteField clobbers the value register, so we need to reload. |
5351 __ Ret(USE_DELAY_SLOT); | |
5352 __ lw(value_reg, FieldMemOperand(cell_reg, PropertyCell::kValueOffset)); | 5360 __ lw(value_reg, FieldMemOperand(cell_reg, PropertyCell::kValueOffset)); |
| 5361 __ Ret(); |
5353 __ bind(&not_mutable_data); | 5362 __ bind(&not_mutable_data); |
5354 | 5363 |
5355 // Check if PropertyCell value matches the new value (relevant for Constant, | 5364 // Check if PropertyCell value matches the new value (relevant for Constant, |
5356 // ConstantType and Undefined cells). | 5365 // ConstantType and Undefined cells). |
5357 Label not_same_value; | 5366 Label not_same_value; |
5358 __ lw(cell_value_reg, FieldMemOperand(cell_reg, PropertyCell::kValueOffset)); | 5367 __ lw(at, FieldMemOperand(cell_reg, PropertyCell::kValueOffset)); |
5359 __ Branch(&not_same_value, ne, value_reg, Operand(cell_value_reg)); | 5368 __ Branch(&not_same_value, ne, value_reg, Operand(at)); |
5360 // Make sure the PropertyCell is not marked READ_ONLY. | |
5361 __ And(at, cell_details_reg, PropertyDetails::kAttributesReadOnlyMask); | |
5362 __ Branch(&slow_case, ne, at, Operand(zero_reg)); | |
5363 if (FLAG_debug_code) { | 5369 if (FLAG_debug_code) { |
5364 Label done; | 5370 Label done; |
5365 // This can only be true for Constant, ConstantType and Undefined cells, | 5371 // This can only be true for Constant, ConstantType and Undefined cells, |
5366 // because we never store the_hole via this stub. | 5372 // because we never store the_hole via this stub. |
5367 __ Branch(&done, eq, cell_details_reg, | 5373 __ Branch(&done, eq, cell_details_reg, |
5368 Operand(PropertyDetails::PropertyCellTypeField::encode( | 5374 Operand(PropertyDetails::PropertyCellTypeField::encode( |
5369 PropertyCellType::kConstant) | | 5375 PropertyCellType::kConstant) | |
5370 PropertyDetails::KindField::encode(kData))); | 5376 PropertyDetails::KindField::encode(kData))); |
5371 __ Branch(&done, eq, cell_details_reg, | 5377 __ Branch(&done, eq, cell_details_reg, |
5372 Operand(PropertyDetails::PropertyCellTypeField::encode( | 5378 Operand(PropertyDetails::PropertyCellTypeField::encode( |
5373 PropertyCellType::kConstantType) | | 5379 PropertyCellType::kConstantType) | |
5374 PropertyDetails::KindField::encode(kData))); | 5380 PropertyDetails::KindField::encode(kData))); |
5375 __ Check(eq, kUnexpectedValue, cell_details_reg, | 5381 __ Check(eq, kUnexpectedValue, cell_details_reg, |
5376 Operand(PropertyDetails::PropertyCellTypeField::encode( | 5382 Operand(PropertyDetails::PropertyCellTypeField::encode( |
5377 PropertyCellType::kUndefined) | | 5383 PropertyCellType::kUndefined) | |
5378 PropertyDetails::KindField::encode(kData))); | 5384 PropertyDetails::KindField::encode(kData))); |
5379 __ bind(&done); | 5385 __ bind(&done); |
5380 } | 5386 } |
5381 __ Ret(); | 5387 __ Ret(); |
5382 __ bind(&not_same_value); | 5388 __ bind(&not_same_value); |
5383 | 5389 |
5384 // Check if PropertyCell contains data with constant type (and is not | 5390 // Check if PropertyCell contains data with constant type. |
5385 // READ_ONLY). | |
5386 __ Branch(&slow_case, ne, cell_details_reg, | 5391 __ Branch(&slow_case, ne, cell_details_reg, |
5387 Operand(PropertyDetails::PropertyCellTypeField::encode( | 5392 Operand(PropertyDetails::PropertyCellTypeField::encode( |
5388 PropertyCellType::kConstantType) | | 5393 PropertyCellType::kConstantType) | |
5389 PropertyDetails::KindField::encode(kData))); | 5394 PropertyDetails::KindField::encode(kData))); |
5390 | 5395 |
5391 // Now either both old and new values must be SMIs or both must be heap | 5396 // Now either both old and new values must be SMIs or both must be heap |
5392 // objects with the same map. | 5397 // objects with the same map. |
5393 Label value_is_heap_object; | 5398 Label value_is_heap_object; |
| 5399 Register cell_value_reg = cell_details_reg; |
| 5400 __ lw(cell_value_reg, FieldMemOperand(cell_reg, PropertyCell::kValueOffset)); |
5394 __ JumpIfNotSmi(value_reg, &value_is_heap_object); | 5401 __ JumpIfNotSmi(value_reg, &value_is_heap_object); |
5395 __ JumpIfNotSmi(cell_value_reg, &slow_case); | 5402 __ JumpIfNotSmi(cell_value_reg, &slow_case); |
5396 // Old and new values are SMIs, no need for a write barrier here. | 5403 // Old and new values are SMIs, no need for a write barrier here. |
5397 __ bind(&fast_smi_case); | 5404 __ bind(&fast_smi_case); |
5398 __ Ret(USE_DELAY_SLOT); | 5405 __ Ret(USE_DELAY_SLOT); |
5399 __ sw(value_reg, FieldMemOperand(cell_reg, PropertyCell::kValueOffset)); | 5406 __ sw(value_reg, FieldMemOperand(cell_reg, PropertyCell::kValueOffset)); |
5400 __ bind(&value_is_heap_object); | 5407 __ bind(&value_is_heap_object); |
5401 __ JumpIfSmi(cell_value_reg, &slow_case); | 5408 __ JumpIfSmi(cell_value_reg, &slow_case); |
5402 Register cell_value_map_reg = cell_value_reg; | 5409 Register cell_value_map_reg = cell_value_reg; |
5403 __ lw(cell_value_map_reg, | 5410 __ lw(cell_value_map_reg, |
5404 FieldMemOperand(cell_value_reg, HeapObject::kMapOffset)); | 5411 FieldMemOperand(cell_value_reg, HeapObject::kMapOffset)); |
5405 __ Branch(&fast_heapobject_case, eq, cell_value_map_reg, | 5412 __ Branch(&fast_heapobject_case, eq, cell_value_map_reg, |
5406 FieldMemOperand(value_reg, HeapObject::kMapOffset)); | 5413 FieldMemOperand(value_reg, HeapObject::kMapOffset)); |
5407 | 5414 |
5408 // Fallback to the runtime. | 5415 // Fallback to the runtime. |
5409 __ bind(&slow_case); | 5416 __ bind(&slow_case); |
5410 __ SmiTag(slot_reg); | 5417 __ SmiTag(slot_reg); |
5411 __ Push(slot_reg, value_reg); | 5418 __ Push(slot_reg, name_reg, value_reg); |
5412 __ TailCallRuntime(is_strict(language_mode()) | 5419 __ TailCallRuntime(is_strict(language_mode()) |
5413 ? Runtime::kStoreGlobalViaContext_Strict | 5420 ? Runtime::kStoreGlobalViaContext_Strict |
5414 : Runtime::kStoreGlobalViaContext_Sloppy, | 5421 : Runtime::kStoreGlobalViaContext_Sloppy, |
5415 2, 1); | 5422 3, 1); |
5416 } | 5423 } |
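For reference, the fast-path test on the untagged PropertyDetails word in the stub above masks the word down to the cell-type and kind bit fields and compares against the expected encoded pattern. A hedged C++ restatement (helper name is illustrative, not part of the CL), assuming the usual BitField-style kMask/encode interface of PropertyDetails:

    // Sketch: the condition guarding the mutable-data fast path.
    static bool IsMutableDataCell(uint32_t raw_details) {
      uint32_t masked =
          raw_details & (PropertyDetails::PropertyCellTypeField::kMask |
                         PropertyDetails::KindField::kMask);
      return masked == (PropertyDetails::PropertyCellTypeField::encode(
                            PropertyCellType::kMutable) |
                        PropertyDetails::KindField::encode(kData));
    }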
5417 | 5424 |
5418 | 5425 |
5419 static int AddressOffset(ExternalReference ref0, ExternalReference ref1) { | 5426 static int AddressOffset(ExternalReference ref0, ExternalReference ref1) { |
5420 return ref0.address() - ref1.address(); | 5427 return ref0.address() - ref1.address(); |
5421 } | 5428 } |
5422 | 5429 |
5423 | 5430 |
5424 // Calls an API function. Allocates HandleScope, extracts returned value | 5431 // Calls an API function. Allocates HandleScope, extracts returned value |
5425 // from handle and propagates exceptions. Restores context. stack_space | 5432 // from handle and propagates exceptions. Restores context. stack_space |
(...skipping 292 matching lines...) |
5718 MemOperand(fp, 6 * kPointerSize), NULL); | 5725 MemOperand(fp, 6 * kPointerSize), NULL); |
5719 } | 5726 } |
5720 | 5727 |
5721 | 5728 |
5722 #undef __ | 5729 #undef __ |
5723 | 5730 |
5724 } // namespace internal | 5731 } // namespace internal |
5725 } // namespace v8 | 5732 } // namespace v8 |
5726 | 5733 |
5727 #endif // V8_TARGET_ARCH_MIPS | 5734 #endif // V8_TARGET_ARCH_MIPS |