OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/v8.h" | 5 #include "src/v8.h" |
6 | 6 |
7 #if V8_TARGET_ARCH_MIPS | 7 #if V8_TARGET_ARCH_MIPS |
8 | 8 |
9 #include "src/base/bits.h" | 9 #include "src/base/bits.h" |
10 #include "src/bootstrapper.h" | 10 #include "src/bootstrapper.h" |
(...skipping 5259 matching lines...)
5270 | 5270 |
5271 Label fast_elements_case; | 5271 Label fast_elements_case; |
5272 __ Branch(&fast_elements_case, eq, a3, Operand(FAST_ELEMENTS)); | 5272 __ Branch(&fast_elements_case, eq, a3, Operand(FAST_ELEMENTS)); |
5273 GenerateCase(masm, FAST_HOLEY_ELEMENTS); | 5273 GenerateCase(masm, FAST_HOLEY_ELEMENTS); |
5274 | 5274 |
5275 __ bind(&fast_elements_case); | 5275 __ bind(&fast_elements_case); |
5276 GenerateCase(masm, FAST_ELEMENTS); | 5276 GenerateCase(masm, FAST_ELEMENTS); |
5277 } | 5277 } |
5278 | 5278 |
5279 | 5279 |
| 5280 void LoadGlobalViaContextStub::Generate(MacroAssembler* masm) { |
| 5281 Register context_reg = cp; |
| 5282 Register slot_reg = a2; |
| 5283 Register name_reg = a3; |
| 5284 Register result_reg = v0; |
| 5285 Label slow_case; |
| 5286 |
| 5287 // Go up context chain to the script context. |
| 5288 for (int i = 0; i < depth(); ++i) { |
| 5289 __ lw(result_reg, ContextOperand(context_reg, Context::PREVIOUS_INDEX)); |
| 5290 context_reg = result_reg; |
| 5291 } |
| 5292 |
| 5293 // Load the PropertyCell value at the specified slot. |
| 5294 __ sll(at, slot_reg, kPointerSizeLog2); |
| 5295 __ Addu(at, at, Operand(context_reg)); |
| 5296 __ Addu(at, at, Context::SlotOffset(0)); |
| 5297 __ lw(result_reg, MemOperand(at)); |
| 5298 __ lw(result_reg, FieldMemOperand(result_reg, PropertyCell::kValueOffset)); |
| 5299 |
| 5300 // Check that value is not the_hole. |
| 5301 __ LoadRoot(at, Heap::kTheHoleValueRootIndex); |
| 5302 __ Branch(&slow_case, eq, result_reg, Operand(at)); |
| 5303 __ Ret(); |
| 5304 |
| 5305 // Fallback to the runtime. |
| 5306 __ bind(&slow_case); |
| 5307 __ SmiTag(slot_reg); |
| 5308 // On MIPS the return address stays in ra; push only the runtime arguments. |
| 5309 __ Push(slot_reg, name_reg); |
| 5310 __ TailCallRuntime(Runtime::kLoadGlobalViaContext, 2, 1); |
| 5311 } |
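
For orientation, the new load stub above amounts to the following logic. This is a simplified, hypothetical C++ sketch of the data walk the assembly performs; the Context/PropertyCell structs below are illustrative stand-ins, not V8's real classes.

    #include <vector>

    // Illustrative stand-ins for V8's Context and PropertyCell (assumption,
    // not the real API): a context points at its previous context and holds
    // a row of slots containing property cells.
    struct PropertyCell { const void* value; };
    struct Context {
      Context* previous;                     // Context::PREVIOUS_INDEX
      std::vector<PropertyCell*> slots;      // script context slots
    };

    // Fast path of LoadGlobalViaContextStub: walk depth() contexts up to the
    // script context, read the PropertyCell at the slot and return its value;
    // the_hole means the stub must tail-call Runtime::kLoadGlobalViaContext.
    const void* LoadGlobalSketch(Context* ctx, int depth, int slot,
                                 const void* the_hole) {
      for (int i = 0; i < depth; ++i) ctx = ctx->previous;
      const void* value = ctx->slots[slot]->value;
      return value == the_hole ? nullptr : value;  // nullptr: take slow path
    }
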
| 5312 |
| 5313 |
| 5314 void StoreGlobalViaContextStub::Generate(MacroAssembler* masm) { |
| 5315 Register context_reg = cp; |
| 5316 Register slot_reg = a2; |
| 5317 Register name_reg = a3; |
| 5318 Register value_reg = a0; |
| 5319 Register cell_reg = t0; |
| 5320 Register cell_details_reg = t1; |
| 5321 Label fast_case, slow_case; |
| 5322 |
| 5323 if (FLAG_debug_code) { |
| 5324 __ LoadRoot(at, Heap::kTheHoleValueRootIndex); |
| 5325 __ Check(ne, kUnexpectedValue, value_reg, Operand(at)); |
| 5326 __ AssertName(name_reg); |
| 5327 } |
| 5328 |
| 5329 // Go up context chain to the script context. |
| 5330 for (int i = 0; i < depth(); ++i) { |
| 5331 __ lw(cell_reg, ContextOperand(context_reg, Context::PREVIOUS_INDEX)); |
| 5332 context_reg = cell_reg; |
| 5333 } |
| 5334 |
| 5335 // Load the PropertyCell at the specified slot. |
| 5336 __ sll(at, slot_reg, kPointerSizeLog2); |
| 5337 __ Addu(at, at, Operand(context_reg)); |
| 5338 __ Addu(at, at, Context::SlotOffset(0)); |
| 5339 __ lw(cell_reg, MemOperand(at)); |
| 5340 |
| 5341 // Load PropertyDetails for the cell (actually only the cell_type and kind). |
| 5342 __ lw(cell_details_reg, |
| 5343 FieldMemOperand(cell_reg, PropertyCell::kDetailsOffset)); |
| 5344 __ SmiUntag(cell_details_reg); |
| 5345 __ And(cell_details_reg, cell_details_reg, |
| 5346 PropertyDetails::PropertyCellTypeField::kMask | |
| 5347 PropertyDetails::KindField::kMask); |
| 5348 |
| 5349 // Check if PropertyCell holds mutable data. |
| 5350 Label not_mutable_data; |
| 5351 __ Branch(¬_mutable_data, ne, cell_details_reg, |
| 5352 Operand(PropertyDetails::PropertyCellTypeField::encode( |
| 5353 PropertyCellType::kMutable) | |
| 5354 PropertyDetails::KindField::encode(kData))); |
| 5355 __ bind(&fast_case); |
| 5356 __ sw(value_reg, FieldMemOperand(cell_reg, PropertyCell::kValueOffset)); |
| 5357 __ RecordWriteField(cell_reg, PropertyCell::kValueOffset, value_reg, |
| 5358 cell_details_reg, kRAHasNotBeenSaved, kDontSaveFPRegs); |
| 5359 // RecordWriteField clobbers the value register, so we need to reload. |
| 5360 __ lw(value_reg, FieldMemOperand(cell_reg, PropertyCell::kValueOffset)); |
| 5361 __ Ret(); |
| 5362 __ bind(¬_mutable_data); |
| 5363 |
| 5364 // Check if PropertyCell value matches the new value (relevant for Constant |
| 5365 // and ConstantType cells). |
| 5366 Label not_same_value; |
| 5367 __ lw(at, FieldMemOperand(cell_reg, PropertyCell::kValueOffset)); |
| 5368 __ Branch(¬_same_value, ne, value_reg, Operand(at)); |
| 5369 if (FLAG_debug_code) { |
| 5370 Label done; |
| 5371 // This can only be true for Constant and ConstantType cells, because we |
| 5372 // never store the_hole via this stub. |
| 5373 __ Branch(&done, eq, cell_details_reg, |
| 5374 Operand(PropertyDetails::PropertyCellTypeField::encode( |
| 5375 PropertyCellType::kConstant) | |
| 5376 PropertyDetails::KindField::encode(kData))); |
| 5377 __ Check(eq, kUnexpectedValue, cell_details_reg, |
| 5378 Operand(PropertyDetails::PropertyCellTypeField::encode( |
| 5379 PropertyCellType::kConstantType) | |
| 5380 PropertyDetails::KindField::encode(kData))); |
| 5381 __ bind(&done); |
| 5382 } |
| 5383 __ Ret(); |
| 5384 __ bind(¬_same_value); |
| 5385 |
| 5386 // Check if PropertyCell contains data with constant type. |
| 5387 __ Branch(&slow_case, ne, cell_details_reg, |
| 5388 Operand(PropertyDetails::PropertyCellTypeField::encode( |
| 5389 PropertyCellType::kConstantType) | |
| 5390 PropertyDetails::KindField::encode(kData))); |
| 5391 |
| 5392 // Now either both old and new values must be SMIs or both must be heap |
| 5393 // objects with same map. |
| 5394 Label value_is_heap_object; |
| 5395 Register cell_value_reg = cell_details_reg; |
| 5396 __ lw(cell_value_reg, FieldMemOperand(cell_reg, PropertyCell::kValueOffset)); |
| 5397 __ JumpIfNotSmi(value_reg, &value_is_heap_object); |
| 5398 __ JumpIfNotSmi(cell_value_reg, &slow_case); |
| 5399 // Old and new values are SMIs, no need for a write barrier here. |
| 5400 __ Ret(USE_DELAY_SLOT); |
| 5401 __ sw(value_reg, FieldMemOperand(cell_reg, PropertyCell::kValueOffset)); |
| 5402 __ bind(&value_is_heap_object); |
| 5403 __ JumpIfSmi(cell_value_reg, &slow_case); |
| 5404 Register cell_value_map_reg = cell_value_reg; |
| 5405 __ lw(cell_value_map_reg, |
| 5406 FieldMemOperand(cell_value_reg, HeapObject::kMapOffset)); |
| 5407 __ lw(at, FieldMemOperand(value_reg, HeapObject::kMapOffset)); |
| 5408 __ Branch(&fast_case, eq, cell_value_map_reg, Operand(at)); |
| 5409 |
| 5410 // Fallback to the runtime. |
| 5411 __ bind(&slow_case); |
| 5412 __ SmiTag(slot_reg); |
| 5413 // On MIPS the return address stays in ra; push only the runtime arguments. |
| 5414 __ Push(slot_reg, name_reg, value_reg); |
| 5415 __ TailCallRuntime(is_strict(language_mode()) |
| 5416 ? Runtime::kStoreGlobalViaContext_Strict |
| 5417 : Runtime::kStoreGlobalViaContext_Sloppy, |
| 5418 3, 1); |
| 5419 } |
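
The store stub's case analysis can be summarized as below. Again a hypothetical sketch under simplified types, not V8 API; it only models which stores stay on the fast path and which fall through to the Strict/Sloppy runtime call.

    // Illustrative cell model (assumption): the detail bits checked by the
    // stub reduce to a cell type; values are pointers with an is-smi flag
    // and, for heap objects, a map pointer.
    enum class CellType { kMutable, kConstant, kConstantType, kOther };
    struct Value { const void* ptr; bool is_smi; const void* map; };
    struct Cell { CellType type; Value value; };

    // Returns true if the store completes inline; false means fall back to
    // Runtime::kStoreGlobalViaContext_{Strict,Sloppy}.
    bool StoreGlobalSketch(Cell* cell, const Value& new_value) {
      if (cell->type == CellType::kMutable) {
        cell->value = new_value;             // with write barrier in the stub
        return true;
      }
      if (cell->value.ptr == new_value.ptr) return true;  // constant cells
      if (cell->type != CellType::kConstantType) return false;
      // ConstantType: old and new value must both be smis, or both heap
      // objects with the same map; everything else goes to the runtime.
      if (new_value.is_smi != cell->value.is_smi) return false;
      if (!new_value.is_smi && new_value.map != cell->value.map) return false;
      cell->value = new_value;               // barrier only for heap objects
      return true;
    }
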
| 5420 |
| 5421 |
5280 static int AddressOffset(ExternalReference ref0, ExternalReference ref1) { | 5422 static int AddressOffset(ExternalReference ref0, ExternalReference ref1) { |
5281 return ref0.address() - ref1.address(); | 5423 return ref0.address() - ref1.address(); |
5282 } | 5424 } |
5283 | 5425 |
5284 | 5426 |
5285 // Calls an API function. Allocates HandleScope, extracts returned value | 5427 // Calls an API function. Allocates HandleScope, extracts returned value |
5286 // from handle and propagates exceptions. Restores context. stack_space | 5428 // from handle and propagates exceptions. Restores context. stack_space |
5287 // - space to be unwound on exit (includes the call JS arguments space and | 5429 // - space to be unwound on exit (includes the call JS arguments space and |
5288 // the additional space allocated for the fast call). | 5430 // the additional space allocated for the fast call). |
5289 static void CallApiFunctionAndReturn( | 5431 static void CallApiFunctionAndReturn( |
(...skipping 289 matching lines...)
5579 MemOperand(fp, 6 * kPointerSize), NULL); | 5721 MemOperand(fp, 6 * kPointerSize), NULL); |
5580 } | 5722 } |
5581 | 5723 |
5582 | 5724 |
5583 #undef __ | 5725 #undef __ |
5584 | 5726 |
5585 } // namespace internal | 5727 } // namespace internal |
5586 } // namespace v8 | 5728 } // namespace v8 |
5587 | 5729 |
5588 #endif // V8_TARGET_ARCH_MIPS | 5730 #endif // V8_TARGET_ARCH_MIPS |