OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 6305 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
6316 | 6316 |
6317 | 6317 |
// Emits a map check: deoptimize the optimized code if the object's map is
// not the expected map held in instr->map().
void LCodeGen::DoCheckMapValue(LCheckMapValue* instr) {
  Register object = ToRegister(instr->value());
  // Compare the expected map register against the object's actual map word.
  __ cmp(ToRegister(instr->map()),
         FieldOperand(object, HeapObject::kMapOffset));
  // Bail out to the unoptimized code if the maps differ.
  DeoptimizeIf(not_equal, instr->environment());
}
6324 | 6324 |
6325 | 6325 |
// Deferred (slow-path) code for DoLoadFieldByIndex: calls into the runtime
// (Runtime::kLoadMutableDouble) to load a field that holds a mutable double.
// Runs with all registers saved/restored via the safepoint-registers scope;
// the runtime result (eax) replaces 'object' in its safepoint slot so the
// fast path sees the loaded value in 'object' on return.
void LCodeGen::DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr,
                                           Register object,
                                           Register index) {
  PushSafepointRegistersScope scope(this);
  // Runtime call arguments, pushed in declaration order: (object, index).
  __ push(object);
  __ push(index);
  // Clear esi (the context register) before the call — presumably a
  // "no context" marker for this runtime entry; TODO(review): confirm.
  __ xor_(esi, esi);
  __ CallRuntimeSaveDoubles(Runtime::kLoadMutableDouble);
  // Record a safepoint for the call with its 2 pushed arguments; no lazy
  // deoptimization is expected at this site.
  RecordSafepointWithRegisters(
      instr->pointer_map(), 2, Safepoint::kNoLazyDeopt);
  // Store the runtime's return value (eax) into 'object's safepoint slot so
  // it is restored into 'object' when the scope pops the registers.
  __ StoreToSafepointRegisterSlot(object, eax);
}
| 6338 |
| 6339 |
// Loads an object field selected by a dynamic (Smi) index.
// Index encoding (as decoded below — confirm against HLoadFieldByIndex):
//   - lowest payload bit set  => the field holds a mutable double; handled
//     out-of-line by DoDeferredLoadMutableDouble via a runtime call.
//   - after shifting that bit out: index >= 0 selects an in-object field,
//     index < 0 selects a slot in the out-of-object properties backing store.
// The result is written back into the 'object' register.
void LCodeGen::DoLoadFieldByIndex(LLoadFieldByIndex* instr) {
  // Deferred code object for the mutable-double slow path; carries the
  // registers (and x87 stack state) the out-of-line code needs.
  class DeferredLoadMutableDouble V8_FINAL : public LDeferredCode {
   public:
    DeferredLoadMutableDouble(LCodeGen* codegen,
                              LLoadFieldByIndex* instr,
                              Register object,
                              Register index,
                              const X87Stack& x87_stack)
        : LDeferredCode(codegen, x87_stack),
          instr_(instr),
          object_(object),
          index_(index) {
    }
    virtual void Generate() V8_OVERRIDE {
      codegen()->DoDeferredLoadMutableDouble(instr_, object_, index_);
    }
    virtual LInstruction* instr() V8_OVERRIDE { return instr_; }
   private:
    LLoadFieldByIndex* instr_;
    Register object_;
    Register index_;
  };

  Register object = ToRegister(instr->object());
  Register index = ToRegister(instr->index());

  DeferredLoadMutableDouble* deferred;
  deferred = new(zone()) DeferredLoadMutableDouble(
      this, instr, object, index, x87_stack_);

  Label out_of_object, done;
  // If the low payload bit of the Smi index is set, the field is a mutable
  // double — take the deferred (runtime-call) path.
  __ test(index, Immediate(Smi::FromInt(1)));
  __ j(not_zero, deferred->entry());

  // Shift out the mutable-double flag bit, leaving a plain Smi index.
  __ sar(index, 1);

  // Negative index => property lives in the out-of-object backing store.
  __ cmp(index, Immediate(0));
  __ j(less, &out_of_object, Label::kNear);
  // In-object case: index is a Smi, so scale by half a pointer to get the
  // byte offset of the field inside the object.
  __ mov(object, FieldOperand(object,
                              index,
                              times_half_pointer_size,
                              JSObject::kHeaderSize));
  __ jmp(&done, Label::kNear);

  __ bind(&out_of_object);
  // Load the properties backing-store array, then index it with the negated
  // (now positive) index.
  __ mov(object, FieldOperand(object, JSObject::kPropertiesOffset));
  __ neg(index);
  // Index is now equal to out of object property index plus 1.
  __ mov(object, FieldOperand(object,
                              index,
                              times_half_pointer_size,
                              FixedArray::kHeaderSize - kPointerSize));
  // The deferred slow path rejoins here, with its result already in 'object'.
  __ bind(deferred->exit());
  __ bind(&done);
}
6349 | 6395 |
6350 | 6396 |
6351 #undef __ | 6397 #undef __ |
6352 | 6398 |
6353 } } // namespace v8::internal | 6399 } } // namespace v8::internal |
6354 | 6400 |
6355 #endif // V8_TARGET_ARCH_IA32 | 6401 #endif // V8_TARGET_ARCH_IA32 |
OLD | NEW |