OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 2423 matching lines...)
2434 Register map = temp; | 2434 Register map = temp; |
2435 __ lw(map, FieldMemOperand(object, HeapObject::kMapOffset)); | 2435 __ lw(map, FieldMemOperand(object, HeapObject::kMapOffset)); |
2436 | 2436 |
2437 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_); | 2437 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_); |
2438 __ bind(deferred->map_check()); // Label for calculating code patching. | 2438 __ bind(deferred->map_check()); // Label for calculating code patching. |
2439 // We use Factory::the_hole_value() on purpose instead of loading from the | 2439 // We use Factory::the_hole_value() on purpose instead of loading from the |
2440 // root array to force relocation to be able to later patch with | 2440 // root array to force relocation to be able to later patch with |
2441 // the cached map. | 2441 // the cached map. |
2442 Handle<Cell> cell = factory()->NewCell(factory()->the_hole_value()); | 2442 Handle<Cell> cell = factory()->NewCell(factory()->the_hole_value()); |
2443 __ li(at, Operand(Handle<Object>(cell))); | 2443 __ li(at, Operand(Handle<Object>(cell))); |
2444 __ lw(at, FieldMemOperand(at, JSGlobalPropertyCell::kValueOffset)); | 2444 __ lw(at, FieldMemOperand(at, PropertyCell::kValueOffset)); |
2445 __ Branch(&cache_miss, ne, map, Operand(at)); | 2445 __ Branch(&cache_miss, ne, map, Operand(at)); |
2446 // We use Factory::the_hole_value() on purpose instead of loading from the | 2446 // We use Factory::the_hole_value() on purpose instead of loading from the |
2447 // root array to force relocation to be able to later patch | 2447 // root array to force relocation to be able to later patch |
2448 // with true or false. | 2448 // with true or false. |
2449 __ li(result, Operand(factory()->the_hole_value()), CONSTANT_SIZE); | 2449 __ li(result, Operand(factory()->the_hole_value()), CONSTANT_SIZE); |
2450 __ Branch(&done); | 2450 __ Branch(&done); |
2451 | 2451 |
2452 // The inlined call site cache did not match. Check null and string before | 2452 // The inlined call site cache did not match. Check null and string before |
2453 // calling the deferred code. | 2453 // calling the deferred code. |
2454 __ bind(&cache_miss); | 2454 __ bind(&cache_miss); |
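[Review note] The comment above explains the trick in this hunk: the hole value is embedded through a freshly allocated cell so the relocated constant can later be patched with the cached map at the inlined call site, and the map comparison goes through the cell's value slot. A minimal stand-alone C++ sketch of that "patchable slot" idea, using hypothetical names (PatchableCell, CacheHit) that are not V8 API:

    #include <cstdio>

    // Stand-in for a heap PropertyCell: a fixed-address slot holding a value.
    struct PatchableCell {
      const void* value;  // starts out holding a hole/placeholder
    };

    // Models the emitted sequence: load the cell's value and compare it with
    // the object's map (lw at, [cell]; Branch ne, map, at).
    bool CacheHit(const PatchableCell* cell, const void* object_map) {
      return cell->value == object_map;
    }

    int main() {
      static const int kHole = 0;       // "the_hole_value" placeholder
      static const int kCachedMap = 1;  // the map patched in later

      PatchableCell cell{&kHole};       // initially misses for every map
      std::printf("before patch: %d\n", CacheHit(&cell, &kCachedMap));

      cell.value = &kCachedMap;         // the later "code patching" step
      std::printf("after patch:  %d\n", CacheHit(&cell, &kCachedMap));
      return 0;
    }

The generated code only ever embeds the cell's stable address; updating the cell's contents is enough to change the outcome of the comparison without rewriting the branch itself.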
(...skipping 2555 matching lines...)
5010 } | 5010 } |
5011 } | 5011 } |
5012 | 5012 |
5013 | 5013 |
5014 void LCodeGen::DoCheckFunction(LCheckFunction* instr) { | 5014 void LCodeGen::DoCheckFunction(LCheckFunction* instr) { |
5015 Register reg = ToRegister(instr->value()); | 5015 Register reg = ToRegister(instr->value()); |
5016 Handle<JSFunction> target = instr->hydrogen()->target(); | 5016 Handle<JSFunction> target = instr->hydrogen()->target(); |
5017 AllowDeferredHandleDereference smi_check; | 5017 AllowDeferredHandleDereference smi_check; |
5018 if (isolate()->heap()->InNewSpace(*target)) { | 5018 if (isolate()->heap()->InNewSpace(*target)) { |
5019 Register reg = ToRegister(instr->value()); | 5019 Register reg = ToRegister(instr->value()); |
5020 Handle<Cell> cell = isolate()->factory()->NewJSGlobalPropertyCell(target); | 5020 Handle<Cell> cell = isolate()->factory()->NewPropertyCell(target); |
5021 __ li(at, Operand(Handle<Object>(cell))); | 5021 __ li(at, Operand(Handle<Object>(cell))); |
5022 __ lw(at, FieldMemOperand(at, Cell::kValueOffset)); | 5022 __ lw(at, FieldMemOperand(at, Cell::kValueOffset)); |
5023 DeoptimizeIf(ne, instr->environment(), reg, | 5023 DeoptimizeIf(ne, instr->environment(), reg, |
5024 Operand(at)); | 5024 Operand(at)); |
5025 } else { | 5025 } else { |
5026 DeoptimizeIf(ne, instr->environment(), reg, | 5026 DeoptimizeIf(ne, instr->environment(), reg, |
5027 Operand(target)); | 5027 Operand(target)); |
5028 } | 5028 } |
5029 } | 5029 } |
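[Review note] DoCheckFunction embeds the target function directly as an immediate only when it cannot move; for a new-space target it goes through a cell (now allocated with NewPropertyCell) whose contents the GC keeps current, so the address baked into the code stays meaningful across relocation. A stand-alone C++ sketch of the two paths, using hypothetical names (Cell, CheckViaCell, CheckDirect) that are not V8 API:

    #include <cassert>

    // Stand-in for a PropertyCell: a non-moving slot the GC can update.
    struct Cell {
      const void* value;
    };

    // Indirect check: survives relocation of the target, because the GC
    // rewrites cell->value while the cell's own address stays fixed.
    bool CheckViaCell(const void* reg, const Cell* cell) {
      return reg == cell->value;  // lw at, [cell]; deopt if ne
    }

    // Direct check: only valid when the target can never move.
    bool CheckDirect(const void* reg, const void* immediate_target) {
      return reg == immediate_target;  // deopt if ne against the immediate
    }

    int main() {
      int old_space_fn = 0;  // pretend: a target that never moves
      int new_space_fn = 0;  // pretend: a target the GC may relocate
      Cell cell{&new_space_fn};

      assert(CheckDirect(&old_space_fn, &old_space_fn));
      assert(CheckViaCell(&new_space_fn, &cell));

      // Simulate the GC moving the new-space target and updating the cell.
      int moved_fn = 0;
      cell.value = &moved_fn;
      assert(CheckViaCell(&moved_fn, &cell));
      return 0;
    }

The else branch of DoCheckFunction corresponds to CheckDirect; the InNewSpace branch corresponds to CheckViaCell.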
5030 | 5030 |
(...skipping 650 matching lines...)
5681 __ Subu(scratch, result, scratch); | 5681 __ Subu(scratch, result, scratch); |
5682 __ lw(result, FieldMemOperand(scratch, | 5682 __ lw(result, FieldMemOperand(scratch, |
5683 FixedArray::kHeaderSize - kPointerSize)); | 5683 FixedArray::kHeaderSize - kPointerSize)); |
5684 __ bind(&done); | 5684 __ bind(&done); |
5685 } | 5685 } |
5686 | 5686 |
5687 | 5687 |
5688 #undef __ | 5688 #undef __ |
5689 | 5689 |
5690 } } // namespace v8::internal | 5690 } } // namespace v8::internal |