| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 2746 matching lines...) |
| 2757 // Block constant pool emission to ensure the positions of instructions are | 2757 // Block constant pool emission to ensure the positions of instructions are |
| 2758 // as expected by the patcher. See InstanceofStub::Generate(). | 2758 // as expected by the patcher. See InstanceofStub::Generate(). |
| 2759 Assembler::BlockConstPoolScope block_const_pool(masm()); | 2759 Assembler::BlockConstPoolScope block_const_pool(masm()); |
| 2760 __ bind(deferred->map_check()); // Label for calculating code patching. | 2760 __ bind(deferred->map_check()); // Label for calculating code patching. |
| 2761 // We use Factory::the_hole_value() on purpose instead of loading from the | 2761 // We use Factory::the_hole_value() on purpose instead of loading from the |
| 2762 // root array to force relocation to be able to later patch with | 2762 // root array to force relocation to be able to later patch with |
| 2763 // the cached map. | 2763 // the cached map. |
| 2764 PredictableCodeSizeScope predictable(masm_, 5 * Assembler::kInstrSize); | 2764 PredictableCodeSizeScope predictable(masm_, 5 * Assembler::kInstrSize); |
| 2765 Handle<Cell> cell = factory()->NewCell(factory()->the_hole_value()); | 2765 Handle<Cell> cell = factory()->NewCell(factory()->the_hole_value()); |
| 2766 __ mov(ip, Operand(Handle<Object>(cell))); | 2766 __ mov(ip, Operand(Handle<Object>(cell))); |
| 2767 __ ldr(ip, FieldMemOperand(ip, JSGlobalPropertyCell::kValueOffset)); | 2767 __ ldr(ip, FieldMemOperand(ip, PropertyCell::kValueOffset)); |
| 2768 __ cmp(map, Operand(ip)); | 2768 __ cmp(map, Operand(ip)); |
| 2769 __ b(ne, &cache_miss); | 2769 __ b(ne, &cache_miss); |
| 2770 // We use Factory::the_hole_value() on purpose instead of loading from the | 2770 // We use Factory::the_hole_value() on purpose instead of loading from the |
| 2771 // root array to force relocation to be able to later patch | 2771 // root array to force relocation to be able to later patch |
| 2772 // with true or false. | 2772 // with true or false. |
| 2773 __ mov(result, Operand(factory()->the_hole_value())); | 2773 __ mov(result, Operand(factory()->the_hole_value())); |
| 2774 } | 2774 } |
| 2775 __ b(&done); | 2775 __ b(&done); |
| 2776 | 2776 |
| 2777 // The inlined call site cache did not match. Check null and string before | 2777 // The inlined call site cache did not match. Check null and string before |
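Reviewer note: the change in this hunk is only the rename of JSGlobalPropertyCell::kValueOffset to PropertyCell::kValueOffset; the surrounding mechanism is the instanceof inline cache, which (per the comments above) blocks constant pool emission and pins the sequence to 5 * Assembler::kInstrSize so the stub can later patch the site at a fixed offset from map_check(). A conceptual sketch of the cell indirection follows; it is illustrative only, and PatchableCell / MapCheckHits are made-up names, not V8 API:

    // Conceptual sketch, not the V8 sources: the generated code embeds the
    // address of a heap cell whose value starts out as the-hole and is later
    // patched with the cached map, turning the comparison into an inline
    // cache hit on subsequent runs.
    struct PatchableCell { const void* value; };   // stand-in for PropertyCell

    static bool MapCheckHits(const void* object_map, const PatchableCell* cell) {
      // Mirrors the ldr/cmp pair above: read the cached map out of the cell
      // and compare it against the map of the object under test.
      return object_map == cell->value;
    }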
| (...skipping 2468 matching lines...) |
| 5246 } | 5246 } |
| 5247 } | 5247 } |
| 5248 | 5248 |
| 5249 | 5249 |
| 5250 void LCodeGen::DoCheckFunction(LCheckFunction* instr) { | 5250 void LCodeGen::DoCheckFunction(LCheckFunction* instr) { |
| 5251 Register reg = ToRegister(instr->value()); | 5251 Register reg = ToRegister(instr->value()); |
| 5252 Handle<JSFunction> target = instr->hydrogen()->target(); | 5252 Handle<JSFunction> target = instr->hydrogen()->target(); |
| 5253 AllowDeferredHandleDereference smi_check; | 5253 AllowDeferredHandleDereference smi_check; |
| 5254 if (isolate()->heap()->InNewSpace(*target)) { | 5254 if (isolate()->heap()->InNewSpace(*target)) { |
| 5255 Register reg = ToRegister(instr->value()); | 5255 Register reg = ToRegister(instr->value()); |
| 5256 Handle<Cell> cell = isolate()->factory()->NewJSGlobalPropertyCell(target); | 5256 Handle<Cell> cell = isolate()->factory()->NewPropertyCell(target); |
| 5257 __ mov(ip, Operand(Handle<Object>(cell))); | 5257 __ mov(ip, Operand(Handle<Object>(cell))); |
| 5258 __ ldr(ip, FieldMemOperand(ip, Cell::kValueOffset)); | 5258 __ ldr(ip, FieldMemOperand(ip, Cell::kValueOffset)); |
| 5259 __ cmp(reg, ip); | 5259 __ cmp(reg, ip); |
| 5260 } else { | 5260 } else { |
| 5261 __ cmp(reg, Operand(target)); | 5261 __ cmp(reg, Operand(target)); |
| 5262 } | 5262 } |
| 5263 DeoptimizeIf(ne, instr->environment()); | 5263 DeoptimizeIf(ne, instr->environment()); |
| 5264 } | 5264 } |
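Reviewer note: the rename here is NewJSGlobalPropertyCell to NewPropertyCell. DoCheckFunction goes through a cell when the target is in new space because new-space objects can be moved by the GC, so the generated code embeds the immovable cell instead and loads the current value out of it at run time. A minimal sketch of that indirection, with invented names and under the assumption that the cell itself never moves:

    // Illustrative sketch, not V8 code: comparing through a cell keeps the
    // check valid even if the target function is relocated, because the GC
    // updates cell.value rather than the already-generated machine code.
    struct Cell { const void* value; };      // assumed to live in immovable space

    static bool TargetMatches(const void* reg, const Cell& cell) {
      return reg == cell.value;              // equivalent of the ldr + cmp above
    }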
| 5265 | 5265 |
| 5266 | 5266 |
| (...skipping 613 matching lines...) |
| 5880 __ sub(scratch, result, Operand::PointerOffsetFromSmiKey(index)); | 5880 __ sub(scratch, result, Operand::PointerOffsetFromSmiKey(index)); |
| 5881 __ ldr(result, FieldMemOperand(scratch, | 5881 __ ldr(result, FieldMemOperand(scratch, |
| 5882 FixedArray::kHeaderSize - kPointerSize)); | 5882 FixedArray::kHeaderSize - kPointerSize)); |
| 5883 __ bind(&done); | 5883 __ bind(&done); |
| 5884 } | 5884 } |
| 5885 | 5885 |
| 5886 | 5886 |
| 5887 #undef __ | 5887 #undef __ |
| 5888 | 5888 |
| 5889 } } // namespace v8::internal | 5889 } } // namespace v8::internal |