| OLD | NEW |
| 1 // Copyright 2010 the V8 project authors. All rights reserved. | 1 // Copyright 2010 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 33 matching lines...) |
| 44 | 44 |
| 45 | 45 |
| 46 void FastCodeGenerator::EmitLoadReceiver() { | 46 void FastCodeGenerator::EmitLoadReceiver() { |
| 47 // Offset 2 is due to return address and saved frame pointer. | 47 // Offset 2 is due to return address and saved frame pointer. |
| 48 int index = 2 + scope()->num_parameters(); | 48 int index = 2 + scope()->num_parameters(); |
| 49 __ ldr(receiver_reg(), MemOperand(sp, index * kPointerSize)); | 49 __ ldr(receiver_reg(), MemOperand(sp, index * kPointerSize)); |
| 50 } | 50 } |
| 51 | 51 |
| 52 | 52 |
| 53 void FastCodeGenerator::EmitGlobalVariableLoad(Handle<Object> cell) { | 53 void FastCodeGenerator::EmitGlobalVariableLoad(Handle<Object> cell) { |
| 54 ASSERT(!destination().is(no_reg)); |
| 54 ASSERT(cell->IsJSGlobalPropertyCell()); | 55 ASSERT(cell->IsJSGlobalPropertyCell()); |
| 55 __ mov(accumulator0(), Operand(cell)); | 56 |
| 56 __ ldr(accumulator0(), | 57 __ mov(destination(), Operand(cell)); |
| 57 FieldMemOperand(accumulator0(), JSGlobalPropertyCell::kValueOffset)); | 58 __ ldr(destination(), |
| 59 FieldMemOperand(destination(), JSGlobalPropertyCell::kValueOffset)); |
| 58 if (FLAG_debug_code) { | 60 if (FLAG_debug_code) { |
| 59 __ mov(ip, Operand(Factory::the_hole_value())); | 61 __ mov(ip, Operand(Factory::the_hole_value())); |
| 60 __ cmp(accumulator0(), ip); | 62 __ cmp(destination(), ip); |
| 61 __ Check(ne, "DontDelete cells can't contain the hole"); | 63 __ Check(ne, "DontDelete cells can't contain the hole"); |
| 62 } | 64 } |
| 63 } | 65 } |
| 64 | 66 |
| 65 | 67 |
| 66 void FastCodeGenerator::EmitThisPropertyStore(Handle<String> name) { | 68 void FastCodeGenerator::EmitThisPropertyStore(Handle<String> name) { |
| 67 LookupResult lookup; | 69 LookupResult lookup; |
| 68 info()->receiver()->Lookup(*name, &lookup); | 70 info()->receiver()->Lookup(*name, &lookup); |
| 69 | 71 |
| 70 ASSERT(lookup.holder() == *info()->receiver()); | 72 ASSERT(lookup.holder() == *info()->receiver()); |
| 71 ASSERT(lookup.type() == FIELD); | 73 ASSERT(lookup.type() == FIELD); |
| 72 Handle<Map> map(Handle<HeapObject>::cast(info()->receiver())->map()); | 74 Handle<Map> map(Handle<HeapObject>::cast(info()->receiver())->map()); |
| 73 int index = lookup.GetFieldIndex() - map->inobject_properties(); | 75 int index = lookup.GetFieldIndex() - map->inobject_properties(); |
| 74 int offset = index * kPointerSize; | 76 int offset = index * kPointerSize; |
| 75 | 77 |
| 76 // Negative offsets are inobject properties. | 78 // Negative offsets are inobject properties. |
| 77 if (offset < 0) { | 79 if (offset < 0) { |
| 78 offset += map->instance_size(); | 80 offset += map->instance_size(); |
| 79 __ mov(scratch0(), receiver_reg()); // Copy receiver for write barrier. | 81 __ mov(scratch0(), receiver_reg()); // Copy receiver for write barrier. |
| 80 } else { | 82 } else { |
| 81 offset += FixedArray::kHeaderSize; | 83 offset += FixedArray::kHeaderSize; |
| 82 __ ldr(scratch0(), | 84 __ ldr(scratch0(), |
| 83 FieldMemOperand(receiver_reg(), JSObject::kPropertiesOffset)); | 85 FieldMemOperand(receiver_reg(), JSObject::kPropertiesOffset)); |
| 84 } | 86 } |
| 85 // Perform the store. | 87 // Perform the store. |
| 86 __ str(accumulator0(), FieldMemOperand(scratch0(), offset)); | 88 __ str(accumulator0(), FieldMemOperand(scratch0(), offset)); |
| 87 __ mov(scratch1(), Operand(offset)); | 89 __ mov(scratch1(), Operand(offset)); |
| 88 __ RecordWrite(scratch0(), scratch1(), ip); | 90 __ RecordWrite(scratch0(), scratch1(), ip); |
| 91 if (destination().is(accumulator1())) { |
| 92 __ mov(accumulator1(), accumulator0()); |
| 93 } |
| 89 } | 94 } |
| 90 | 95 |
| 91 | 96 |
| 97 void FastCodeGenerator::EmitThisPropertyLoad(Handle<String> name) { |
| 98 ASSERT(!destination().is(no_reg)); |
| 99 LookupResult lookup; |
| 100 info()->receiver()->Lookup(*name, &lookup); |
| 101 |
| 102 ASSERT(lookup.holder() == *info()->receiver()); |
| 103 ASSERT(lookup.type() == FIELD); |
| 104 Handle<Map> map(Handle<HeapObject>::cast(info()->receiver())->map()); |
| 105 int index = lookup.GetFieldIndex() - map->inobject_properties(); |
| 106 int offset = index * kPointerSize; |
| 107 |
| 108 // Perform the load. Negative offsets are inobject properties. |
| 109 if (offset < 0) { |
| 110 offset += map->instance_size(); |
| 111 __ ldr(destination(), FieldMemOperand(receiver_reg(), offset)); |
| 112 } else { |
| 113 offset += FixedArray::kHeaderSize; |
| 114 __ ldr(scratch0(), |
| 115 FieldMemOperand(receiver_reg(), JSObject::kPropertiesOffset)); |
| 116 __ ldr(destination(), FieldMemOperand(scratch0(), offset)); |
| 117 } |
| 118 } |
| 119 |
| 120 |
| 121 void FastCodeGenerator::EmitBitOr() { |
| 122 Register check; // A register is used for the smi check/operation. |
| 123 if (destination().is(no_reg)) { |
| 124 check = scratch0(); // Do not clobber either operand register. |
| 125 } else { |
| 126 // Preserve whichever operand shares the destination register in case we |
| 127 // have to bail out. |
| 128 __ mov(scratch0(), destination()); |
| 129 check = destination(); |
| 130 } |
| 131 __ orr(check, accumulator1(), Operand(accumulator0())); |
| 132 // Restore the clobbered operand if necessary. |
| 133 if (destination().is(no_reg)) { |
| 134 __ BranchOnNotSmi(check, bailout()); |
| 135 } else { |
| 136 Label done; |
| 137 __ BranchOnSmi(check, &done); |
| 138 __ mov(destination(), scratch0()); |
| 139 __ jmp(bailout()); |
| 140 __ bind(&done); |
| 141 } |
| 142 } |
| 143 |
| 144 |
| 92 void FastCodeGenerator::Generate(CompilationInfo* compilation_info) { | 145 void FastCodeGenerator::Generate(CompilationInfo* compilation_info) { |
| 93 ASSERT(info_ == NULL); | 146 ASSERT(info_ == NULL); |
| 94 info_ = compilation_info; | 147 info_ = compilation_info; |
| 95 | 148 |
| 96 // Save the caller's frame pointer and set up our own. | 149 // Save the caller's frame pointer and set up our own. |
| 97 Comment prologue_cmnt(masm(), ";; Prologue"); | 150 Comment prologue_cmnt(masm(), ";; Prologue"); |
| 98 __ stm(db_w, sp, r1.bit() | cp.bit() | fp.bit() | lr.bit()); | 151 __ stm(db_w, sp, r1.bit() | cp.bit() | fp.bit() | lr.bit()); |
| 99 __ add(fp, sp, Operand(2 * kPointerSize)); | 152 __ add(fp, sp, Operand(2 * kPointerSize)); |
| 100 // Note that we keep a live register reference to cp (context) at | 153 // Note that we keep a live register reference to cp (context) at |
| 101 // this point. | 154 // this point. |
| (...skipping 38 matching lines...) |
| 140 __ Jump(lr); | 193 __ Jump(lr); |
| 141 | 194 |
| 142 __ bind(&bailout_); | 195 __ bind(&bailout_); |
| 143 } | 196 } |
| 144 | 197 |
| 145 | 198 |
| 146 #undef __ | 199 #undef __ |
| 147 | 200 |
| 148 | 201 |
| 149 } } // namespace v8::internal | 202 } } // namespace v8::internal |
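A few notes on the hunks above, for review context. First, the ldr/str sites go through FieldMemOperand rather than a raw MemOperand because V8 heap pointers carry a tag in the low bit; the helper folds the untagging into the address displacement instead of clearing the bit first. A minimal standalone sketch of that arithmetic, assuming the usual kHeapObjectTag of 1 (the FieldAddress name is hypothetical):

```cpp
#include <cstdint>

constexpr intptr_t kHeapObjectTag = 1;  // assumption: heap pointers tagged with 1

// Effective address of a field at 'offset' inside a tagged heap-object
// pointer. Subtracting the tag folds the untagging into the displacement,
// which is what FieldMemOperand does for the ldr/str instructions above.
inline intptr_t FieldAddress(intptr_t tagged_object, int offset) {
  return tagged_object + offset - kHeapObjectTag;
}
```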
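Second, EmitThisPropertyStore and the new EmitThisPropertyLoad share the same field-offset computation: indices below map->inobject_properties() address slots at the tail of the object itself, while larger indices land in the out-of-line properties FixedArray. A sketch of that arithmetic with the V8 constants passed in as plain parameters (the helper name and struct are hypothetical):

```cpp
struct FieldLocation {
  bool in_object;  // true: offset is relative to the receiver itself
  int offset;      // untagged byte offset within the object or array
};

// Mirrors the index/offset computation in EmitThisPropertyStore and
// EmitThisPropertyLoad above.
FieldLocation ComputeFieldLocation(int field_index, int inobject_properties,
                                   int instance_size, int pointer_size,
                                   int fixed_array_header_size) {
  int index = field_index - inobject_properties;
  int offset = index * pointer_size;
  if (offset < 0) {
    // In-object property: the negative index is rebased against the
    // instance size, so the slot sits at the end of the object proper.
    return {true, offset + instance_size};
  }
  // Out-of-line property: stored in the properties FixedArray, past
  // its header.
  return {false, offset + fixed_array_header_size};
}
```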
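Finally, the single orr in EmitBitOr doubles as the smi check for both operands: smis are tagged with a zero low bit, so the OR of two words has a clear tag bit exactly when both inputs are smis, and for two smis the OR is simultaneously the desired result. A sketch of that invariant, assuming the standard 1-bit smi tag of 0:

```cpp
#include <cstdint>

constexpr intptr_t kSmiTagMask = 1;  // assumption: 1-bit tag, smi tag == 0

inline bool IsSmi(intptr_t value) { return (value & kSmiTagMask) == 0; }

// One test covers both operands; this is the check the generated code
// performs on the result of the orr instruction before deciding whether
// to take the bailout path.
inline bool BothSmis(intptr_t a, intptr_t b) { return IsSmi(a | b); }
```

This also explains the save/restore dance around the orr: when destination() aliases one of the operand registers, the orr clobbers it, so the old value is parked in scratch0() and moved back on the bailout path.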