| OLD | NEW |
| 1 // Copyright 2010 the V8 project authors. All rights reserved. | 1 // Copyright 2010 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 44 matching lines...) |
| 55 ASSERT(cell->IsJSGlobalPropertyCell()); | 55 ASSERT(cell->IsJSGlobalPropertyCell()); |
| 56 | 56 |
| 57 __ mov(destination(), Operand(cell)); | 57 __ mov(destination(), Operand(cell)); |
| 58 __ ldr(destination(), | 58 __ ldr(destination(), |
| 59 FieldMemOperand(destination(), JSGlobalPropertyCell::kValueOffset)); | 59 FieldMemOperand(destination(), JSGlobalPropertyCell::kValueOffset)); |
| 60 if (FLAG_debug_code) { | 60 if (FLAG_debug_code) { |
| 61 __ mov(ip, Operand(Factory::the_hole_value())); | 61 __ mov(ip, Operand(Factory::the_hole_value())); |
| 62 __ cmp(destination(), ip); | 62 __ cmp(destination(), ip); |
| 63 __ Check(ne, "DontDelete cells can't contain the hole"); | 63 __ Check(ne, "DontDelete cells can't contain the hole"); |
| 64 } | 64 } |
| 65 |
| 66 // The loaded value is not known to be a smi. |
| 67 clear_as_smi(destination()); |
| 65 } | 68 } |
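The load above is indirect: the generated code embeds a handle to a JSGlobalPropertyCell and reads the current value out of the cell's value slot, so the global can be reassigned without patching compiled code, and in debug builds it checks that a DontDelete cell never holds the hole sentinel. A minimal C++ sketch of that indirection, with illustrative stand-in names (PropertyCell, kTheHole, and LoadGlobal are not V8's actual API):

    // Sketch of the cell indirection; names and the hole sentinel are
    // stand-ins, not V8 internals.
    #include <cassert>
    #include <cstdint>

    struct PropertyCell {
      intptr_t value;  // Current value of the global variable.
    };

    const intptr_t kTheHole = -1;  // Stand-in for V8's hole value.

    // The generated code embeds &cell as an immediate (the mov above),
    // then dereferences its value slot (the ldr above).
    intptr_t LoadGlobal(const PropertyCell* cell) {
      intptr_t value = cell->value;
      assert(value != kTheHole && "DontDelete cells can't contain the hole");
      return value;
    }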
| 66 | 69 |
| 67 | 70 |
| 68 void FastCodeGenerator::EmitThisPropertyStore(Handle<String> name) { | 71 void FastCodeGenerator::EmitThisPropertyStore(Handle<String> name) { |
| 69 LookupResult lookup; | 72 LookupResult lookup; |
| 70 info()->receiver()->Lookup(*name, &lookup); | 73 info()->receiver()->Lookup(*name, &lookup); |
| 71 | 74 |
| 72 ASSERT(lookup.holder() == *info()->receiver()); | 75 ASSERT(lookup.holder() == *info()->receiver()); |
| 73 ASSERT(lookup.type() == FIELD); | 76 ASSERT(lookup.type() == FIELD); |
| 74 Handle<Map> map(Handle<HeapObject>::cast(info()->receiver())->map()); | 77 Handle<Map> map(Handle<HeapObject>::cast(info()->receiver())->map()); |
| 75 int index = lookup.GetFieldIndex() - map->inobject_properties(); | 78 int index = lookup.GetFieldIndex() - map->inobject_properties(); |
| 76 int offset = index * kPointerSize; | 79 int offset = index * kPointerSize; |
| 77 | 80 |
| 81 // We will emit the write barrier unless the stored value is statically |
| 82 // known to be a smi. |
| 83 bool needs_write_barrier = !is_smi(accumulator0()); |
| 84 |
| 78 // Negative offsets are inobject properties. | 85 // Negative offsets are inobject properties. |
| 79 if (offset < 0) { | 86 if (offset < 0) { |
| 80 offset += map->instance_size(); | 87 offset += map->instance_size(); |
| 81 __ mov(scratch0(), receiver_reg()); // Copy receiver for write barrier. | 88 __ str(accumulator0(), FieldMemOperand(receiver_reg(), offset)); |
| 89 if (needs_write_barrier) { |
| 90 // Preserve receiver from write barrier. |
| 91 __ mov(scratch0(), receiver_reg()); |
| 92 } |
| 82 } else { | 93 } else { |
| 83 offset += FixedArray::kHeaderSize; | 94 offset += FixedArray::kHeaderSize; |
| 84 __ ldr(scratch0(), | 95 __ ldr(scratch0(), |
| 85 FieldMemOperand(receiver_reg(), JSObject::kPropertiesOffset)); | 96 FieldMemOperand(receiver_reg(), JSObject::kPropertiesOffset)); |
| 97 __ str(accumulator0(), FieldMemOperand(scratch0(), offset)); |
| 86 } | 98 } |
| 87 // Perform the store. | 99 |
| 88 __ str(accumulator0(), FieldMemOperand(scratch0(), offset)); | 100 if (needs_write_barrier) { |
| 89 __ mov(scratch1(), Operand(offset)); | 101 __ mov(scratch1(), Operand(offset)); |
| 90 __ RecordWrite(scratch0(), scratch1(), ip); | 102 __ RecordWrite(scratch0(), scratch1(), ip); |
| 103 } |
| 104 |
| 91 if (destination().is(accumulator1())) { | 105 if (destination().is(accumulator1())) { |
| 92 __ mov(accumulator1(), accumulator0()); | 106 __ mov(accumulator1(), accumulator0()); |
| 107 if (is_smi(accumulator0())) { |
| 108 set_as_smi(accumulator1()); |
| 109 } else { |
| 110 clear_as_smi(accumulator1()); |
| 111 } |
| 93 } | 112 } |
| 94 } | 113 } |
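The restructuring above hinges on one fact, stated in the new comment at line 81: a write barrier exists to tell the GC about heap pointers stored into objects, and a smi is an immediate integer rather than a heap pointer, so a store whose value is statically known to be a smi can skip RecordWrite entirely. A hedged sketch of that reasoning, assuming V8's 32-bit smi tagging where a smi has its low bit clear (StoreField and the constant are illustrative):

    // Sketch: why a store of a statically-known smi needs no barrier.
    #include <cstdint>

    const intptr_t kSmiTagMask = 1;  // Low bit 0 => smi.

    bool IsSmi(intptr_t v) { return (v & kSmiTagMask) == 0; }

    // The GC only needs to hear about stores that may create heap
    // pointers. A smi carries its payload in the word itself, so it can
    // never be such a pointer.
    void StoreField(intptr_t* slot, intptr_t value) {
      *slot = value;
      if (!IsSmi(value)) {
        // A real write barrier (RecordWrite in the diff) would note the
        // slot here. When the value is statically a smi this branch is
        // dead, which is exactly the elision the patch performs.
      }
    }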
| 95 | 114 |
| 96 | 115 |
| 97 void FastCodeGenerator::EmitThisPropertyLoad(Handle<String> name) { | 116 void FastCodeGenerator::EmitThisPropertyLoad(Handle<String> name) { |
| 98 ASSERT(!destination().is(no_reg)); | 117 ASSERT(!destination().is(no_reg)); |
| 99 LookupResult lookup; | 118 LookupResult lookup; |
| 100 info()->receiver()->Lookup(*name, &lookup); | 119 info()->receiver()->Lookup(*name, &lookup); |
| 101 | 120 |
| 102 ASSERT(lookup.holder() == *info()->receiver()); | 121 ASSERT(lookup.holder() == *info()->receiver()); |
| 103 ASSERT(lookup.type() == FIELD); | 122 ASSERT(lookup.type() == FIELD); |
| 104 Handle<Map> map(Handle<HeapObject>::cast(info()->receiver())->map()); | 123 Handle<Map> map(Handle<HeapObject>::cast(info()->receiver())->map()); |
| 105 int index = lookup.GetFieldIndex() - map->inobject_properties(); | 124 int index = lookup.GetFieldIndex() - map->inobject_properties(); |
| 106 int offset = index * kPointerSize; | 125 int offset = index * kPointerSize; |
| 107 | 126 |
| 108 // Perform the load. Negative offsets are inobject properties. | 127 // Perform the load. Negative offsets are inobject properties. |
| 109 if (offset < 0) { | 128 if (offset < 0) { |
| 110 offset += map->instance_size(); | 129 offset += map->instance_size(); |
| 111 __ ldr(destination(), FieldMemOperand(receiver_reg(), offset)); | 130 __ ldr(destination(), FieldMemOperand(receiver_reg(), offset)); |
| 112 } else { | 131 } else { |
| 113 offset += FixedArray::kHeaderSize; | 132 offset += FixedArray::kHeaderSize; |
| 114 __ ldr(scratch0(), | 133 __ ldr(scratch0(), |
| 115 FieldMemOperand(receiver_reg(), JSObject::kPropertiesOffset)); | 134 FieldMemOperand(receiver_reg(), JSObject::kPropertiesOffset)); |
| 116 __ ldr(destination(), FieldMemOperand(scratch0(), offset)); | 135 __ ldr(destination(), FieldMemOperand(scratch0(), offset)); |
| 117 } | 136 } |
| 137 |
| 138 // The loaded value is not known to be a smi. |
| 139 clear_as_smi(destination()); |
| 118 } | 140 } |
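Both the load and the store share the same offset arithmetic: field indices are biased by map->inobject_properties(), so in-object fields come out negative and are addressed relative to the end of the instance, while non-negative offsets index the out-of-line properties FixedArray. A small C++ sketch of that arithmetic (Locate and the constants are illustrative; kPointerSize is 4 on 32-bit ARM):

    // Sketch of the offset arithmetic; constants are illustrative.
    const int kPointerSize = 4;           // 32-bit ARM.
    const int kFixedArrayHeaderSize = 8;  // Stand-in for the real header.

    struct FieldLocation {
      bool inobject;    // True: offset is into the object itself.
      int byte_offset;  // From the object or the properties array.
    };

    FieldLocation Locate(int field_index, int inobject_properties,
                         int instance_size) {
      int index = field_index - inobject_properties;
      int offset = index * kPointerSize;
      if (offset < 0) {
        // Negative offsets address the in-object fields laid out at the
        // end of the instance: instance_size + offset lands inside it.
        return { true, instance_size + offset };
      }
      // Non-negative offsets index the out-of-line properties array.
      return { false, kFixedArrayHeaderSize + offset };
    }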
| 119 | 141 |
| 120 | 142 |
| 121 void FastCodeGenerator::EmitBitOr() { | 143 void FastCodeGenerator::EmitBitOr() { |
| 122 Register check; // A register is used for the smi check/operation. | 144 if (is_smi(accumulator0()) && is_smi(accumulator1())) { |
| 123 if (destination().is(no_reg)) { | 145 // If both operands are known to be smis then there is no need to check |
| 124 check = scratch0(); // Do not clobber either operand register. | 146 // the operands or result. There is no need to perform the operation in |
| 147 // an effect context. |
| 148 if (!destination().is(no_reg)) { |
| 149 __ orr(destination(), accumulator1(), Operand(accumulator0())); |
| 150 } |
| 151 } else if (destination().is(no_reg)) { |
| 152 // Result is not needed but do not clobber the operands in case of |
| 153 // bailout. |
| 154 __ orr(scratch0(), accumulator1(), Operand(accumulator0())); |
| 155 __ BranchOnNotSmi(scratch0(), bailout()); |
| 125 } else { | 156 } else { |
| 126 // Preserve whichever operand shares the destination register in case we | 157 // Preserve the destination operand in a scratch register in case of |
| 127 // have to bail out. | 158 // bailout. |
| 159 Label done; |
| 128 __ mov(scratch0(), destination()); | 160 __ mov(scratch0(), destination()); |
| 129 check = destination(); | 161 __ orr(destination(), accumulator1(), Operand(accumulator0())); |
| 130 } | 162 __ BranchOnSmi(destination(), &done); |
| 131 __ orr(check, accumulator1(), Operand(accumulator0())); | |
| 132 // Restore the clobbered operand if necessary. | |
| 133 if (destination().is(no_reg)) { | |
| 134 __ BranchOnNotSmi(check, bailout()); | |
| 135 } else { | |
| 136 Label done; | |
| 137 __ BranchOnSmi(check, &done); | |
| 138 __ mov(destination(), scratch0()); | 163 __ mov(destination(), scratch0()); |
| 139 __ jmp(bailout()); | 164 __ jmp(bailout()); |
| 140 __ bind(&done); | 165 __ bind(&done); |
| 141 } | 166 } |
| 167 |
| 168 // If we didn't bail out, the result (in fact, both inputs too) is known to |
| 169 // be a smi. |
| 170 set_as_smi(accumulator0()); |
| 171 set_as_smi(accumulator1()); |
| 142 } | 172 } |
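The rewritten EmitBitOr leans on a property of smi tagging: for two smis (tag bit 0) the bitwise OR is itself the correctly tagged result, and the OR's tag bit is set iff at least one operand's tag bit was set, so a single smi check after the orr validates both inputs and the result at once. A sketch under V8's 32-bit tagging assumptions (BitOr here is illustrative, with false modeling the bailout to the full compiler):

    // Sketch of the combined OR-and-smi-check trick on 32-bit tagging.
    #include <cassert>
    #include <cstdint>

    const intptr_t kSmiTagMask = 1;  // Low bit 0 => smi, 1 => heap object.

    bool IsSmi(intptr_t v) { return (v & kSmiTagMask) == 0; }

    // Returns true on success; false models the bailout path.
    bool BitOr(intptr_t a, intptr_t b, intptr_t* result) {
      intptr_t r = a | b;
      // If either input had its tag bit set, r has it set too, so one
      // smi check on the OR covers both operands and the result.
      if (!IsSmi(r)) return false;  // bailout()
      *result = r;                  // Already a correctly tagged smi.
      return true;
    }

    int main() {
      intptr_t r;
      assert(BitOr(2 << 1, 5 << 1, &r) && r == ((2 | 5) << 1));
      assert(!BitOr((2 << 1) | 1, 5 << 1, &r));  // Tagged input bails out.
    }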
| 143 | 173 |
| 144 | 174 |
| 145 void FastCodeGenerator::Generate(CompilationInfo* compilation_info) { | 175 void FastCodeGenerator::Generate(CompilationInfo* compilation_info) { |
| 146 ASSERT(info_ == NULL); | 176 ASSERT(info_ == NULL); |
| 147 info_ = compilation_info; | 177 info_ = compilation_info; |
| 148 | 178 |
| 149 // Save the caller's frame pointer and set up our own. | 179 // Save the caller's frame pointer and set up our own. |
| 150 Comment prologue_cmnt(masm(), ";; Prologue"); | 180 Comment prologue_cmnt(masm(), ";; Prologue"); |
| 151 __ stm(db_w, sp, r1.bit() | cp.bit() | fp.bit() | lr.bit()); | 181 __ stm(db_w, sp, r1.bit() | cp.bit() | fp.bit() | lr.bit()); |
| (...skipping 41 matching lines...) |
| 193 __ Jump(lr); | 223 __ Jump(lr); |
| 194 | 224 |
| 195 __ bind(&bailout_); | 225 __ bind(&bailout_); |
| 196 } | 226 } |
| 197 | 227 |
| 198 | 228 |
| 199 #undef __ | 229 #undef __ |
| 200 | 230 |
| 201 | 231 |
| 202 } } // namespace v8::internal | 232 } } // namespace v8::internal |