OLD | NEW |
1 // Copyright 2010 the V8 project authors. All rights reserved. | 1 // Copyright 2010 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 35 matching lines...)
46 void FastCodeGenerator::EmitLoadReceiver() { | 46 void FastCodeGenerator::EmitLoadReceiver() { |
47 // Offset 2 is due to return address and saved frame pointer. | 47 // Offset 2 is due to return address and saved frame pointer. |
48 int index = 2 + scope()->num_parameters(); | 48 int index = 2 + scope()->num_parameters(); |
49 __ movq(receiver_reg(), Operand(rbp, index * kPointerSize)); | 49 __ movq(receiver_reg(), Operand(rbp, index * kPointerSize)); |
50 } | 50 } |
51 | 51 |
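A note on the "2 +" above: on x64 the caller pushes the receiver, then the arguments, the call pushes the return address, and the prologue saves rbp, so two bookkeeping slots sit between rbp and the caller-pushed values. A sketch of the slot arithmetic (layout inferred from the code; the helper below is hypothetical, not part of the patch):

    // Hypothetical helper mirroring the index computation in
    // EmitLoadReceiver. Slots, from rbp upward, with n parameters:
    //   rbp + 0                              saved rbp
    //   rbp + 1 * kPointerSize               return address
    //   rbp + (2 .. 1 + n) * kPointerSize    the n parameters
    //   rbp + (2 + n) * kPointerSize         receiver
    static int ReceiverStackIndex(int num_parameters) {
      return 2 + num_parameters;  // skip saved rbp, return address, params
    }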
52 | 52 |
53 void FastCodeGenerator::EmitGlobalVariableLoad(Handle<Object> cell) { | 53 void FastCodeGenerator::EmitGlobalVariableLoad(Handle<Object> cell) { |
54 ASSERT(!destination().is(no_reg)); | 54 ASSERT(!destination().is(no_reg)); |
55 ASSERT(cell->IsJSGlobalPropertyCell()); | 55 ASSERT(cell->IsJSGlobalPropertyCell()); |
| 56 |
56 __ Move(destination(), cell); | 57 __ Move(destination(), cell); |
57 __ movq(destination(), | 58 __ movq(destination(), |
58 FieldOperand(destination(), JSGlobalPropertyCell::kValueOffset)); | 59 FieldOperand(destination(), JSGlobalPropertyCell::kValueOffset)); |
59 if (FLAG_debug_code) { | 60 if (FLAG_debug_code) { |
60 __ Cmp(destination(), Factory::the_hole_value()); | 61 __ Cmp(destination(), Factory::the_hole_value()); |
61 __ Check(not_equal, "DontDelete cells can't contain the hole"); | 62 __ Check(not_equal, "DontDelete cells can't contain the hole"); |
62 } | 63 } |
| 64 |
| 65 // The loaded value is not known to be a smi. |
| 66 clear_as_smi(destination()); |
63 } | 67 } |
64 | 68 |
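The new clear_as_smi call (and the set_as_smi/is_smi counterparts used later in the patch) maintains per-register knowledge of whether a value is statically known to be a smi; a global cell can hold anything, so that knowledge is cleared here. The declarations live outside this diff; a minimal sketch of one plausible representation, a bitmask with one bit per register (smi_bits_ is an assumed name, not taken from the patch):

    // Sketch only: per-register smi knowledge as a bitmask. smi_bits_ is
    // hypothetical; Register::bit() is the assembler's one-bit-per-register
    // encoding.
    bool FastCodeGenerator::is_smi(Register reg) {
      return (smi_bits_ & reg.bit()) != 0;
    }
    void FastCodeGenerator::set_as_smi(Register reg) {
      smi_bits_ |= reg.bit();
    }
    void FastCodeGenerator::clear_as_smi(Register reg) {
      smi_bits_ &= ~reg.bit();
    }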
65 | 69 |
66 void FastCodeGenerator::EmitThisPropertyStore(Handle<String> name) { | 70 void FastCodeGenerator::EmitThisPropertyStore(Handle<String> name) { |
67 LookupResult lookup; | 71 LookupResult lookup; |
68 info()->receiver()->Lookup(*name, &lookup); | 72 info()->receiver()->Lookup(*name, &lookup); |
69 | 73 |
70 ASSERT(lookup.holder() == *info()->receiver()); | 74 ASSERT(lookup.holder() == *info()->receiver()); |
71 ASSERT(lookup.type() == FIELD); | 75 ASSERT(lookup.type() == FIELD); |
72 Handle<Map> map(Handle<HeapObject>::cast(info()->receiver())->map()); | 76 Handle<Map> map(Handle<HeapObject>::cast(info()->receiver())->map()); |
73 int index = lookup.GetFieldIndex() - map->inobject_properties(); | 77 int index = lookup.GetFieldIndex() - map->inobject_properties(); |
74 int offset = index * kPointerSize; | 78 int offset = index * kPointerSize; |
75 | 79 |
76 // Negative offsets are inobject properties. | 80 // We will emit the write barrier unless the stored value is statically |
| 81 // known to be a smi. |
| 82 bool needs_write_barrier = !is_smi(accumulator0()); |
| 83 |
| 84 // Perform the store. Negative offsets are inobject properties. |
77 if (offset < 0) { | 85 if (offset < 0) { |
78 offset += map->instance_size(); | 86 offset += map->instance_size(); |
79 __ movq(scratch0(), receiver_reg()); // Copy receiver for write barrier. | 87 __ movq(FieldOperand(receiver_reg(), offset), accumulator0()); |
| 88 if (needs_write_barrier) { |
| 89 // Copy the receiver; the write barrier clobbers its object register. |
| 90 __ movq(scratch0(), receiver_reg()); |
| 91 } |
80 } else { | 92 } else { |
81 offset += FixedArray::kHeaderSize; | 93 offset += FixedArray::kHeaderSize; |
82 __ movq(scratch0(), | 94 __ movq(scratch0(), |
83 FieldOperand(receiver_reg(), JSObject::kPropertiesOffset)); | 95 FieldOperand(receiver_reg(), JSObject::kPropertiesOffset)); |
| 96 __ movq(FieldOperand(scratch0(), offset), accumulator0()); |
84 } | 97 } |
85 // Perform the store. | 98 |
86 __ movq(FieldOperand(scratch0(), offset), accumulator0()); | 99 if (needs_write_barrier) { |
87 if (destination().is(no_reg)) { | 100 if (destination().is(no_reg)) { |
88 __ RecordWrite(scratch0(), offset, accumulator0(), scratch1()); | 101 // After RecordWrite, accumulator0 is only accidentally a smi, but it is |
89 } else { | 102 // already marked as not known to be one. |
90 // Copy the value to the other accumulator to preserve a copy from the | 103 __ RecordWrite(scratch0(), offset, accumulator0(), scratch1()); |
91 // write barrier. One of the accumulators is available as a scratch | 104 } else { |
92 // register. | 105 // Copy the value to the other accumulator to preserve a copy from the |
| 106 // write barrier. One of the accumulators is available as a scratch |
| 107 // register. Neither is a smi. |
| 108 __ movq(accumulator1(), accumulator0()); |
| 109 clear_as_smi(accumulator1()); |
| 110 Register value_scratch = other_accumulator(destination()); |
| 111 __ RecordWrite(scratch0(), offset, value_scratch, scratch1()); |
| 112 } |
| 113 } else if (destination().is(accumulator1())) { |
93 __ movq(accumulator1(), accumulator0()); | 114 __ movq(accumulator1(), accumulator0()); |
94 Register value_scratch = other_accumulator(destination()); | 115 if (is_smi(accumulator0())) { |
95 __ RecordWrite(scratch0(), offset, value_scratch, scratch1()); | 116 set_as_smi(accumulator1()); |
| 117 } else { |
| 118 clear_as_smi(accumulator1()); |
| 119 } |
96 } | 120 } |
97 } | 121 } |
98 | 122 |
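The needs_write_barrier flag is the heart of this hunk: RecordWrite is only required when the stored value might be a heap pointer, and a smi is an immediate encoded in the word itself, so a known-smi store can skip the barrier and all the register shuffling it forces. Summarizing the three paths above (clobber behavior assumed from RecordWrite's register arguments):

    // value known smi:                   plain store; nothing clobbered, and
    //                                    smi-ness is propagated to
    //                                    accumulator1 if it is the destination
    // value unknown, destination no_reg: store + RecordWrite; accumulator0 is
    //                                    clobbered, already not-known-smi
    // value unknown, destination needed: the value is duplicated into both
    //                                    accumulators; the non-destination
    //                                    copy is handed to RecordWrite, so
    //                                    the destination keeps the value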
99 | 123 |
100 void FastCodeGenerator::EmitThisPropertyLoad(Handle<String> name) { | 124 void FastCodeGenerator::EmitThisPropertyLoad(Handle<String> name) { |
101 ASSERT(!destination().is(no_reg)); | 125 ASSERT(!destination().is(no_reg)); |
102 LookupResult lookup; | 126 LookupResult lookup; |
103 info()->receiver()->Lookup(*name, &lookup); | 127 info()->receiver()->Lookup(*name, &lookup); |
104 | 128 |
105 ASSERT(lookup.holder() == *info()->receiver()); | 129 ASSERT(lookup.holder() == *info()->receiver()); |
106 ASSERT(lookup.type() == FIELD); | 130 ASSERT(lookup.type() == FIELD); |
107 Handle<Map> map(Handle<HeapObject>::cast(info()->receiver())->map()); | 131 Handle<Map> map(Handle<HeapObject>::cast(info()->receiver())->map()); |
108 int index = lookup.GetFieldIndex() - map->inobject_properties(); | 132 int index = lookup.GetFieldIndex() - map->inobject_properties(); |
109 int offset = index * kPointerSize; | 133 int offset = index * kPointerSize; |
110 | 134 |
111 // Perform the load. Negative offsets are inobject properties. | 135 // Perform the load. Negative offsets are inobject properties. |
112 if (offset < 0) { | 136 if (offset < 0) { |
113 offset += map->instance_size(); | 137 offset += map->instance_size(); |
114 __ movq(destination(), FieldOperand(receiver_reg(), offset)); | 138 __ movq(destination(), FieldOperand(receiver_reg(), offset)); |
115 } else { | 139 } else { |
116 offset += FixedArray::kHeaderSize; | 140 offset += FixedArray::kHeaderSize; |
117 __ movq(scratch0(), | 141 __ movq(scratch0(), |
118 FieldOperand(receiver_reg(), JSObject::kPropertiesOffset)); | 142 FieldOperand(receiver_reg(), JSObject::kPropertiesOffset)); |
119 __ movq(destination(), FieldOperand(scratch0(), offset)); | 143 __ movq(destination(), FieldOperand(scratch0(), offset)); |
120 } | 144 } |
| 145 |
| 146 // The loaded value is not known to be a smi. |
| 147 clear_as_smi(destination()); |
121 } | 148 } |
122 | 149 |
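Both EmitThisPropertyStore and EmitThisPropertyLoad share this addressing scheme: GetFieldIndex() numbers in-object fields first, so subtracting map->inobject_properties() makes in-object fields negative and overflow fields (those living in the properties FixedArray) non-negative. A worked example with invented numbers:

    // Illustrative arithmetic only; the map parameters are made up.
    // Assume map->inobject_properties() == 4, map->instance_size() == 48.
    //   field 2: index = 2 - 4 = -2, offset = -16 + 48 = 32
    //            -> in-object, FieldOperand(receiver_reg(), 32)
    //   field 5: index = 5 - 4 = 1, offset = 8 + FixedArray::kHeaderSize
    //            -> out-of-object, read through JSObject::kPropertiesOffset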
123 | 150 |
124 void FastCodeGenerator::EmitBitOr() { | 151 void FastCodeGenerator::EmitBitOr() { |
125 Register copied; // One operand is copied to a scratch register. | 152 if (is_smi(accumulator0()) && is_smi(accumulator1())) { |
126 Register other; // The other is not modified by the operation. | 153 // If both operands are known to be a smi then there is no need to check |
127 Register check; // A register is used for the smi check/operation. | 154 // the operands or result. |
128 if (destination().is(no_reg)) { | 155 if (destination().is(no_reg)) { |
129 copied = accumulator1(); // Arbitrary choice of operand to copy. | 156 __ or_(accumulator1(), accumulator0()); |
130 other = accumulator0(); | 157 } else { |
131 check = scratch0(); // Do not clobber either operand register. | 158 // Leave the result in the destination register. Bitwise or is |
| 159 // commutative. |
| 160 __ or_(destination(), other_accumulator(destination())); |
| 161 } |
| 162 } else if (destination().is(no_reg)) { |
| 163 // Result is not needed but do not clobber the operands in case of |
| 164 // bailout. |
| 165 __ movq(scratch0(), accumulator1()); |
| 166 __ or_(scratch0(), accumulator0()); |
| 167 __ JumpIfNotSmi(scratch0(), bailout()); |
132 } else { | 168 } else { |
133 copied = destination(); | 169 // Preserve the destination operand in a scratch register in case of |
134 other = other_accumulator(destination()); | 170 // bailout. |
135 check = destination(); | |
136 } | |
137 __ movq(scratch0(), copied); | |
138 __ or_(check, other); | |
139 // Restore the clobbered operand if necessary. | |
140 if (destination().is(no_reg)) { | |
141 __ JumpIfNotSmi(check, bailout()); | |
142 } else { | |
143 Label done; | 171 Label done; |
144 __ JumpIfSmi(check, &done); | 172 __ movq(scratch0(), destination()); |
145 __ movq(copied, scratch0()); | 173 __ or_(destination(), other_accumulator(destination())); |
| 174 __ JumpIfSmi(destination(), &done); |
| 175 __ movq(destination(), scratch0()); |
146 __ jmp(bailout()); | 176 __ jmp(bailout()); |
147 __ bind(&done); | 177 __ bind(&done); |
148 } | 178 } |
| 179 |
| 180 |
| 181 // If we didn't bail out, the result (in fact, both inputs too) is known to |
| 182 // be a smi. |
| 183 set_as_smi(accumulator0()); |
| 184 set_as_smi(accumulator1()); |
149 } | 185 } |
150 | 186 |
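The restructured EmitBitOr exploits two facts: bitwise OR is commutative, and with a 0 smi tag in the low bit, tag(a | b) == tag(a) | tag(b). The OR result therefore carries a smi tag exactly when both inputs were smis, and in that case it is already the correctly tagged smi result, so a single JumpIfNotSmi on the output checks both operands at once. A standalone check of that invariant (using the one-bit tag for brevity; x64 of this era shifted smi payloads by 32 but kept the same 0 tag bit, so the argument is unchanged):

    // Self-contained illustration, not V8 code.
    #include <cassert>
    #include <cstdint>

    const intptr_t kSmiTagMask = 1;  // assumed encoding: low tag bit 0 <=> smi
    bool IsSmiWord(intptr_t w) { return (w & kSmiTagMask) == 0; }

    int main() {
      intptr_t smi_a = 6 << 1;           // tagged smi 6
      intptr_t smi_b = 3 << 1;           // tagged smi 3
      intptr_t not_smi = (16 << 1) | 1;  // stand-in for a tagged heap pointer
      // The OR of two smis is itself the correctly tagged smi result...
      assert(IsSmiWord(smi_a | smi_b) && ((smi_a | smi_b) >> 1) == (6 | 3));
      // ...and if either operand is not a smi, neither is the OR, so one
      // JumpIfNotSmi on the result checks both operands at once.
      assert(!IsSmiWord(smi_a | not_smi));
      return 0;
    }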
151 | 187 |
152 void FastCodeGenerator::Generate(CompilationInfo* compilation_info) { | 188 void FastCodeGenerator::Generate(CompilationInfo* compilation_info) { |
153 ASSERT(info_ == NULL); | 189 ASSERT(info_ == NULL); |
154 info_ = compilation_info; | 190 info_ = compilation_info; |
155 | 191 |
156 // Save the caller's frame pointer and set up our own. | 192 // Save the caller's frame pointer and set up our own. |
157 Comment prologue_cmnt(masm(), ";; Prologue"); | 193 Comment prologue_cmnt(masm(), ";; Prologue"); |
158 __ push(rbp); | 194 __ push(rbp); |
(...skipping 41 matching lines...)
200 __ ret((scope()->num_parameters() + 1) * kPointerSize); | 236 __ ret((scope()->num_parameters() + 1) * kPointerSize); |
201 | 237 |
202 __ bind(&bailout_); | 238 __ bind(&bailout_); |
203 } | 239 } |
204 | 240 |
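One last arithmetic note on the epilogue: x64 "ret n" pops the return address and then releases n further bytes of stack, so (num_parameters + 1) * kPointerSize drops the parameters plus the receiver that EmitLoadReceiver located above them (a callee-cleans convention, as the code implies).

    // e.g. 3 parameters on x64: ret (3 + 1) * 8 = ret 32, freeing the
    // receiver slot and the three parameter slots in one instruction.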
205 | 241 |
206 #undef __ | 242 #undef __ |
207 | 243 |
208 | 244 |
209 } } // namespace v8::internal | 245 } } // namespace v8::internal |