Chromium Code Reviews

Side by Side Diff: src/arm/fast-codegen-arm.cc

Issue 3152016: Remove experimental fast-codegen. We are no longer working on this (Closed) Base URL: http://v8.googlecode.com/svn/branches/bleeding_edge/
Patch Set: Created 10 years, 4 months ago
(The new side of the diff is empty: this patch deletes src/arm/fast-codegen-arm.cc.)
1 // Copyright 2010 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28 #include "v8.h"
29
30 #if defined(V8_TARGET_ARCH_ARM)
31
32 #include "codegen-inl.h"
33 #include "fast-codegen.h"
34 #include "scopes.h"
35
36 namespace v8 {
37 namespace internal {
38
39 #define __ ACCESS_MASM(masm())
40
41 Register FastCodeGenerator::accumulator0() { return r0; }
42 Register FastCodeGenerator::accumulator1() { return r1; }
43 Register FastCodeGenerator::scratch0() { return r3; }
44 Register FastCodeGenerator::scratch1() { return r4; }
45 Register FastCodeGenerator::scratch2() { return r5; }
46 Register FastCodeGenerator::receiver_reg() { return r2; }
47 Register FastCodeGenerator::context_reg() { return cp; }
48
49
50 void FastCodeGenerator::EmitLoadReceiver() {
51 // Offset 2 is due to return address and saved frame pointer.
52 int index = 2 + scope()->num_parameters();
53 __ ldr(receiver_reg(), MemOperand(sp, index * kPointerSize));
54 }
55
56
57 void FastCodeGenerator::EmitGlobalVariableLoad(Handle<Object> cell) {
58 ASSERT(!destination().is(no_reg));
59 ASSERT(cell->IsJSGlobalPropertyCell());
60
61 __ mov(destination(), Operand(cell));
62 __ ldr(destination(),
63 FieldMemOperand(destination(), JSGlobalPropertyCell::kValueOffset));
64 if (FLAG_debug_code) {
65 __ mov(ip, Operand(Factory::the_hole_value()));
66 __ cmp(destination(), ip);
67 __ Check(ne, "DontDelete cells can't contain the hole");
68 }
69
70 // The loaded value is not known to be a smi.
71 clear_as_smi(destination());
72 }
73
74
75 void FastCodeGenerator::EmitThisPropertyStore(Handle<String> name) {
76 LookupResult lookup;
77 info()->receiver()->Lookup(*name, &lookup);
78
79 ASSERT(lookup.holder() == *info()->receiver());
80 ASSERT(lookup.type() == FIELD);
81 Handle<Map> map(Handle<HeapObject>::cast(info()->receiver())->map());
82 int index = lookup.GetFieldIndex() - map->inobject_properties();
83 int offset = index * kPointerSize;
84
85 // We will emit the write barrier unless the stored value is statically
86 // known to be a smi.
87 bool needs_write_barrier = !is_smi(accumulator0());
88
89 // Negative offsets are inobject properties.
90 if (offset < 0) {
91 offset += map->instance_size();
92 __ str(accumulator0(), FieldMemOperand(receiver_reg(), offset));
93 if (needs_write_barrier) {
94 // Preserve receiver from write barrier.
95 __ mov(scratch0(), receiver_reg());
96 }
97 } else {
98 offset += FixedArray::kHeaderSize;
99 __ ldr(scratch0(),
100 FieldMemOperand(receiver_reg(), JSObject::kPropertiesOffset));
101 __ str(accumulator0(), FieldMemOperand(scratch0(), offset));
102 }
103
104 if (needs_write_barrier) {
105 __ RecordWrite(scratch0(), Operand(offset), scratch1(), scratch2());
106 }
107
108 if (destination().is(accumulator1())) {
109 __ mov(accumulator1(), accumulator0());
110 if (is_smi(accumulator0())) {
111 set_as_smi(accumulator1());
112 } else {
113 clear_as_smi(accumulator1());
114 }
115 }
116 }
117
118
119 void FastCodeGenerator::EmitThisPropertyLoad(Handle<String> name) {
120 ASSERT(!destination().is(no_reg));
121 LookupResult lookup;
122 info()->receiver()->Lookup(*name, &lookup);
123
124 ASSERT(lookup.holder() == *info()->receiver());
125 ASSERT(lookup.type() == FIELD);
126 Handle<Map> map(Handle<HeapObject>::cast(info()->receiver())->map());
127 int index = lookup.GetFieldIndex() - map->inobject_properties();
128 int offset = index * kPointerSize;
129
130 // Perform the load. Negative offsets are inobject properties.
131 if (offset < 0) {
132 offset += map->instance_size();
133 __ ldr(destination(), FieldMemOperand(receiver_reg(), offset));
134 } else {
135 offset += FixedArray::kHeaderSize;
136 __ ldr(scratch0(),
137 FieldMemOperand(receiver_reg(), JSObject::kPropertiesOffset));
138 __ ldr(destination(), FieldMemOperand(scratch0(), offset));
139 }
140
141 // The loaded value is not known to be a smi.
142 clear_as_smi(destination());
143 }
144
145
146 void FastCodeGenerator::EmitBitOr() {
147 if (is_smi(accumulator0()) && is_smi(accumulator1())) {
148 // If both operands are known to be a smi then there is no need to check
149 // the operands or result. There is no need to perform the operation in
150 // an effect context.
151 if (!destination().is(no_reg)) {
152 __ orr(destination(), accumulator1(), Operand(accumulator0()));
153 }
154 } else {
155 // Left is in accumulator1, right in accumulator0.
156 if (destination().is(accumulator0())) {
157 __ mov(scratch0(), accumulator0());
158 __ orr(destination(), accumulator1(), Operand(accumulator0()));
159 Label* bailout =
160 info()->AddBailout(accumulator1(), scratch0()); // Left, right.
161 __ BranchOnNotSmi(destination(), bailout);
162 } else if (destination().is(accumulator1())) {
163 __ mov(scratch0(), accumulator1());
164 __ orr(destination(), accumulator1(), Operand(accumulator0()));
165 Label* bailout = info()->AddBailout(scratch0(), accumulator0());
166 __ BranchOnNotSmi(destination(), bailout);
167 } else {
168 ASSERT(destination().is(no_reg));
169 __ orr(scratch0(), accumulator1(), Operand(accumulator0()));
170 Label* bailout = info()->AddBailout(accumulator1(), accumulator0());
171 __ BranchOnNotSmi(scratch0(), bailout);
172 }
173 }
174
175 // If we did not bail out, the result (and in fact both inputs) is known
176 // to be a smi.
177 set_as_smi(accumulator0());
178 set_as_smi(accumulator1());
179 }
180
181
182 void FastCodeGenerator::Generate(CompilationInfo* compilation_info) {
183 ASSERT(info_ == NULL);
184 info_ = compilation_info;
185 Comment cmnt(masm_, "[ function compiled by fast code generator");
186
187 // Save the caller's frame pointer and set up our own.
188 Comment prologue_cmnt(masm(), ";; Prologue");
189 __ stm(db_w, sp, r1.bit() | cp.bit() | fp.bit() | lr.bit());
190 __ add(fp, sp, Operand(2 * kPointerSize));
191 // Note that we keep a live register reference to cp (context) at
192 // this point.
193
194 Label* bailout_to_beginning = info()->AddBailout();
195 // Receiver (this) is allocated to a fixed register.
196 if (info()->has_this_properties()) {
197 Comment cmnt(masm(), ";; MapCheck(this)");
198 if (FLAG_print_ir) {
199 PrintF("MapCheck(this)\n");
200 }
201 ASSERT(info()->has_receiver() && info()->receiver()->IsHeapObject());
202 Handle<HeapObject> object = Handle<HeapObject>::cast(info()->receiver());
203 Handle<Map> map(object->map());
204 EmitLoadReceiver();
205 __ CheckMap(receiver_reg(), scratch0(), map, bailout_to_beginning, false);
206 }
207
208 // If there is a global variable access, check that the global object is
209 // the same as it was at lazy-compilation time.
210 if (info()->has_globals()) {
211 Comment cmnt(masm(), ";; MapCheck(GLOBAL)");
212 if (FLAG_print_ir) {
213 PrintF("MapCheck(GLOBAL)\n");
214 }
215 ASSERT(info()->has_global_object());
216 Handle<Map> map(info()->global_object()->map());
217 __ ldr(scratch0(), CodeGenerator::GlobalObject());
218 __ CheckMap(scratch0(), scratch1(), map, bailout_to_beginning, true);
219 }
220
221 VisitStatements(function()->body());
222
223 Comment return_cmnt(masm(), ";; Return(<undefined>)");
224 if (FLAG_print_ir) {
225 PrintF("Return(<undefined>)\n");
226 }
227 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
228 __ mov(sp, fp);
229 __ ldm(ia_w, sp, fp.bit() | lr.bit());
230 int32_t sp_delta = (scope()->num_parameters() + 1) * kPointerSize;
231 __ add(sp, sp, Operand(sp_delta));
232 __ Jump(lr);
233 }
234
235
236 #undef __
237
238
239 } } // namespace v8::internal
240
241 #endif // V8_TARGET_ARCH_ARM
